code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
module Flowskell.Lib.Shaders where
import Prelude hiding ( sum )
import Control.Applicative
import Control.Monad
import Control.Exception
import Data.Foldable ( Foldable, sum )
import Data.IORef
import Data.Maybe (fromJust)
import Data.Array (elems)
import Data.List (elemIndex)
import Graphics.UI.GLUT hiding (Float)
import Language.Scheme.Types
import Flowskell.State (State(..))
import Flowskell.SchemeUtils (extractFloat)
import Flowskell.ShaderUtils (readCompileAndLink)
-- | Convert a Scheme 'LispVal' float into an OpenGL 'GLfloat'.
extractGLfloat :: LispVal -> GLfloat
extractGLfloat = realToFrac . extractFloat
-- | Compile and link a shader program and append it to the shader list,
-- returning its index as a Scheme number. The single-argument form looks up
-- @shaders/NAME.vert@ and @shaders/NAME.frag@ relative to the working
-- directory.
--
-- NOTE(review): patterns are non-exhaustive — calling with other argument
-- shapes crashes with a pattern-match failure; confirm the Scheme side only
-- passes one or two strings.
loadShader :: IORef [Maybe Program] -> [LispVal] -> IO LispVal
loadShader shdLstRef [String vert, String frag] = do
    shdLst <- get shdLstRef
    prg <- readCompileAndLink vert frag
    writeIORef shdLstRef (shdLst ++ [Just prg])
    -- The new program sits at the old list length, i.e. its index.
    return $ Number (fromIntegral (length shdLst))
loadShader shdLstRef [String name] = do
    loadShader shdLstRef [String vert, String frag]
    where vert = "shaders/" ++ name ++ ".vert"
          frag = "shaders/" ++ name ++ ".frag"
-- | Select the shader program stored at the given index; with no argument,
-- select index 0, which always holds 'Nothing' (fixed-function pipeline).
-- Returns @(Number 1)@ on success. Out-of-range indices are now rejected
-- with @(Number 0)@ instead of crashing on an unchecked list index
-- (resolves the previous TODO about bounds checking).
setShader :: IORef [Maybe Program] -> [LispVal] -> IO LispVal
setShader shdLstRef [Number n] = do
    shdLst <- get shdLstRef
    let idx = fromIntegral n
    if idx >= 0 && idx < length shdLst
      then do
        currentProgram $= shdLst !! idx
        return (Number 1)
      else return (Number 0)
setShader shdLstRef [] = setShader shdLstRef [Number 0]
-- | Return the index of the currently active shader program within the
-- shader list. Falls back to @(Number 0)@ (the fixed-function slot) when
-- the active program was not registered through 'loadShader', instead of
-- crashing on an irrefutable @Just@ pattern.
getShader :: IORef [Maybe Program] -> [LispVal] -> IO LispVal
getShader shdLstRef [] = do
    shdLst <- get shdLstRef
    shd <- get currentProgram
    case elemIndex shd shdLst of
      Just index -> return (Number $ fromIntegral index)
      Nothing    -> return (Number 0)
-- | Set a uniform variable on the currently bound shader program. Supports
-- scalar floats and three-component vectors. The inner helper is named
-- @assign@ rather than shadowing the top-level @setUniform@.
setUniform shdLstRef [String name, f@(Float _)] = do
    Just prg <- get currentProgram
    let assign var val = do
          location <- get (uniformLocation prg var)
          reportErrors
          uniform location $= val
    assign name (Index1 ((extractFloat f) :: GLfloat))
    return (Number 1)
setUniform shdLstRef [String name, (Vector v)] = do
    Just prg <- get currentProgram
    let [x, y, z] = map extractGLfloat (elems v)
        assign var val = do
          location <- get (uniformLocation prg var)
          reportErrors
          uniform location $= val
    assign name (Vertex3 x y z)
    return (Number 1)
-- | Set the motion-blur factor of the rendering 'State' from a Scheme
-- number.
--
-- NOTE(review): non-exhaustive — calling @(blur)@ with no argument crashes
-- with a pattern-match failure; confirm callers always pass one value.
doBlur :: State -> [LispVal] -> IO LispVal
doBlur state [n] = do
    blurFactor state $= (extractFloat n)
    return (Number 1)
-- | Build the Scheme primitive table for shader handling. The shared shader
-- list starts with a single 'Nothing' entry, so index 0 always means
-- "no shader" (fixed-function pipeline).
initShaders :: State -> IO [(String, [LispVal] -> IO LispVal)]
initShaders state = do
    shdLstRef <- newIORef [Nothing]
    return [
      ("load-shader", loadShader shdLstRef),
      ("shader", setShader shdLstRef),
      ("get-shader", getShader shdLstRef),
      ("set-uniform", setUniform shdLstRef),
      ("blur", doBlur state)
      ]
| lordi/flowskell | src/Flowskell/Lib/Shaders.hs | gpl-2.0 | 3,271 | 0 | 15 | 1,179 | 971 | 486 | 485 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Turing.Konfiguration where
-- $Id$
import Machine.History
import Turing.Type
import Autolib.ToDoc
import Data.Typeable
-- | A configuration (snapshot) of a Turing machine: the tape split around
-- the head, the current state, the trail of predecessor configurations,
-- and a step counter.
data Konfiguration y z =
    Konfiguration { band_links :: [ y ]
                    -- ^ tape left of the head, stored reversed
                    -- (it is reversed back in 'Show' and 'bandinhalt')
                  , aktuelles_zeichen :: y
                    -- ^ symbol under the head
                  , band_rechts :: [ y ]
                    -- ^ tape right of the head
                  , zustand :: z
                    -- ^ current machine state
                  , geschichte :: [Konfiguration y z]
                    -- ^ predecessor configurations
                  , schritt :: Int
                    -- ^ number of steps taken so far
                  }
    deriving Typeable
instance History ( Konfiguration y z ) where
history = geschichte
nummer :: TuringC y z
=> Konfiguration y z -> Int
nummer = length . geschichte
instance TuringC y z
=> Show (Konfiguration y z) where
showsPrec p k = showString $
( show ( schritt k ) ++ ": " )
++ " " ++ ( show (reverse (band_links k)) )
++ " " ++ ( show (zustand k, aktuelles_zeichen k) )
++ " " ++ ( show ( band_rechts k) )
instance TuringC y z => ToDoc (Konfiguration y z) where
toDoc = text . show
showlinks :: TuringC y z
=> Konfiguration y z -> String
showlinks = unlines . map show . links
-- | The "essential" part of a configuration: the tape and state, used for
-- the 'Eq' and 'Ord' instances below.
wesentlich k =
    (band_links k, aktuelles_zeichen k, band_rechts k, zustand k)
-- but not the history/links
instance (Eq y, Eq z) => Eq (Konfiguration y z) where
k1 == k2 = wesentlich k1 == wesentlich k2
instance (Ord y, Ord z) => Ord (Konfiguration y z) where
k1 `compare` k2 = wesentlich k1 `compare` wesentlich k2
links :: Konfiguration y z -> [ Konfiguration y z ]
links k = k : geschichte k
-- | Build the initial configuration of machine @m@ on input @xs@: empty
-- left tape, head on the first input symbol (or the machine's blank symbol
-- for empty input), start state, empty history, step 0.
start_konfiguration :: TuringC y z
         => Turing y z -> [y] -> Konfiguration y z
start_konfiguration m xs =
    Konfiguration { band_links = []
                  , aktuelles_zeichen = case xs of [] -> leerzeichen m
                                                   (y : ys) -> y
                  , zustand = startzustand m
                  , band_rechts = case xs of [] -> []
                                             (y : ys) -> ys
                  , geschichte = []
                  , schritt = 0
                  }
-- | The tape contents of a configuration, with leading and trailing blank
-- symbols stripped off.
bandinhalt :: TuringC y z
         => Turing y z -> Konfiguration y z -> [ y ]
bandinhalt m k =
    let blank        = leerzeichen m
        dropLeading  = dropWhile (== blank)
        dropTrailing = reverse . dropLeading . reverse
        full = reverse ( band_links k ) ++ aktuelles_zeichen k : band_rechts k
    in  dropLeading (dropTrailing full)
| Erdwolf/autotool-bonn | src/Turing/Konfiguration.hs | gpl-2.0 | 2,149 | 22 | 15 | 604 | 811 | 422 | 389 | 58 | 3 |
-- | Provides a system for analysing image data.
module Graphics.Forensics.Analyser
( -- * Analyser
Analyser(..)
-- * Output
, Analysis
-- * Task handling
, task
, step
-- * Evaluation
, evaluate
-- * Reporting
, report
, reportEntry
-- ** Convenience functions
, reportDebug
, reportInfo
, reportWarning
, reportError
, reportCritical
-- ** Convenience text functions
, whisper
, inform
, warn
, yell
, panic
-- * Misc
, readVersion
) where
import Control.DeepSeq
import Control.Monad.Progress (ProgressT)
import qualified Control.Monad.Progress as Progress
import Control.Monad.Writer
import qualified Data.Set as Set
import Data.Text (Text)
import Data.Version (Version, parseVersion)
import Data.Word
import Text.ParserCombinators.ReadP
import Graphics.Forensics.Report
-- | A monad modelling a running analysis.
type Analysis a = ProgressT Text (Writer Report) a
{-|
An 'Analyser' that analyses some type of item.
The analyser can yield 'Progress' indications during its execution, and
incrementally write to a 'Report' that is to be considered the result
of the analysis.
-}
data Analyser i =
Analyser
{ -- | Analyse the given item with this analyser
analyse :: i -> Analysis ()
-- | The human-readable name of this analyser
, name :: Text
-- | The author of this analyser
, author :: Text
-- | The version of this analyser
, version :: Version
}
-- | Fully evaluates the argument to normal form, ensuring that it is
-- evaluated before the next monad action runs.
evaluate :: (NFData a) => a -> Analysis a
evaluate x = return $!! x
-- | Wraps an 'Analysis' in a task. This wrapping makes it possible to
-- monitor the progress of the task.
task :: Text -- ^ Name of the task
-> Word -- ^ Number of steps required to complete the task
-> Analysis a -- ^ The 'Analysis' performing the task
-> Analysis a
task = Progress.task
-- | Marks one step of the current 'Analysis' as completed. This
-- function may only be used if the current 'Analysis' performs a 'task'
step :: Analysis ()
step = Progress.step
-- | Merges a report with the report of this 'Analysis'
report :: Report -> Analysis ()
report = lift . tell
-- | Adds a report entry to the report of this 'Analysis'
reportEntry :: ReportEntry -> Analysis ()
reportEntry = report . Set.singleton
-- | Reports something with a 'DebugLevel' of importance
reportDebug :: Text -> ReportData -> Analysis ()
reportDebug msg = reportEntry . ReportEntry DebugLevel msg
-- | Reports something with an 'InformationLevel' of importance
reportInfo :: Text -> ReportData -> Analysis ()
reportInfo msg = reportEntry . ReportEntry InformationLevel msg
-- | Reports something with a 'WarningLevel' of importance
reportWarning :: Text -> ReportData -> Analysis ()
reportWarning msg = reportEntry . ReportEntry WarningLevel msg
-- | Reports something with an 'ErrorLevel' of importance
reportError :: Text -> ReportData -> Analysis ()
reportError msg = reportEntry . ReportEntry ErrorLevel msg
-- | Reports something with a 'CriticalLevel' of importance
reportCritical :: Text -> ReportData -> Analysis ()
reportCritical msg = reportEntry . ReportEntry CriticalLevel msg
-- | Logs a message with 'DebugLevel' importance
whisper :: Text -> Analysis ()
whisper = flip reportDebug ReportNothing
-- | Logs a message with 'InformationLevel' importance
inform :: Text -> Analysis ()
inform = flip reportInfo ReportNothing
-- | Logs a message with 'WarningLevel' importance
warn :: Text -> Analysis ()
warn = flip reportWarning ReportNothing
-- | Logs a message with 'ErrorLevel' importance
yell :: Text -> Analysis ()
yell = flip reportError ReportNothing
-- | Logs a message with 'CriticalLevel' importance
panic :: Text -> Analysis ()
panic = flip reportCritical ReportNothing
-- | Parse a version string such as @\"1.2.3\"@ into a 'Version'.
--
-- Only complete parses (no trailing input) are accepted. On malformed
-- input this reports the offending string, instead of the original
-- behaviour of failing with an uninformative @Prelude.head: empty list@.
readVersion :: String -> Version
readVersion str =
    case [x | (x, "") <- readP_to_S parseVersion str] of
      (v : _) -> v
      []      -> error $ "readVersion: malformed version string " ++ show str
| Purview/purview | src/Graphics/Forensics/Analyser.hs | gpl-3.0 | 4,107 | 0 | 11 | 924 | 715 | 406 | 309 | 72 | 1 |
module Main (main) where
import Prelude hiding ((.), id)
import Control.Applicative
import Control.Category
import Control.Monad
import Control.Monad.IO.Class
import Data.Lens.Common
import Data.Lens.Template
import System.Environment
import System.IO
import System.Log.Logger
import System.Log.Handler.Simple
import BarTender.Client
import BarTender.Dzen
import BarTender.Options
import BarTender.Timer
import BarTender.Util
-- Options to this program, gotten from the command line, a config file, or
-- something like that
data Options = Options
{ _name :: String -- The client name
, _help :: Bool -- Whether the user wants the help message
, _path :: FilePath -- The command to run
, _connOpts :: ConnectionOptions -- The client connection options
}
deriving Show
$( makeLenses [ ''Options ] )
defaultOptions :: Options
defaultOptions = Options
{ _name = "FileClient"
, _help = False
, _path = "-"
, _connOpts = defaultConnectionOptions
}
optionDescriptions :: [OptDescr (Options -> Either String Options)]
optionDescriptions =
[ Option ['p'] ["port"]
(flip ReqArg "PORT" $ \str -> Right . (connectPort . connOpts ^= str))
"The server port"
, Option ['r'] ["retries"]
(flip ReqArg "N" $ \str -> case maybeRead str of
Just n -> Right . (connectRetries . connOpts ^= n)
Nothing -> const . Left $ "Invalid number '" ++ str ++ "'")
"Number of times to retry connecting"
, Option ['t'] ["timeout"]
(flip ReqArg "N" $ \str -> case maybeRead str of
Just n -> Right . (connectTimeout . connOpts ^= n)
Nothing -> const . Left $ "Invalid number '" ++ str ++ "'")
"Number of seconds to wait for server response"
, Option ['n'] ["name"]
(flip ReqArg "NAME" $ \str -> Right . (name ^= str))
"The name the client reports to the server"
, Option ['h', '?'] ["help"]
(NoArg $ Right . (help ^= True))
"Display this help message"
]
main :: IO ()
main = do
-- Set up logging
logHandler <- verboseStreamHandler stderr DEBUG
updateGlobalLogger rootLoggerName $ setLevel DEBUG . setHandlers [logHandler]
debugM "Main.main" $ "Enter"
errorOrOptions <- handleOpt (Between 1 2) defaultOptions <$>
getOpt RequireOrder optionDescriptions <$> getArgs
debugM "Main.main" $ "errorOrOptions: " ++ show errorOrOptions
case errorOrOptions of
Left error -> putStrLn error >> putStrLn "" >> printHelpMessage
Right (opts, posArgs) -> let options = handlePositional posArgs opts in
if options ^. help
then printHelpMessage
else void $ do
handle <- openInputHandle $ options ^. path
hSetBuffering handle LineBuffering
runClient (options ^. name) $ do
connectClient $ options ^. connOpts
doWhile not $ do
liftIO (hGetLine handle) >>= updateClient
liftIO $ hIsEOF handle
debugM "Main.main" $ "Exit"
where
openInputHandle :: FilePath -> IO Handle
openInputHandle path = if path == "-"
then return stdin
else openFile path ReadMode
handlePositional :: [String] -> Options -> Options
handlePositional posArgs = foldr (.) id
[ connectHost . connOpts ^= posArgs !! 0
, if length posArgs == 2 then (path ^= posArgs !! 1) else id
]
printHelpMessage :: IO ()
printHelpMessage = do
putStr $ usageInfo "FileClient" optionDescriptions
| chrisbouchard/bartender | clients/FileClient/Main.hs | gpl-3.0 | 3,706 | 0 | 25 | 1,097 | 968 | 511 | 457 | -1 | -1 |
{- |
Module : Tct.Encoding.Natring
Copyright : (c) Martin Avanzini <[email protected]>,
Georg Moser <[email protected]>,
Andreas Schnabl <[email protected]>
License : LGPL (see COPYING)
Maintainer : Andreas Schnabl <[email protected]>
Stability : unstable
Portability : unportable
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Tct.Encoding.Natring where
import qualified Qlogic.Arctic as Arc
import qualified Qlogic.ArcSat as AS
import qualified Qlogic.Diophantine as D
import Qlogic.Formula
import qualified Qlogic.NatSat as N
import Qlogic.Semiring
type GenSizeNatFormula l = N.Size -> N.NatFormula l
type GenSizeArcFormula l = AS.Size -> AS.ArcFormula l
instance Eq l => RingConst (GenSizeNatFormula l) where
czero _ = N.natToFormula 0
cone _ = N.natToFormula 1
ringvar = flip N.natAtom . D.toDioVar
restrictvar v _ = N.natAtom (N.Bound 1) $ D.toDioVar v
instance Eq l => RingConst (GenSizeArcFormula l) where
czero _ = AS.arcToFormula zero
cone _ = AS.arcToFormula one
ringvar = flip AS.arcAtom . D.toDioVar
restrictvar v _ = AS.arcAtom (AS.Bits 1) $ D.toDioVar v
instance Semiring b => RingConst (D.DioPoly D.DioVar b) where
czero = D.constToPoly zero
cone = D.constToPoly one
ringvar = D.varToPoly . D.toDioVar
restrictvar = D.restrictVarToPoly . D.toDioVar
instance RingConst (N.Size -> Int) where
czero = const 0
cone = const 1
ringvar = const $ const 0
restrictvar = const $ const 0
instance RingConst (AS.Size -> Arc.ArcInt) where
czero = const Arc.MinusInf
cone = const $ Arc.Fin 0
ringvar = const $ const $ Arc.Fin 0
restrictvar = const $ const $ Arc.Fin 0
-- instance Eq l => Semiring (N.NatFormula l) where
-- plus = (N..+.)
-- prod = (N..*.)
-- zero = N.natToFormula 0
-- one = N.natToFormula 1
instance (Eq a, Eq b, Semiring b) => Semiring (D.DioPoly a b) where
plus = D.add
prod = D.mult
zero = D.constToPoly zero
one = D.constToPoly one
-- instance Eq l => AbstrEq (N.NatFormula l) (PropFormula l) where
-- (.==.) = (N..=.)
--
-- instance Eq l => AbstrOrd (N.NatFormula l) (PropFormula l) where
-- (.<.) = flip (N..>.)
-- (.<=.) = flip (N..>=.)
--
-- instance Eq l => AbstrOrdSemiring (N.NatFormula l) (PropFormula l)
instance (Eq l, Eq a, Eq b) => AbstrEq (D.DioPoly a b) (D.DioFormula l a b) where
x .==. y = A (x `D.Equ` y)
instance (Eq l, Eq a, Eq b) => AbstrOrd (D.DioPoly a b) (D.DioFormula l a b) where
x .<. y = A (y `D.Grt` x)
x .<=. y = A (y `D.Geq` x)
instance (Eq l, Eq b, Semiring b) => AbstrOrdSemiring (D.DioPoly D.DioVar b) (D.DioFormula l D.DioVar b)
instance AbstrEq Int Bool where
(.==.) = (==)
instance AbstrOrd Int Bool where
(.<.) = (<)
(.<=.) = (<=)
instance AbstrEq Arc.ArcInt Bool where
(.==.) = (==)
instance AbstrOrd Arc.ArcInt Bool where
(.<.) = (Arc.<)
(.<=.) = (Arc.<=)
instance AbstrOrdSemiring Int Bool
instance AbstrOrdSemiring Arc.ArcInt Bool
| mzini/TcT | source/Tct/Encoding/Natring.hs | gpl-3.0 | 3,082 | 0 | 10 | 643 | 933 | 502 | 431 | 60 | 0 |
module Main(Main.main) where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Gdk.GC hiding (fill)
import Graphics.Rendering.Cairo
import Control.Monad.State as MS
import Control.Concurrent.MVar
import Control.Concurrent
import System.Random
import AbstractUI
import Core
import Core.Game
-- Constants
bluishGray = Color (256*48) (256*99) (256*99)
bluishSilver = Color (256*210) (256*255) (256*255)
blockSize = 16
blockMargin = 1
setBluishLighterGray = setSourceRGB (79/256) (130/256) (130/256)
setBluishGray = setSourceRGB (48/256) (99/256) (99/256)
setBluishEvenLighter = setSourceRGB (145/256) (196/256) (196/256)
setBluishSilver = setSourceRGB (210/256) (255/256) (255/256)
main :: IO()
main = do
-- Model
seed <- getStdGen
ui <- newMVar (newUI seed)
-- Every so often, we try to run other threads.
timeoutAddFull (yield >> return True) priorityDefaultIdle 100
-- GUI components
initGUI
window <- windowNew
set window [windowTitle := "Tetrix",
windowDefaultWidth := 700, windowDefaultHeight := 400]
frame <- frameNew
containerAdd window frame
canvas <- drawingAreaNew
containerAdd frame canvas
widgetModifyBg canvas StateNormal bluishGray
-- Show and run
widgetShowAll window
drawin <- widgetGetDrawWindow canvas
-- Create the lightweight thread that controls ticking
forkIO (tickUI ui canvas)
-- Events and callbacks
window `on` deleteEvent $ tryEvent (liftIO mainQuit)
canvas `on` exposeEvent $ tryEvent (exposeHandler ui drawin)
window `on` keyPressEvent $ tryEvent (keyPressHandler ui canvas)
-- Main loop
mainGUI
-- Ticking functions
tickUI :: MVar AbstractUI -> DrawingArea -> IO()
tickUI ui canvas = do
threadDelay 1000000
aui <- takeMVar ui
putMVar ui (tick aui)
postGUIAsync $ widgetQueueDraw canvas
tickUI ui canvas
-- Handlers
-- Redraw handler
exposeHandler :: MVar AbstractUI -> DrawWindow -> EventM EExpose ()
exposeHandler ui drawin = do
content <- liftIO $ readMVar ui
liftIO $ renderWithDrawable drawin (render content)
-- Handles all the keyboard interactions
keyPressHandler :: WidgetClass a => MVar AbstractUI -> a -> EventM EKey ()
keyPressHandler mvs drawin = do
key <- eventKeyVal
liftIO $ updateModel mvs key
liftIO $ widgetQueueDraw drawin
-- | Map a GDK key press to a transformation of the abstract game state
-- stored in the 'MVar'. Unrecognised keys leave the state unchanged.
updateModel :: MVar AbstractUI -> KeyVal -> IO ()
updateModel ui key = do
    oldUI <- takeMVar ui
    -- Key values are raw GDK keyvals, annotated per case below.
    let newUI = case key of
                  32    -> dropPiece oldUI -- Space
                  65362 -> rotateCW oldUI  -- Up
                  65364 -> tick oldUI      -- Down
                  65361 -> left oldUI      -- Left
                  65363 -> right oldUI     -- Right
                  _     -> oldUI
    putMVar ui newUI
-- | Draw one board at board-cell offset @(offx,offy)@: the empty grid, the
-- settled blocks, and the currently falling piece.
drawBoard :: (Int,Int) -> (Int,Int) -> [Block] -> [Block] -> Render()
drawBoard (offx,offy) size blks cp = do
    drawEmptyGrid size
    drawBlocks size blks
    -- BUG FIX: paint the falling piece from @cp@. The original called
    -- @drawCurrent size blks@, repainting the settled blocks a second time
    -- and never rendering the active piece (@cp@ was unused).
    drawCurrent size cp
    where
      -- The currently falling piece, in the highlight colour.
      drawCurrent :: (Int,Int) -> [Block] -> Render()
      drawCurrent tam bs = do
        setBluishSilver
        paintBlocks tam bs
      -- Already-settled blocks, in the lighter board colour.
      drawBlocks :: (Int,Int) -> [Block] -> Render()
      drawBlocks size blks = do
        setBluishEvenLighter
        paintBlocks size blks
      -- Fill one rectangle per block position.
      paintBlocks :: (Int,Int) -> [Block] -> Render()
      paintBlocks (a,b) bs = do
        mapM_ (\blk -> let (x,y) = posBlock blk
                       in buildRectangle (a,b) (fromIntegral x) (fromIntegral y)) bs
        fill
      -- Outline every cell of the grid.
      drawEmptyGrid :: (Int,Int) -> Render ()
      drawEmptyGrid (a,b) = do
        setBluishLighterGray
        setLineWidth 1
        let coords = [(fromIntegral x, fromIntegral y) | x <- [0..(a-1)], y <- [0..(b-1)]]
            recs = map (\(x,y) -> buildRectangle (a,b) x y) coords
        sequence_ recs
        stroke
      -- Convert a cell coordinate to a device-space rectangle, honouring
      -- the board offset captured from the enclosing pattern. The y axis is
      -- flipped so cell (0,0) is at the bottom of the board.
      buildRectangle :: (Int,Int) -> Double -> Double -> Render()
      buildRectangle (a,b) x y = rectangle x0 y0 width height
        where
          x0 = (x+fromIntegral offx)*blockSize
          y0 = (fromIntegral (b+offy)-y)*blockSize
          width = blockSize
          height = blockSize
-- | Render the whole scene: the main board at the origin and a 4x4 preview
-- board for the next piece offset 12 cells to the right.
render :: AbstractUI -> Render()
render ui = do
    let gv = view ui
        size = gridSizeGV gv
        blks = blocksGV gv
        cp = currentGV gv
        next = nextGV gv
    drawBoard (0,0) size blks cp
    drawBoard (12,0) (4,4) next []
| tonicebrian/tetris-haskell | src/Main.hs | gpl-3.0 | 4,501 | 0 | 17 | 1,291 | 1,531 | 780 | 751 | 108 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module Skeleton.Kernel.Core.Sort (
sort
, sortM
, sortL
) where
import Data.Time.Clock
import Data.List (sortOn)
import Skeleton.Kernel.Core.Helper (getTime, getStar)
import Skeleton.Kernel.Internal.Type
-- | Gravity exponent of the ranking formula; higher values make weights
-- grow faster with the star score.
gravity :: Float
gravity = 2.0

-- | Additive offset applied to the star score before exponentiation.
tiltIndex :: Float
tiltIndex = 10

-- | Age factor in hours: zero elapsed time between the two timestamps
-- yields 1.0, one hour yields 2.0, and so on linearly.
computeWeight' :: UTCTime
               -> UTCTime
               -> Float
computeWeight' now posted =
    let elapsedMinutes = fromEnum (diffUTCTime now posted) `div` 60000000000000
    in  fromIntegral elapsedMinutes / 60.0 + 1.0

-- | Ranking weight of an item: @(stars + tiltIndex) ** gravity@, scaled
-- down by the item's age factor.
computeWeight :: UTCTime
              -> Int
              -> UTCTime
              -> Float
computeWeight now stars posted =
    let score = fromIntegral stars + tiltIndex
    in  score ** gravity / computeWeight' now posted
-- | Sort key: the negated weight, so that 'sortOn' yields descending
-- weight order.
compareE :: (Float, Ele)
         -> Float
compareE (f, _) = 0 - f

-- | Order items by their gravity-decayed star weight, heaviest first.
-- The current time @u@ ages each item's timestamp via 'computeWeight'.
sort :: UTCTime
     -> [Ele]
     -> [Ele]
sort _ [] = []
sort u xs = snd $ unzip $
            sortOn compareE $
            zip (map (\x -> computeWeight u (fst x) (snd x))
                     (zip (map getStar xs)
                          (map getTime xs)))
                xs
-- | Sort key for mail items: the negated score, giving descending order.
compareEM :: EleM -- mail sort
          -> Int
compareEM (_, _, _, s) = 0 - s

-- | The ten highest-scoring mail items, best first.
sortM :: [EleM]
      -> [EleM]
sortM []    = []
sortM items = take 10 (sortOn compareEM items)
-- | Sort key for latest-news items: the entry's timestamp field.
compareEL :: DataEle -- latest news
          -> UTCTime
compareEL (_, _, _, _, _, _, _, u, _, _) = u

-- | News items ordered newest first.
sortL :: [DataEle]
      -> [DataEle]
sortL []    = []
sortL items = reverse (sortOn compareEL items)
| ProLambda/Times | Skeleton/Kernel/Core/Sort.hs | gpl-3.0 | 1,555 | 0 | 13 | 593 | 516 | 286 | 230 | 51 | 1 |
module FourChan.Board
( Board
, getBoardName
, getTitle
, getNumPages
, getNumThreadsPerPage
, getBoardsInfo
, getBoardInfo
) where
import Data.Aeson
import Data.Foldable (find)
import Data.Maybe
import qualified Data.Map as M
import Data.Text (pack)
import FourChan.Helpers.Download
boardsUrl :: String
boardsUrl = "http://api.4chan.org/boards.json"
data Board = Board
{ getBoardName :: String
, getTitle :: String
, getNumPages :: Int
, getNumThreadsPerPage :: Int
} deriving (Eq, Show)
instance FromJSON Board where
parseJSON (Object m) = do
boardName <- m .: pack "board"
title <- m .: pack "title"
numPages <- m .: pack "pages"
numThreadsPerPage <- m .: pack "per_page"
return $ Board
{ getBoardName = boardName
, getTitle = title
, getNumPages = numPages
, getNumThreadsPerPage = numThreadsPerPage
}
-- | Download and decode the 4chan board index.
--
-- The payload is decoded directly to the board map instead of the original
-- nested @Maybe (Maybe ...)@ followed by a partial 'fromJust': a JSON
-- @null@ payload now reports the decode error instead of crashing.
getBoardsInfo :: IO [Board]
getBoardsInfo = fmap (maybe err getBoards . decode) $ download boardsUrl
  where
    err = error "Error decoding board index JSON"
-- | Look up a single board by its short name in the downloaded board
-- index. Fails with an error when no board of that name exists.
getBoardInfo :: String -> IO Board
getBoardInfo name = fmap findBoard getBoardsInfo
  where
    findBoard :: [Board] -> Board
    findBoard = maybe err id . find (\b -> getBoardName b == name)
    err = error $ "Board " ++ name ++ " does not exist"
-- | Extract the board list from the decoded top-level map; the API stores
-- it under the \"boards\" key. Errors out if the key is missing.
getBoards :: M.Map String [Board] -> [Board]
getBoards boardMap =
    case M.lookup "boards" boardMap of
      Just boards -> boards
      Nothing     -> error $ "Could not parse board list"
| xcv-/4chan.hs | lib/FourChan/Board.hs | gpl-3.0 | 1,623 | 0 | 12 | 490 | 433 | 235 | 198 | 44 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SWF.SignalWorkflowExecution
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Records a 'WorkflowExecutionSignaled' event in the workflow execution history
-- and creates a decision task for the workflow execution identified by the
-- given domain, workflowId and runId. The event is recorded with the specified
-- user defined signalName and input (if provided).
--
-- If a runId is not specified, then the 'WorkflowExecutionSignaled' event is
-- recorded in the history of the current open workflow with the matching
-- workflowId in the domain. If the specified workflow execution is not open,
-- this method fails with 'UnknownResource'. Access Control
--
-- You can use IAM policies to control this action's access to Amazon SWF
-- resources as follows:
--
-- Use a 'Resource' element with the domain name to limit the action to only
-- specified domains. Use an 'Action' element to allow or deny permission to call
-- this action. You cannot use an IAM policy to constrain this action's
-- parameters. If the caller does not have sufficient permissions to invoke the
-- action, or the parameter values fall outside the specified constraints, the
-- action fails. The associated event attribute's cause parameter will be set to
-- OPERATION_NOT_PERMITTED. For details and example IAM policies, see <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAMto Manage Access to Amazon SWF Workflows>.
--
-- <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_SignalWorkflowExecution.html>
module Network.AWS.SWF.SignalWorkflowExecution
(
-- * Request
SignalWorkflowExecution
-- ** Request constructor
, signalWorkflowExecution
-- ** Request lenses
, sweDomain
, sweInput
, sweRunId
, sweSignalName
, sweWorkflowId
-- * Response
, SignalWorkflowExecutionResponse
-- ** Response constructor
, signalWorkflowExecutionResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SWF.Types
import qualified GHC.Exts
data SignalWorkflowExecution = SignalWorkflowExecution
{ _sweDomain :: Text
, _sweInput :: Maybe Text
, _sweRunId :: Maybe Text
, _sweSignalName :: Text
, _sweWorkflowId :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'SignalWorkflowExecution' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'sweDomain' @::@ 'Text'
--
-- * 'sweInput' @::@ 'Maybe' 'Text'
--
-- * 'sweRunId' @::@ 'Maybe' 'Text'
--
-- * 'sweSignalName' @::@ 'Text'
--
-- * 'sweWorkflowId' @::@ 'Text'
--
signalWorkflowExecution :: Text -- ^ 'sweDomain'
                        -> Text -- ^ 'sweWorkflowId'
                        -> Text -- ^ 'sweSignalName'
                        -> SignalWorkflowExecution
signalWorkflowExecution p1 p2 p3 = SignalWorkflowExecution
    { _sweDomain     = p1
    , _sweWorkflowId = p2
    , _sweSignalName = p3
    -- runId and input are optional and default to 'Nothing'; set them via
    -- the 'sweRunId' and 'sweInput' lenses.
    , _sweRunId      = Nothing
    , _sweInput      = Nothing
    }
-- | The name of the domain containing the workflow execution to signal.
sweDomain :: Lens' SignalWorkflowExecution Text
sweDomain = lens _sweDomain (\s a -> s { _sweDomain = a })
-- | Data to attach to the 'WorkflowExecutionSignaled' event in the target workflow
-- execution's history.
sweInput :: Lens' SignalWorkflowExecution (Maybe Text)
sweInput = lens _sweInput (\s a -> s { _sweInput = a })
-- | The runId of the workflow execution to signal.
sweRunId :: Lens' SignalWorkflowExecution (Maybe Text)
sweRunId = lens _sweRunId (\s a -> s { _sweRunId = a })
-- | The name of the signal. This name must be meaningful to the target workflow.
sweSignalName :: Lens' SignalWorkflowExecution Text
sweSignalName = lens _sweSignalName (\s a -> s { _sweSignalName = a })
-- | The workflowId of the workflow execution to signal.
sweWorkflowId :: Lens' SignalWorkflowExecution Text
sweWorkflowId = lens _sweWorkflowId (\s a -> s { _sweWorkflowId = a })
data SignalWorkflowExecutionResponse = SignalWorkflowExecutionResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'SignalWorkflowExecutionResponse' constructor.
signalWorkflowExecutionResponse :: SignalWorkflowExecutionResponse
signalWorkflowExecutionResponse = SignalWorkflowExecutionResponse
instance ToPath SignalWorkflowExecution where
toPath = const "/"
instance ToQuery SignalWorkflowExecution where
toQuery = const mempty
instance ToHeaders SignalWorkflowExecution
instance ToJSON SignalWorkflowExecution where
toJSON SignalWorkflowExecution{..} = object
[ "domain" .= _sweDomain
, "workflowId" .= _sweWorkflowId
, "runId" .= _sweRunId
, "signalName" .= _sweSignalName
, "input" .= _sweInput
]
instance AWSRequest SignalWorkflowExecution where
type Sv SignalWorkflowExecution = SWF
type Rs SignalWorkflowExecution = SignalWorkflowExecutionResponse
request = post "SignalWorkflowExecution"
response = nullResponse SignalWorkflowExecutionResponse
| dysinger/amazonka | amazonka-swf/gen/Network/AWS/SWF/SignalWorkflowExecution.hs | mpl-2.0 | 5,985 | 0 | 9 | 1,246 | 643 | 394 | 249 | 73 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : $Header$
Description : Timespan API.
Copyright : (c) plaimi 2014-2015
License : AGPL-3
Maintainer : [email protected]
-} module Tempuhs.Server.Requests.Timespan where
import qualified Database.Esqueleto as E
import Database.Esqueleto
(
(^.),
(&&.),
asc,
orderBy,
val,
)
import Data.Functor
(
(<$>),
)
import Database.Persist
(
entityKey,
insert,
replace,
update,
)
import Database.Persist.Sql
(
ConnectionPool,
)
import Web.Scotty.Trans
(
json,
)
import Plailude
import Tempuhs.Chronology hiding (second)
import Tempuhs.Server.Database
(
(=..),
clockParam,
joinList,
liftAE,
mkKey,
runDatabase,
)
import Tempuhs.Server.DELETE
(
attributesNowow,
owow,
)
import Tempuhs.Server.Laws.Timespan
(
flexTimespan,
parentCycle,
)
import Tempuhs.Server.Param
(
defaultParam,
maybeParam,
maybeUnwrap,
paramE,
rescueMissing,
)
import Tempuhs.Server.Requests.Attributes.Mono
(
getTimespanAttrs,
updateTimespanAttributes,
)
import Tempuhs.Server.Requests.Attributes.Poly
(
attributesParams,
attributeSearch,
insertAttributes,
patchAttribute,
)
import Tempuhs.Server.Requests.Timespan.Util
(
clockFilter,
descendantLookup,
filters,
idFilter,
timeParams,
)
import Tempuhs.Server.Spock
(
ActionE,
errInternal,
errInvalidParam,
jsonError,
jsonKey,
)
timespans :: ConnectionPool -> ActionE ()
-- | 'timespans' serves a basic request for a list of 'Timespan's with their
-- associated 'TimespanAttribute's.
timespans pool = do
tid <- maybeParam "id"
p <- maybeParam "parent"
b <- maybeParam "begin"
e <- maybeParam "end"
r <- maybeParam "rubbish"
ds <- defaultParam 0 "descendants"
joins <- attributeSearch TimespanAttributeTimespan TimespanId
TimespanAttributeName TimespanAttributeValue
runDatabase pool $ do
c <- liftAE . rescueMissing =<< erretreat (clockParam "c")
list <- E.select $
joinList joins $
E.from $ \t -> do
E.where_ $ foldl (&&.) (val True) (clockFilter t c ++
filters t p r e b ++
idFilter t (mkKey <$> tid))
orderBy [asc $ t ^. TimespanId]
return t
descs <- descendantLookup (ceiling (ds :: Double)) (entityKey <$> list)
liftAE . json =<< mapM (\a -> (,) a <$> getTimespanAttrs a)
(list ++ descs)
postTimespan :: ConnectionPool -> ActionE ()
-- | 'postTimespan' inserts a 'Timespan'.
postTimespan pool = do
p <- maybeParam "parent"
((bMin, bMax), (eMin, eMax)) <- timeParams
w <- defaultParam 1 "weight"
r <- return Nothing
as <- attributesParams
runDatabase pool $ liftAE . jsonKey =<< do
c <- clockParam "clock"
tid <- insert $ Timespan (mkKey <$> p) (entityKey c)
bMin bMax eMin eMax w r
insertAttributes TimespanAttribute as tid
flexTimespan tid
return tid
replaceTimespan :: ConnectionPool -> ActionE ()
-- | 'replaceTimespan' replaces a 'Timespan'.
replaceTimespan pool = do
t <- paramE "timespan"
p <- maybeParam "parent"
((bMin, bMax), (eMin, eMax)) <- timeParams
w <- defaultParam 1 "weight"
r <- return Nothing
as <- attributesParams
runDatabase pool $ do
let tid = mkKey t
q = mkKey <$> p
c <- clockParam "clock"
pCycle <- case q of
Just qq -> parentCycle [tid] qq
_ -> return $ Just False
case pCycle of
Just False -> do
replace tid $ Timespan q (entityKey c) bMin bMax eMin eMax w r
insertAttributes TimespanAttribute as tid
flexTimespan tid
liftAE $ jsonKey tid
Just True -> liftAE $ jsonError $ errInvalidParam "parent: cycle"
Nothing -> liftAE $ jsonError $ errInternal "database inconsistency"
deleteTimespan :: ConnectionPool -> ActionE ()
-- | 'deleteTimespan' updates the rubbish field of an existing 'Timespan',
-- i.e. soft-deletes it (and its attributes) rather than removing the row.
deleteTimespan p = attributesNowow "timespan"
                                   TimespanRubbish
                                   getTimespanAttrs
                                   timespanAttributeName
                                   updateTimespanAttributes
                                   p
unsafeDeleteTimespan :: ConnectionPool -> ActionE ()
-- | 'unsafeDeleteTimespan' hard-deletes a 'Timespan' from the database.
-- (The original comment wrongly said 'unsafeDeleteClock'.)
unsafeDeleteTimespan = owow "timespan" timespanRubbish
patchTimespan :: ConnectionPool -> ActionE ()
-- | 'patchTimespan' modifies a 'Timespan'.
patchTimespan pool = do
t <- paramE "timespan"
p <- maybeParam "parent"
bMin <- maybeParam "beginMin"
bMax <- maybeParam "beginMax"
eMin <- maybeParam "endMin"
eMax <- maybeParam "endMax"
w <- maybeParam "weight"
as <- attributesParams
runDatabase pool $ do
c <- liftAE . rescueMissing =<< erretreat (clockParam "clock")
let k = mkKey t
q = fmap mkKey . maybeUnwrap <$> p
pcycle <- case q of
Just (Just r) -> parentCycle [k] r
_ -> return $ Just False
case pcycle of
Just False -> do
update k $ concat
[TimespanParent =.. q
,TimespanClock =.. (entityKey <$> c)
,TimespanBeginMin =.. bMin
,TimespanBeginMax =.. bMax
,TimespanEndMin =.. eMin
,TimespanEndMax =.. eMax
,TimespanWeight =.. w
]
updateTimespanAttributes k as
flexTimespan k
liftAE $ jsonKey k
Just True -> liftAE $ jsonError $ errInvalidParam "parent: cycle"
Nothing -> liftAE $ jsonError $ errInternal "database inconsistency"
patchTimespanAttribute :: ConnectionPool -> ActionE ()
-- | 'patchTimespanAttribute' sets or removes a single 'TimespanAttribute',
-- delegating to the polymorphic 'patchAttribute' with the timespan-specific
-- constructors and fields.
patchTimespanAttribute = patchAttribute "timespan" UniqueTimespanAttribute
                                        TimespanAttributeValue
                                        TimespanAttribute
                                        TimespanAttributeRubbish
                                        timespanAttributeRubbish
| plaimi/tempuhs-server | src/Tempuhs/Server/Requests/Timespan.hs | agpl-3.0 | 6,624 | 0 | 21 | 2,266 | 1,536 | 780 | 756 | 181 | 4 |
-- | A parser for gtk-doc formatted documentation, see
-- https://developer.gnome.org/gtk-doc-manual/ for the spec.
module Data.GI.CodeGen.GtkDoc
( parseGtkDoc
, GtkDoc(..)
, Token(..)
, Language(..)
, Link(..)
, ListItem(..)
, CRef(..)
) where
import Prelude hiding (takeWhile)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>), (<*))
#endif
import Data.Monoid ((<>))
import Control.Applicative ((<|>))
import Data.Attoparsec.Text
import Data.Char (isAsciiUpper, isAsciiLower, isDigit)
import qualified Data.Text as T
import Data.Text (Text)
-- | A parsed gtk-doc token.
data Token = Literal Text      -- ^ A run of plain text.
           | Verbatim Text     -- ^ Text quoted in backticks.
           | CodeBlock (Maybe Language) Text -- ^ An embedded @|[ ... ]|@ code block.
           | ExternalLink Link -- ^ A markdown-style @[name](url)@ link.
           | Image Link        -- ^ A markdown-style @![name](url)@ image.
           | List [ListItem]   -- ^ A bulleted list.
           | SectionHeader Int GtkDoc -- ^ A section header of the given depth.
           | SymbolRef CRef    -- ^ A reference to some API symbol.
  deriving (Show, Eq)
-- | A link to a resource, either offline or a section of the documentation.
data Link = Link { linkName :: Text    -- ^ Text shown for the link.
                 , linkAddress :: Text -- ^ Target address (URL or file path).
                 }
  deriving (Show, Eq)
-- | An item in a list, given by a list of lines (not including ending
-- newlines). The list is always non-empty, so we represent it by the
-- first line and then a possibly empty list with the rest of the lines.
data ListItem = ListItem GtkDoc [GtkDoc]
  deriving (Show, Eq)
-- | The language for an embedded code block, as given by the
-- @language="..."@ annotation in the gtk-doc source.
newtype Language = Language Text
  deriving (Show, Eq)
-- | A reference to some symbol in the API.
data CRef = FunctionRef Text         -- ^ @some_function()@
          | ParamRef Text            -- ^ @\@param@
          | ConstantRef Text         -- ^ @%CONSTANT@
          | SignalRef Text Text      -- ^ @#Object::signal@
          | PropertyRef Text Text    -- ^ @#Object:property@
          | VMethodRef Text Text     -- ^ @#Struct.method()@
          | StructFieldRef Text Text -- ^ @#Struct.field@
          | TypeRef Text             -- ^ @#Type@
  deriving (Show, Eq, Ord)
-- | A parsed representation of gtk-doc formatted documentation:
-- simply the sequence of parsed tokens.
newtype GtkDoc = GtkDoc [Token]
  deriving (Show, Eq)
-- | Parse the given gtk-doc formatted documentation.
--
-- === __Examples__
-- >>> parseGtkDoc ""
-- GtkDoc []
--
-- >>> parseGtkDoc "func()"
-- GtkDoc [SymbolRef (FunctionRef "func")]
--
-- >>> parseGtkDoc "literal"
-- GtkDoc [Literal "literal"]
--
-- >>> parseGtkDoc "This is a long literal"
-- GtkDoc [Literal "This is a long literal"]
--
-- >>> parseGtkDoc "Call foo() for free cookies"
-- GtkDoc [Literal "Call ",SymbolRef (FunctionRef "foo"),Literal " for free cookies"]
--
-- >>> parseGtkDoc "The signal ##%#GtkButton::activate is related to gtk_button_activate()."
-- GtkDoc [Literal "The signal ##%",SymbolRef (SignalRef "GtkButton" "activate"),Literal " is related to ",SymbolRef (FunctionRef "gtk_button_activate"),Literal "."]
--
-- >>> parseGtkDoc "# A section\n\n## and a subsection ##\n"
-- GtkDoc [SectionHeader 1 (GtkDoc [Literal "A section"]),Literal "\n",SectionHeader 2 (GtkDoc [Literal "and a subsection "])]
--
-- >>> parseGtkDoc "Compact list:\n- First item\n- Second item"
-- GtkDoc [Literal "Compact list:\n",List [ListItem (GtkDoc [Literal "First item"]) [],ListItem (GtkDoc [Literal "Second item"]) []]]
--
-- >>> parseGtkDoc "Spaced list:\n\n- First item\n\n- Second item"
-- GtkDoc [Literal "Spaced list:\n",List [ListItem (GtkDoc [Literal "First item"]) [],ListItem (GtkDoc [Literal "Second item"]) []]]
--
-- >>> parseGtkDoc "List with urls:\n- [test](http://test)\n- "
-- GtkDoc [Literal "List with urls:\n",List [ListItem (GtkDoc [ExternalLink (Link {linkName = "test", linkAddress = "http://test"})]) [],ListItem (GtkDoc [Image (Link {linkName = "", linkAddress = "image.png"})]) []]]
parseGtkDoc :: Text -> GtkDoc
parseGtkDoc raw = either bail assemble parsed
  where
    parsed = parseOnly (parseTokens <* endOfInput) raw
    -- The grammar accepts any character via parseBoringLiteral, so a
    -- failure here indicates a bug in the parser itself.
    bail e = error $ "gtk-doc parsing failed with error \"" <> e
             <> "\" on the input \"" <> T.unpack raw <> "\""
    -- Post-process: restore the newlines eaten by the list and section
    -- header parsers, then merge adjacent literals.
    assemble = GtkDoc . coalesceLiterals
               . restoreSHPreNewlines . restoreListPreNewline
-- | `parseSectionHeader` eats the newline preceding the header, while
-- `parseInitialSectionHeader` (which only matches at the very start of
-- the text) does not. To keep the invariant that a `SectionHeader`
-- token never carries an implicit leading newline, reinsert a literal
-- newline before every header except a leading one.
restoreSHPreNewlines :: [Token] -> [Token]
restoreSHPreNewlines [] = []
restoreSHPreNewlines (leading : others) = leading : go others
  where go :: [Token] -> [Token]
        go [] = []
        go (tok : rest) = case tok of
          SectionHeader _ _ -> Literal "\n" : tok : go rest
          _                 -> tok : go rest
-- | `parseList` eats the newline just before the list; put it back so
-- the `List` token never carries an implicit leading newline.
restoreListPreNewline :: [Token] -> [Token]
restoreListPreNewline tokens = case tokens of
  []                  -> []
  (tok@(List _) : rest) -> Literal "\n" : tok : restoreListPreNewline rest
  (tok : rest)          -> tok : restoreListPreNewline rest
-- | Accumulate consecutive literals into a single literal token,
-- threading the pending (not yet emitted) literal text as a 'Maybe'.
coalesceLiterals :: [Token] -> [Token]
coalesceLiterals = walk Nothing
  where
    walk :: Maybe Text -> [Token] -> [Token]
    -- end of input: emit whatever literal text is pending
    walk pending []                  = emit pending []
    -- another literal: extend the pending text
    walk pending (Literal l : rest)  = walk (Just (maybe l (<> l) pending)) rest
    -- non-literal: flush the pending text, keep the token
    walk pending (tok : rest)        = emit pending (tok : walk Nothing rest)
    emit :: Maybe Text -> [Token] -> [Token]
    emit Nothing  ts = ts
    emit (Just l) ts = Literal l : ts
-- | Parser for a whole token stream; a section header is only allowed
-- at the very start without a preceding newline.
parseTokens :: Parser [Token]
parseTokens = withLeadingHeader <|> plainTokens
  where withLeadingHeader :: Parser [Token]
        withLeadingHeader =
          (:) <$> parseInitialSectionHeader <*> plainTokens
        plainTokens :: Parser [Token]
        plainTokens = many' parseToken
-- | Parse a single token.
--
-- === __Examples__
-- >>> parseOnly (parseToken <* endOfInput) "func()"
-- Right (SymbolRef (FunctionRef "func"))
parseToken :: Parser Token
-- Note that the alternatives overlap, so this backtracks more than a
-- factored grammar would (if we had combined parsers and then
-- branched). Speed is not an issue here, so for clarity the parsers
-- are kept distinct. parseFunctionRef comes first since it is the
-- main source of backtracking, and parseBoringLiteral last as the
-- catch-all.
parseToken = foldr1 (<|>)
  [ parseFunctionRef
  , parseSignal
  , parseProperty
  , parseVMethod
  , parseStructField
  , parseType
  , parseConstant
  , parseParam
  , parseEscaped
  , parseVerbatim
  , parseCodeBlock
  , parseUrl
  , parseImage
  , parseSectionHeader
  , parseList
  , parseBoringLiteral
  ]
-- | Parse a signal name, of the form
-- > #Object::signal
--
-- === __Examples__
-- >>> parseOnly (parseSignal <* endOfInput) "#GtkButton::activate"
-- Right (SymbolRef (SignalRef "GtkButton" "activate"))
parseSignal :: Parser Token
parseSignal = do
  owner  <- char '#' >> parseCIdent
  signal <- string "::" >> signalOrPropName
  return (SymbolRef (SignalRef owner signal))
-- | Parse a property name, of the form
-- > #Object:property
--
-- === __Examples__
-- >>> parseOnly (parseProperty <* endOfInput) "#GtkButton:always-show-image"
-- Right (SymbolRef (PropertyRef "GtkButton" "always-show-image"))
parseProperty :: Parser Token
parseProperty = do
  owner <- char '#' >> parseCIdent
  prop  <- char ':' >> signalOrPropName
  return (SymbolRef (PropertyRef owner prop))
-- | Parse a reference to a virtual method, of the form
-- > #Struct.method()
--
-- === __Examples__
-- >>> parseOnly (parseVMethod <* endOfInput) "#Foo.bar()"
-- Right (SymbolRef (VMethodRef "Foo" "bar"))
parseVMethod :: Parser Token
parseVMethod = do
  owner  <- char '#' >> parseCIdent
  method <- char '.' >> parseCIdent
  _      <- string "()"
  return (SymbolRef (VMethodRef owner method))
-- | Parse a reference to a struct field, of the form
-- > #Struct.field
--
-- === __Examples__
-- >>> parseOnly (parseStructField <* endOfInput) "#Foo.bar"
-- Right (SymbolRef (StructFieldRef "Foo" "bar"))
parseStructField :: Parser Token
parseStructField = do
  owner <- char '#' >> parseCIdent
  field <- char '.' >> parseCIdent
  return (SymbolRef (StructFieldRef owner field))
-- | Parse a reference to a C type, of the form
-- > #Type
--
-- === __Examples__
-- >>> parseOnly (parseType <* endOfInput) "#Foo"
-- Right (SymbolRef (TypeRef "Foo"))
parseType :: Parser Token
parseType = do
  name <- char '#' >> parseCIdent
  return (SymbolRef (TypeRef name))
-- | Parse a constant, of the form
-- > %CONSTANT_NAME
--
-- === __Examples__
-- >>> parseOnly (parseConstant <* endOfInput) "%TEST_CONSTANT"
-- Right (SymbolRef (ConstantRef "TEST_CONSTANT"))
parseConstant :: Parser Token
parseConstant = do
  name <- char '%' >> parseCIdent
  return (SymbolRef (ConstantRef name))
-- | Parse a reference to a parameter, of the form
-- > @param_name
--
-- === __Examples__
-- >>> parseOnly (parseParam <* endOfInput) "@test_param"
-- Right (SymbolRef (ParamRef "test_param"))
parseParam :: Parser Token
parseParam = do
  name <- char '@' >> parseCIdent
  return (SymbolRef (ParamRef name))
-- | Whether the given character is valid in a C identifier:
-- an underscore, a digit, or an ASCII letter.
isCIdent :: Char -> Bool
isCIdent c = c == '_' || isDigit c || isAsciiUpper c || isAsciiLower c
-- | Name of a signal or property. Similar to a C identifier, but
-- hyphens are allowed too (e.g. @always-show-image@).
signalOrPropName :: Parser Text
signalOrPropName = takeWhile1 (\c -> c == '-' || isCIdent c)
-- | Something that could be a valid C identifier (loosely speaking,
-- we do not need to be too strict here): one or more characters
-- satisfying 'isCIdent'.
parseCIdent :: Parser Text
parseCIdent = takeWhile1 isCIdent
-- | Parse a function ref, given by a valid C identifier followed by
-- '()', for instance 'gtk_widget_show()'. If the identifier is not
-- followed by "()", return it as a literal instead.
--
-- === __Examples__
-- >>> parseOnly (parseFunctionRef <* endOfInput) "test_func()"
-- Right (SymbolRef (FunctionRef "test_func"))
--
-- >>> parseOnly (parseFunctionRef <* endOfInput) "not_a_func"
-- Right (Literal "not_a_func")
parseFunctionRef :: Parser Token
parseFunctionRef = do
  ident <- parseCIdent
  -- Only a trailing "()" makes this a function reference; otherwise
  -- the identifier is just literal text.
  let asCall = string "()" >> return (SymbolRef (FunctionRef ident))
  option (Literal ident) asCall
-- | Parse an escaped special character, i.e. one preceded by @\\@.
-- The backslash is dropped and the character becomes literal text.
parseEscaped :: Parser Token
parseEscaped = char '\\' >> fmap (Literal . T.singleton) (satisfy escapable)
  where escapable :: Char -> Bool
        escapable c = c `elem` ("#@%\\`" :: [Char])
-- | Parse a literal, i.e. anything without a known special meaning.
-- The first character is always consumed unconditionally, which makes
-- the overall token grammar total.
parseBoringLiteral :: Parser Token
parseBoringLiteral = do
  leading <- anyChar
  rest    <- takeWhile (not . special)
  return (Literal (T.cons leading rest))
-- | Characters that are special from the point of view of the parser,
-- in the sense that they may begin something with a special
-- interpretation: the explicit marker characters plus anything that
-- could start a C identifier.
special :: Char -> Bool
special c = c `elem` ("#@%\\`|[!\n" :: [Char]) || isCIdent c
-- | Parse a verbatim string, of the form
-- > `verbatim text`
--
-- === __Examples__
-- >>> parseOnly (parseVerbatim <* endOfInput) "`Example quote!`"
-- Right (Verbatim "Example quote!")
parseVerbatim :: Parser Token
parseVerbatim = do
  _        <- char '`'
  contents <- takeWhile1 (/= '`')   -- must be non-empty
  _        <- char '`'
  return (Verbatim contents)
-- | Parse a URL in Markdown syntax, of the form
-- > [name](url)
--
-- === __Examples__
-- >>> parseOnly (parseUrl <* endOfInput) "[haskell](http://haskell.org)"
-- Right (ExternalLink (Link {linkName = "haskell", linkAddress = "http://haskell.org"}))
parseUrl :: Parser Token
parseUrl = do
  name    <- char '[' >> takeWhile1 (/= ']')
  address <- string "](" >> takeWhile1 (/= ')')
  _       <- char ')'
  return (ExternalLink (Link { linkName = name, linkAddress = address }))
-- | Parse an image reference, of the form
-- > 
--
-- === __Examples__
-- >>> parseOnly (parseImage <* endOfInput) ""
-- Right (Image (Link {linkName = "", linkAddress = "diagram.png"}))
parseImage :: Parser Token
parseImage = do
  -- note: the name may be empty, hence takeWhile (not takeWhile1)
  name    <- string "![" >> takeWhile (/= ']')
  address <- string "](" >> takeWhile1 (/= ')')
  _       <- char ')'
  return (Image (Link { linkName = name, linkAddress = address }))
-- | Parse a code block embedded in the documentation, delimited by
-- @|[@ and @]|@, with an optional language annotation.
parseCodeBlock :: Parser Token
parseCodeBlock = do
  _        <- string "|["
  mbLang   <- (Just <$> parseLanguage) <|> return Nothing
  contents <- T.pack <$> manyTill anyChar (string "]|")
  return (CodeBlock mbLang contents)
-- | Parse the language of a code block, specified as an HTML-style
-- comment of the form @<!-- language="C" -->@.
parseLanguage :: Parser Language
parseLanguage = do
  _    <- string "<!--" >> skipSpace >> string "language=\""
  lang <- takeWhile1 (/= '"')
  _    <- char '"' >> skipSpace >> string "-->"
  return (Language lang)
-- | Parse a section header, given by a number of hash symbols, and
-- then ordinary text. Note that this parser "eats" the newline before
-- and after the section header; the leading newline is restored later
-- by 'restoreSHPreNewlines'.
parseSectionHeader :: Parser Token
parseSectionHeader = char '\n' >> parseInitialSectionHeader
-- | Parse a section header at the beginning of the text. I.e. this is
-- the same as `parseSectionHeader`, but we do not expect a newline as
-- a first character.
--
-- === __Examples__
-- >>> parseOnly (parseInitialSectionHeader <* endOfInput) "### Hello! ###\n"
-- Right (SectionHeader 3 (GtkDoc [Literal "Hello! "]))
--
-- >>> parseOnly (parseInitialSectionHeader <* endOfInput) "# Hello!\n"
-- Right (SectionHeader 1 (GtkDoc [Literal "Hello!"]))
parseInitialSectionHeader :: Parser Token
parseInitialSectionHeader = do
  marks <- takeWhile1 (== '#')
  _     <- many1 space
  title <- takeWhile1 (notInClass "#\n")
  -- the closing hashes are optional, the trailing newline is not
  _     <- (string marks >> char '\n') <|> char '\n'
  return (SectionHeader (T.length marks) (parseGtkDoc title))
-- | Parse a list header. Note that the newline before the start of
-- the list is "eaten" by this parser, but is restored later by
-- `parseGtkDoc`.
--
-- === __Examples__
-- >>> parseOnly (parseList <* endOfInput) "\n- First item\n- Second item"
-- Right (List [ListItem (GtkDoc [Literal "First item"]) [],ListItem (GtkDoc [Literal "Second item"]) []])
--
-- >>> parseOnly (parseList <* endOfInput) "\n\n- Two line\n item\n\n- Second item,\n also two lines"
-- Right (List [ListItem (GtkDoc [Literal "Two line"]) [GtkDoc [Literal "item"]],ListItem (GtkDoc [Literal "Second item,"]) [GtkDoc [Literal "also two lines"]]])
parseList :: Parser Token
parseList = fmap List (many1 listItem)
  where listItem :: Parser ListItem
        listItem = do
          _ <- char '\n'
          -- a blank line between items is optional
          _ <- string "\n- " <|> string "- "
          headLine  <- takeWhile1 (/= '\n')
          moreLines <- many' continuation
          return (ListItem (parseGtkDoc headLine) (map parseGtkDoc moreLines))
        -- a continuation line of the current item, indented two spaces
        continuation :: Parser Text
        continuation = string "\n  " >> takeWhile1 (/= '\n')
| ford-prefect/haskell-gi | lib/Data/GI/CodeGen/GtkDoc.hs | lgpl-2.1 | 15,479 | 0 | 19 | 3,259 | 2,751 | 1,467 | 1,284 | 240 | 6 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
--------------------------------------------------------------------------------
{-|
Module : Menu
Copyright : (c) Daan Leijen 2003
(c) Shelarcy ([email protected]) 2006
License : wxWindows
Maintainer : [email protected]
Stability : provisional
Portability : portable
Defines Menus, toolbars, and statusbars.
The function 'menuPane' is used to create a menu
that can contain 'menuItem's. Menu items can contain event handlers
using ('on' 'command'), but they can also be set, using the 'menu'
function, on a frame or (mdi) window so that the menu command is handled
in the context of the active window, instead of the context of the
entire application.
> do frame <- frame [text := "Demo"]
> file <- menuPane [text := "&File"]
> mclose <- menuItem file [text := "&Close\tCtrl+C", help := "Close the document"]
> set frame [menuBar := [file]
> ,on (menu mclose) := ...]
-}
--------------------------------------------------------------------------------
module Graphics.UI.WX.Menu
( -- * Menu
-- ** Menu containers
MenuBar, Menu, menuBar, menuPopup, menuPane, menuHelp
, menuRes, menuBarLoadRes
-- ** Menu events
, menu, menuId
-- ** Menu items
, MenuItem, menuItem, menuQuit, menuAbout, menuItemEx
, menuItemOnCommandRes, menuLine, menuSub, menuRadioItem
-- * Tool bar
, ToolBar, toolBar, toolBarEx
, ToolBarItem, toolMenu, toolMenuFromBitmap, toolItem, toolControl, tool
-- * Status bar
, StatusField, statusBar, statusField, statusWidth
-- * Deprecated
, menuList, menubar, statusbar
) where
import Data.Char( toUpper )
import Data.List( partition, intersperse )
import System.IO.Unsafe (unsafePerformIO)
import Foreign.Ptr( nullPtr )
import Graphics.UI.WXCore hiding (Event)
import Graphics.UI.WX.Types
import Graphics.UI.WX.Attributes
import Graphics.UI.WX.Layout
import Graphics.UI.WX.Classes
import Graphics.UI.WX.Events
{--------------------------------------------------------------------------------
Menubar
--------------------------------------------------------------------------------}
{-# DEPRECATED menubar "Use menuBar instead" #-}
-- | /Deprecated/: use 'menuBar'. Retained only for backward
-- compatibility with older wxHaskell code.
menubar :: WriteAttr (Frame a) [Menu ()]
menubar
  = menuBar
-- | Set the menu bar of a frame. Each menu's title is moved into the
-- menu bar entry (the menu's own title is cleared), and any command
-- handlers registered on the menus before they were attached are
-- installed on the frame.
menuBar :: WriteAttr (Frame a) [Menu ()]
menuBar
  = writeAttr "menubar" setter
  where
    setter frame menus
      = do mb <- menuBarCreate wxMB_DOCKABLE
           mapM_ (append mb) menus
           frameSetMenuBar frame mb
           -- set delayed menu handlers on the frame
           mapM_ (evtHandlerSetAndResetMenuCommands frame) menus
           -- work around menu bug in wxMac 2.5.1: hide and re-show the
           -- frame so the freshly attached menu bar is picked up
           vis <- windowIsShown frame
           if (vis && wxToolkit == WxMac && (div wxVersion 100) >= 25)
            then do windowHide frame
                    windowShow frame
                    return ()
            else return ()
    -- move the menu title into the menu bar entry
    append mb menu
      = do title <- menuGetTitle menu
           menuSetTitle menu ""
           menuBarAppend mb menu title
-- | Retrieve a menu bar instance which has been constructed by loading
-- a resource file for a given top level window.
menuBarLoadRes :: Window a -> FilePath -> String -> IO (MenuBar ())
menuBarLoadRes parent rc name =
  do res <- xmlResourceCreateFromFile rc wxXRC_USE_LOCALE
     -- load the named menu bar directly; no need to bind and re-return
     xmlResourceLoadMenuBar res parent name
-- | Show a popup menu for a certain window at the given position,
-- discarding the result of the native call.
menuPopup :: Menu b -> Point -> Window a -> IO ()
menuPopup menu pt parent
  = windowPopupMenu parent menu pt >> return ()
{--------------------------------------------------------------------------------
Menu
--------------------------------------------------------------------------------}
{-# DEPRECATED menuList "Use menuPane instead" #-}
-- | /Deprecated/: use 'menuPane'. Retained only for backward
-- compatibility with older wxHaskell code.
menuList :: [Prop (Menu ())] -> IO (Menu ())
menuList
  = menuPane
-- | Create a new menu with a certain title (corresponds with 'text' attribute).
--
-- * Instances: 'Textual'
--
menuPane :: [Prop (Menu ())] -> IO (Menu ())
menuPane props
  = do newMenu <- menuCreate "" wxMENU_TEAROFF
       set newMenu props
       return newMenu
-- | Append a /help/ menu item (@"&Help"@). On some platforms,
-- the /help/ menu is handled specially. The default title can be
-- overridden by a 'text' property later in the list.
menuHelp :: [Prop (Menu ())] -> IO (Menu ())
menuHelp props
  = menuPane ((text := "&Help") : props)
-- | Get a menu by name from a menu loaded from a resource file,
-- given the frame which owns the menu. You can directly set properties
-- on the item as part of the call, which enables simple connection of
-- event handlers (e.g. on command).
menuRes :: Window a -> String -> [Prop (Menu ())] -> IO (Menu ())
menuRes parent menu_name props =
  do loaded <- xmlResourceGetMenu parent menu_name
     set loaded props
     return loaded
-- The 'text' attribute of a menu is its title.
instance Textual (Menu a) where
  text
    = newAttr "text" menuGetTitle menuSetTitle
{--------------------------------------------------------------------------------
Menu items
--------------------------------------------------------------------------------}
-- | Create a submenu item: append @menu@ as a submenu of @parent@.
-- The label is taken from the 'text' property if present, otherwise
-- from the submenu's own title (falling back to @"<empty>"@).
menuSub :: Menu b -> Menu a -> [Prop (MenuItem ())] -> IO (MenuItem ())
menuSub parent menu props
  = do id <- idCreate
       label <- case (findProperty text "" props) of
                  Just (txt,_) -> return txt
                  Nothing -> do title <- menuGetTitle menu
                                if (null title)
                                 then return "<empty>"
                                 else return title
       menuSetTitle menu "" -- remove title on submenus
       menuAppendSub parent id label menu ""
       menuPropagateEvtHandlers menu -- move the evtHandlers to the parent
       item <- menuFindItem parent id
       set item props
       return item
-- | Add a menu separator line.
menuLine :: Menu a -> IO ()
menuLine = menuAppendSeparator
-- | Append a menu item. The label can contain
-- menu accellerators by using an ampersand. It can also contain keyboard accellerators
-- after a tab (@'\t'@) character.
--
-- > menuItem menu [text := "&Open\tCtrl+O", help := "Opens an existing document"]
--
-- You can create a checkable menu item by setting 'checkable' to 'True' in the
-- properties of a menu item.
--
-- Note: on GTK, it is required to set the 'text' attribute immediately at creation time.
--
-- * Events: 'menu', 'menuId'
--
-- * Instances: 'Textual', 'Able', 'Help', 'Checkable', 'Identity', 'Commanding'.
--
menuItem :: Menu a -> [Prop (MenuItem ())] -> IO (MenuItem ())
menuItem menu props
  = menuItemKind menu kind props
  where
    -- a 'checkable := True' property selects a check item
    kind | isCheck   = wxITEM_CHECK
         | otherwise = wxITEM_NORMAL
    isCheck = case findProperty checkable False props of
                Just (True,_) -> True
                _             -> False
-- | Append a radio menu item. These items are 'checkable' by default.
-- A sequence of radio menu items form automatically a group.
-- A different kind of menu item, like a 'menuLine', terminates the group.
-- Note: one sometimes has to set the first selected radio item
-- specifically after setting the "menubar" property, or otherwise the
-- radio item bullet is not displayed on windows.
-- See 'menuItem' for other properties of menu radio items.
menuRadioItem :: Menu a -> [Prop (MenuItem ())] -> IO (MenuItem ())
menuRadioItem menu props
  = menuItemKind menu wxITEM_RADIO ((checked := True) : props)
-- | Append a menu item of the given kind ('wxITEM_NORMAL',
-- 'wxITEM_CHECK' or 'wxITEM_RADIO'). The label is taken from the
-- 'text' property, defaulting to @"<empty>"@ when absent.
-- (Type signature added: every other top-level binding in this
-- module carries one.)
menuItemKind :: Menu a -> Int -> [Prop (MenuItem ())] -> IO (MenuItem ())
menuItemKind menu kind props
  = do id <- idCreate
       let label = case (findProperty text "" props) of
                     Nothing      -> "<empty>"
                     Just (txt,_) -> txt
       menuItemEx menu id label kind props
-- | Append an /about/ menu item (@"&About..."@). On some platforms,
-- the /about/ menu is handled specially.
--
-- * Events: 'menu', 'menuId'
--
-- * Instances: 'Textual', 'Able', 'Help', 'Checkable', 'Identity', 'Commanding'.
--
menuAbout :: Menu a -> [Prop (MenuItem ())] -> IO (MenuItem ())
menuAbout m props
  = menuItemId m wxID_ABOUT "&About..." props
-- | Append an /quit/ menu item (@"&Quit\tCtrl+Q"@). On some platforms,
-- the /quit/ menu is handled specially
--
-- * Events: 'menu', 'menuId'
--
-- * Instances: 'Textual', 'Able', 'Help', 'Checkable', 'Identity', 'Commanding'.
--
menuQuit :: Menu a -> [Prop (MenuItem ())] -> IO (MenuItem ())
menuQuit m props
  = menuItemId m wxID_EXIT "&Quit\tCtrl+Q" props
-- | Append a menu item with a specific id and label.
--
-- * Events: 'menu', 'menuId'
--
-- * Instances: 'Textual', 'Able', 'Help', 'Checkable', 'Identity', 'Commanding'.
--
menuItemId :: Menu a -> Id -> String -> [Prop (MenuItem ())] -> IO (MenuItem ())
menuItemId menu id label
  = menuItemEx menu id label wxITEM_NORMAL
-- | Append a menu item with a specific id, label, and kind (like 'wxITEM_CHECK').
--
-- * Events: 'menu', 'menuId'
--
-- * Instances: 'Textual', 'Able', 'Help', 'Checkable', 'Identity', 'Commanding'.
--
menuItemEx :: Menu a -> Id -> String -> Int -> [Prop (MenuItem ())] -> IO (MenuItem ())
menuItemEx menu id label kind props
  = do -- radio items need the dedicated append call; everything else
       -- goes through menuAppend with a check flag
       if kind == wxITEM_RADIO
         then menuAppendRadioItem menu id label ""
         else menuAppend menu id label "" isCheck
       newItem <- menuFindItem menu id
       set newItem props
       return newItem
  where isCheck = kind == wxITEM_CHECK
-- Attribute instances for menu items: enabling, label text, help
-- string, check state, and identifier, each delegating to the
-- corresponding wxWidgets accessor pair.
instance Able (MenuItem a) where
  enabled = newAttr "enabled" menuItemIsEnabled menuItemEnable
instance Textual (MenuItem a) where
  text
    = reflectiveAttr "text" menuItemGetItemLabel menuItemSetItemLabel
instance Help (MenuItem a) where
  help = newAttr "help" menuItemGetHelp menuItemSetHelp
instance Checkable (MenuItem a) where
  checkable = reflectiveAttr "checkable" menuItemIsCheckable (\m c -> menuItemSetCheckable m c)
  checked = newAttr "checked" menuItemIsChecked menuItemCheck
instance Identity (MenuItem a) where
  identity = newAttr "identity" menuItemGetId menuItemSetId
{--------------------------------------------------------------------------------
Events
--------------------------------------------------------------------------------}
-- | React to menu events.
menu :: MenuItem a -> Event (Window w) (IO ())
menu item
  = let id = unsafePerformIO (get item identity)
    -- unsafePerformIO: 'Event' construction is pure, so the item's
    -- identifier is read eagerly here.  NOTE(review): this assumes the
    -- identity attribute never changes after creation — confirm.
    in menuId id
-- | React to a menu event based on identity.
menuId :: Id -> Event (Window w) (IO ())
menuId id = newEvent "menu" getter setter
  where
    getter w   = evtHandlerGetOnMenuCommand w id
    setter w h = evtHandlerOnMenuCommand w id h
{--------------------------------------------------------------------------------
Menu commands:
Ok, we would like to set command handlers in two ways:
1) As an "on command" on the menu item itself.
2) With an "on (menu xxx)" on a window.
Unfortunately, wxWidgets does not support method (1) for menus that are
part of a menubar and assumes they are set on using (2) on the associated
frame. We can't tell whether a menu is part of a menubar or popup menu until
the user sets it. Thus we both set the event handlers always directly on the
top level menu (this is good enough for popup menus) and we maintain
a list of menu item id's and associated event handler as client data on the
top level menu. When the menu is set as part of a menu bar, we install the
handlers on the associated frame.
--------------------------------------------------------------------------------}
-- Menu items emit 'command' events; the handlers are stored on the
-- item's top-level menu (see the comment block above on menu command
-- handling).
instance Commanding (MenuItem a) where
  command
    = newEvent "command" menuItemGetOnCommand menuItemOnCommand
-- Retrieve the command handler of a menu item, which lives on its
-- top-level menu.
menuItemGetOnCommand :: MenuItem a -> IO (IO ())
menuItemGetOnCommand item
  = do itemId  <- get item identity
       topMenu <- menuItemGetTopMenu item
       evtHandlerGetOnMenuCommand topMenu itemId
-- Install a command handler for a menu item. The handler is set on the
-- top-level menu (sufficient for popup menus), recorded in the menu's
-- delayed-handler list (for later installation when the menu joins a
-- menu bar), and installed on the owning frame when one already exists.
menuItemOnCommand :: MenuItem a -> IO () -> IO ()
menuItemOnCommand item io
  = do id <- get item identity
       topmenu <- menuItemGetTopMenu item
       -- always set it on the menu itself (has only effect on popup menus)
       evtHandlerOnMenuCommand topmenu id io
       -- update the Haskell event handler list for delayed frame installation
       menuUpdateEvtHandlers topmenu (insert id io)
       -- and set it directly on the frame if already instantiated.
       frame <- menuGetFrame topmenu
       when (not (objectIsNull frame)) (evtHandlerOnMenuCommand frame id io)
  where
    -- upsert into the assoc list, keeping the entry's original position
    insert key val [] = [(key,val)]
    insert key val ((k,v):xs) | key == k = (key,val):xs
                              | otherwise = (k,v):insert key val xs
-- | When setting event handlers on menu items which have been loaded from
-- XRC resource files, properties cannot be used as the menu item
-- instances are opaque to wxHaskell.
--
-- This function offers a convenient way to attach menu item event
-- handlers, given the identity of the window which owns the menu containing
-- the menu item, and the name of the menu item
menuItemOnCommandRes :: Window a -> String -> IO () -> IO ()
menuItemOnCommandRes win item_name handler =
  do
    -- resolve the XRC name to its numeric id, then attach as usual
    res <- xmlResourceGet
    item_id <- xmlResourceGetXRCID res item_name
    evtHandlerOnMenuCommand win item_id handler
-- Propagate the (delayed) event handlers of a submenu to the parent menu.
-- This is necessary for event handlers set on menu items in a submenu that
-- was not yet assigned to a parent menu.
menuPropagateEvtHandlers :: Menu a -> IO ()
menuPropagateEvtHandlers menu
  = do topMenu  <- menuGetTopMenu menu
       delayed  <- menuGetEvtHandlers menu
       -- clear the submenu's list before handing it to the parent
       menuSetEvtHandlers menu []
       menuSetEvtHandlers topMenu delayed
-- Get associated frame of a menu in a menubar. Returns NULL for popup
-- and sub menus (which have no menu bar).
menuGetFrame :: Menu a -> IO (Frame ())
menuGetFrame menu
  = do mbar <- menuGetMenuBar menu
       if objectIsNull mbar
         then return objectNull
         else menuBarGetFrame mbar
-- Get top level menu of a menu item (never null).
menuItemGetTopMenu :: MenuItem a -> IO (Menu ())
menuItemGetTopMenu item
  = menuItemGetMenu item >>= menuGetTopMenu
-- Get the top level menu of a possible sub menu by walking up the
-- parent chain until there is no parent left.
menuGetTopMenu :: Menu a -> IO (Menu ())
menuGetTopMenu menu
  = do up <- menuGetParent menu
       if objectIsNull up
         then return (downcastMenu menu)
         else menuGetTopMenu up
-- Install all delayed menu command handlers of a menu on the given
-- event handler (typically a frame), clearing the stored list.
evtHandlerSetAndResetMenuCommands :: EvtHandler a -> Menu b -> IO ()
evtHandlerSetAndResetMenuCommands evtHandler menu
  = do handlers <- menuGetEvtHandlers menu
       menuSetEvtHandlers menu []
       mapM_ (uncurry (evtHandlerOnMenuCommand evtHandler)) handlers
-- Apply a transformation to the delayed menu event handler list
-- stored as client data on a top-level menu.
-- (Type signature added: every other top-level binding in this
-- module carries one.)
menuUpdateEvtHandlers :: Menu a -> ([(Id, IO ())] -> [(Id, IO ())]) -> IO ()
menuUpdateEvtHandlers menu f
  = do hs <- menuGetEvtHandlers menu
       menuSetEvtHandlers menu (f hs)
-- Read the delayed handler list from the menu's client data,
-- defaulting to the empty list when none was stored yet.
menuGetEvtHandlers :: Menu a -> IO [(Id,IO ())]
menuGetEvtHandlers menu
  = do stored <- unsafeEvtHandlerGetClientData menu
       return (maybe [] id stored)
-- Store the delayed handler list as client data on the menu
-- (with a no-op finaliser).
menuSetEvtHandlers :: Menu a -> [(Id,IO ())] -> IO ()
menuSetEvtHandlers menu hs
  = evtHandlerSetClientData menu (return ()) hs
{--------------------------------------------------------------------------------
Toolbar
--------------------------------------------------------------------------------}
-- | Create a toolbar window with a divider and text labels.
-- Normally, you can use 'toolMenu' to add tools in the toolbar
-- that behave like normal menu items.
--
-- > tbar <- toolBar f []
-- > toolMenu tbar open "Open" "open.png" []
-- > toolMenu tbar about "About" "about.png" []
--
toolBar :: Frame a -> [Prop (ToolBar ())] -> IO (ToolBar ())
toolBar parent
  = toolBarEx parent True True
-- | Create a toolbar window. The second argument specifies whether text labels
-- should be shown, and the third argument whether a divider line is present
-- above the toolbar.
toolBarEx :: Frame a -> Bool -> Bool -> [Prop (ToolBar ())] -> IO (ToolBar ())
toolBarEx parent showText showDivider props
  = do let style = ( wxTB_DOCKABLE .+. wxTB_FLAT
                     .+. (if showText then wxTB_TEXT else 0)
                     .+. (if showDivider then 0 else wxTB_NODIVIDER)
                   )
       t <- toolBarCreate parent idAny rectNull style
       -- attach the toolbar to the frame explicitly
       frameSetToolBar parent t
       {-
       t <- frameCreateToolBar parent style
       -}
       set t props
       return t
-- | A tool in a toolbar.
--
-- * Events: 'tool'
--
-- * Instances: 'Able', 'Help', 'Tipped', 'Checkable', 'Identity', 'Commanding'.
--
-- Fields: the owning toolbar, the tool's identifier, and whether the
-- tool is a toggle (check) tool.
data ToolBarItem = ToolBarItem (ToolBar ()) Id Bool
-- Attribute instances for toolbar items: every accessor delegates to
-- the owning toolbar, addressed by the tool's id.
instance Able ToolBarItem where
  enabled
    = newAttr "enabled" getter setter
    where
      getter (ToolBarItem toolbar id isToggle)
        = toolBarGetToolEnabled toolbar id
      setter (ToolBarItem toolbar id isToggle) enable
        = toolBarEnableTool toolbar id enable
-- Short help text, shown as the tooltip of the tool.
instance Tipped ToolBarItem where
  tooltip
    = newAttr "tooltip" getter setter
    where
      getter (ToolBarItem toolbar id isToggle)
        = toolBarGetToolShortHelp toolbar id
      setter (ToolBarItem toolbar id isToggle) txt
        = toolBarSetToolShortHelp toolbar id txt
-- Long help text of the tool.
instance Help ToolBarItem where
  help
    = newAttr "help" getter setter
    where
      getter (ToolBarItem toolbar id isToggle)
        = toolBarGetToolLongHelp toolbar id
      setter (ToolBarItem toolbar id isToggle) txt
        = toolBarSetToolLongHelp toolbar id txt
-- 'checkable' is fixed at creation time; 'checked' reads/writes the
-- toggle state of a check tool.
instance Checkable ToolBarItem where
  checkable
    = readAttr "checkable" getter
    where
      getter (ToolBarItem toolbar id isToggle)
        = return isToggle
  checked
    = newAttr "checked" getter setter
    where
      getter (ToolBarItem toolbar id isToggle)
        = toolBarGetToolState toolbar id
      setter (ToolBarItem toolbar id isToggle) toggle
        = toolBarToggleTool toolbar id toggle
-- The identity is fixed at creation time.
instance Identity ToolBarItem where
  identity
    = readAttr "identity" getter
    where
      getter (ToolBarItem toolbar id isToggle)
        = return id
-- Command handlers are stored on the toolbar, keyed by the tool id.
instance Commanding ToolBarItem where
  command
    = newEvent "command" getter setter
    where
      getter (ToolBarItem toolbar id isToggle)
        = evtHandlerGetOnMenuCommand toolbar id
      setter (ToolBarItem toolbar id isToggle) io
        = evtHandlerOnMenuCommand toolbar id io
-- | React on tool event (normally handled by 'menu' though, so only use
-- this for orphan toolbar items). Only the tool's id is relevant here.
tool :: ToolBarItem -> Event (Window w) (IO ())
tool (ToolBarItem _ id _)
  = newEvent "tool" getter setter
  where
    getter w    = evtHandlerGetOnMenuCommand w id
    setter w io = evtHandlerOnMenuCommand w id io
-- | Create a tool bar item based on a menu. Takes a relevant menu
-- item, a label and an image file (bmp, png, gif, ico, etc.) as arguments. The image from the
-- file is normally 16 pixels wide and 15 pixels high.
-- The toolbar item will fire the relevant menu items just as if the menu has been selected.
-- Checkable menus will give a checkable toolbar item. Beware though, that checkable tools
-- normally require a specific @on command@ handler to keep them synchronised with the
-- corresponding menu item.
toolMenu :: ToolBar a -> MenuItem a -> String -> FilePath -> [Prop ToolBarItem] -> IO ToolBarItem
toolMenu toolbar menuitem label bitmapPath props
  = withBitmapFromFile bitmapPath
      (\bmp -> toolMenuFromBitmap toolbar menuitem label bmp props)
-- | This is a generalized version of 'toolMenu' function. You can specify 'Bitmap' that is
-- loaded from any other place instead of using 'FilePath' directly.
toolMenuFromBitmap :: ToolBar a -> MenuItem a -> String -> Bitmap b -> [Prop ToolBarItem] -> IO ToolBarItem
toolMenuFromBitmap toolbar menuitem label bitmap props
  = do isToggle <- get menuitem checkable
       id       <- get menuitem identity
       -- a menu item has a single help string; read it once and use it
       -- for both the short and the long toolbar help (the original
       -- code read the same attribute twice)
       helpTxt  <- get menuitem help
       toolBarAddTool2 toolbar id label bitmap nullBitmap
                       (if isToggle then wxITEM_CHECK else wxITEM_NORMAL)
                       helpTxt helpTxt
       let t = ToolBarItem (downcastToolBar toolbar) id isToggle
       set t props
       toolBarRealize toolbar
       return t
-- | Create an /orphan/ toolbar item that is unassociated with a menu. Takes a
-- label, a flag that is 'True' when the item is 'checkable' and a path to an image
-- (bmp, png, gif, ico, etc.) as arguments.
toolItem :: ToolBar a -> String -> Bool -> FilePath -> [Prop ToolBarItem] -> IO ToolBarItem
toolItem toolbar label isCheckable bitmapPath props
= withBitmapFromFile bitmapPath $ \bitmap ->
do id <- idCreate
toolBarAddTool2 toolbar id label bitmap nullBitmap
(if isCheckable then wxITEM_CHECK else wxITEM_NORMAL)
"" ""
let t = ToolBarItem (downcastToolBar toolbar) id isCheckable
set t props
toolBarRealize toolbar
return t
-- | Add an arbitrary control to a toolbar (typically a 'ComboBox'). The control
-- must be created with the toolbar as the parent.
toolControl :: ToolBar a -> Control b -> IO ()
toolControl toolbar control
= do toolBarAddControl toolbar control
return ()
{--------------------------------------------------------------------------------
  Statusbar
--------------------------------------------------------------------------------}
-- | A field in a status bar.
--
-- * Instances: 'Textual'
--
-- The four mutable cells hold, in order: the field width, the owning
-- status bar, the field's pane index within that bar, and the currently
-- displayed text.
data StatusField = SF (Var Int) (Var (StatusBar ())) (Var Int) (Var String)

-- | The status width attribute determines the width of a status bar field.
-- A negative width makes the field stretchable. The width then determines
-- the amount of stretch in relation to other fields. The default
-- status width is @-1@, ie. all fields stretch evenly.
--
-- Here is an example of a status bar
-- with three fields, where the last field is 50 pixels wide, the first takes
-- 66% of the remaining space and the second field 33%.
--
-- > field1 <- statusField [statusWidth := -2]
-- > field2 <- statusField [text := "hi"]
-- > field3 <- statusField [statusWidth := 50]
-- > set frame [statusBar := [field1,field2,field3]]
--
statusWidth :: Attr StatusField Int
statusWidth
  = newAttr "statusWidth" getter setter
  where
    getter (SF vwidth _ _ _)
      = varGet vwidth
    setter (SF vwidth _ _ _) w
      = varSet vwidth w
-- | Create a status field with default width (@-1@), no owning status bar
-- yet, and empty text; the given properties are applied afterwards.
statusField :: [Prop StatusField] -> IO StatusField
statusField props =
  do widthVar <- varCreate (-1)
     sbarVar  <- varCreate objectNull
     idxVar   <- varCreate (-1)
     textVar  <- varCreate ""
     let field = SF widthVar sbarVar idxVar textVar
     set field props
     return field
-- | The text of a status field is cached locally and, once the field has
-- been attached to a status bar, also pushed to the corresponding pane.
instance Textual StatusField where
  text
    = newAttr "text" getter setter
    where
      getter (SF _ _ _ vtext)
        = varGet vtext
      setter (SF _ vsbar vidx vtext) txt
        = do varSet vtext txt
             idx <- varGet vidx
             if idx < 0
               then return ()  -- not attached to a status bar yet
               else do sbar <- varGet vsbar
                       statusBarSetStatusText sbar txt idx
{-# DEPRECATED statusbar "Use statusBar instead" #-}
-- | /Deprecated/: use 'statusBar'.
statusbar :: WriteAttr (Frame a) [StatusField]
statusbar
  = statusBar

-- | Specify the statusbar of a frame. Creates the native status bar with
-- one pane per field (using each field's 'statusWidth') and then binds
-- every 'StatusField' to its pane so later 'text' updates are displayed.
statusBar :: WriteAttr (Frame a) [StatusField]
statusBar
  = writeAttr "statusbar" set
  where
    set f fields
      = do ws <- mapM (\field -> get field statusWidth) fields
           sb <- statusBarCreateFields f ws
           mapM_ (setsb sb) (zip [0..] fields )
    -- attach a field to pane @idx@ of status bar @sb@ and show its text
    setsb sb (idx,SF _ vsbar vidx vtext)
      = do varSet vsbar sb
           varSet vidx idx
           text <- varGet vtext
           statusBarSetStatusText sb text idx -- initialize
| sherwoodwang/wxHaskell | wx/src/Graphics/UI/WX/Menu.hs | lgpl-2.1 | 23,861 | 0 | 15 | 5,590 | 4,744 | 2,376 | 2,368 | 349 | 3 |
import Data.List
-- | Repeat every element of the list @n@ times, preserving order.
repli :: Int -> [a] -> [a]
repli n = concatMap (replicate n)
| nstarke/icc13-introduction-to-haskell | ex15.hs | lgpl-3.0 | 65 | 0 | 8 | 16 | 40 | 19 | 21 | 2 | 1 |
{-|
This package provides functions for building and signing both simple
transactions and multisignature transactions.
-}
module Network.Haskoin.Transaction
(
-- *Transaction Types
Tx(..)
, TxIn(..)
, TxOut(..)
, OutPoint(..)
, CoinbaseTx(..)
, txHash
, nosigTxHash
, cbHash
-- *Build Transactions
, buildTx
, buildAddrTx
-- *Sign Transactions
, SigInput(..)
, signTx
, signInput
, detSignTx
, detSignInput
, mergeTxs
, verifyStdTx
, verifyStdInput
-- *Coin selection
, Coin(..)
, chooseCoins
, chooseMSCoins
, guessTxSize
) where
import Network.Haskoin.Transaction.Builder
import Network.Haskoin.Transaction.Types
| nuttycom/haskoin | Network/Haskoin/Transaction.hs | unlicense | 635 | 0 | 5 | 100 | 122 | 86 | 36 | 26 | 0 |
-- | The monad in which parseCabal is most easily expressed.
module CabalFile.Parser.Types where
import Control.Monad.State
import Control.Monad.Writer
import CabalFile.Types
-- | The parsing monad: the 'String' state is the text that remains to be
-- parsed, the writer side accumulates the 'Cabal' pieces that have already
-- been parsed, and the underlying 'Maybe' signals parse failure.
type ParseC a = StateT String (WriterT Cabal Maybe) a
| gelisam/cabal-rangefinder | src/CabalFile/Parser/Types.hs | unlicense | 369 | 0 | 7 | 60 | 49 | 31 | 18 | 5 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Spark.IO.Internal.InputGeneric(
generic',
genericWithSchema',
genericWithSchema,
extractResourcePath,
updateResourceStamp
) where
import Spark.Core.Types
import Spark.Core.Try
import Spark.Core.Dataset
import Spark.Core.Internal.Utilities(forceRight)
import Spark.Core.Internal.DatasetFunctions(asDF, emptyDataset, emptyLocalData)
import Spark.Core.Internal.TypesStructures(SQLType(..))
import Spark.Core.Internal.ContextStructures(SparkState)
import Spark.Core.Internal.OpStructures
import Spark.Core.Internal.OpFunctions(convertToExtra')
import Spark.Core.Internal.ContextIOInternal(executeCommand1)
import Spark.IO.Internal.InputStructures
{-| Generates a dataframe from a source description.
This may trigger some calculations on the Spark side if schema inference is
required.
-}
generic' :: SourceDescription -> SparkState DataFrame
generic' src = do
  schemaT <- _inferSchema src
  return (schemaT >>= \dt -> genericWithSchema' dt src)
{-| Generates a dataframe from a source description, assuming a given schema.
The schema overrides whatever may have been given in the source description;
in particular, any request to check or infer the schema is dropped.
While this is convenient, it may lead to runtime errors that are hard to
understand if the data does not follow the given schema.
-}
genericWithSchema' :: DataType -> SourceDescription -> DataFrame
genericWithSchema' dt sd = asDF (emptyDataset node (SQLType dt))
  where
    -- Pin the schema inside the source description itself.
    pinned = sd { inputSchema = UseSchema dt }
    op = StandardOperator {
           soName = "org.spark.GenericDatasource",
           soOutputType = dt,
           soExtra = convertToExtra' pinned
         }
    node = NodeDistributedOp op
{-| Generates a dataset from a source description, assuming the schema that
corresponds to the element type @a@.
-}
genericWithSchema :: forall a. (SQLTypeable a) => SourceDescription -> Dataset a
genericWithSchema sd = forceRight (castType sqlt =<< genericWithSchema' dt sd)
  where
    sqlt = buildType :: SQLType a
    dt = unSQLType sqlt
-- Wraps the action of inferring the schema.
-- Not particularly efficient: one pass here just to obtain the schema,
-- and a second pass later in order to actually read the data.
_inferSchema :: SourceDescription -> SparkState (Try DataType)
_inferSchema sd = executeCommand1 (_inferSchemaCmd sd)
-- TODO: this is a monoidal operation, it could be turned into a universal
-- aggregator.
_inferSchemaCmd :: SourceDescription -> LocalData DataType
_inferSchemaCmd sd = emptyLocalData node sqlt
  where
    sqlt = buildType :: SQLType DataType
    op = StandardOperator {
           soName = "org.spark.InferSchema",
           soOutputType = unSQLType sqlt,
           soExtra = convertToExtra' sd
         }
    node = NodeOpaqueAggregator op
| tjhunter/karps | haskell/src/Spark/IO/Internal/InputGeneric.hs | apache-2.0 | 2,906 | 0 | 9 | 461 | 478 | 275 | 203 | 49 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Finance.Hqfl.Pricer.Asay
-- Copyright : (C) 2016 Mika'il Khan
-- License : (see the file LICENSE)
-- Maintainer : Mika'il Khan <[email protected]>
-- Stability : stable
-- Portability : portable
--
----------------------------------------------------------------------------
{-# LANGUAGE FlexibleInstances #-}
module Finance.Hqfl.Pricer.Asay where
import Finance.Hqfl.Instrument
import Statistics.Distribution.Normal
import Data.Random
-- | Pricers in the Asay model: the formula below is Black-76-style with no
-- discount factor (no @exp(-r*t)@ term appears), which matches options on
-- margined futures.
class Asay a where
  -- | @price instrument r v@: note that the rate argument @r@ is bound but
  -- never used by the instance below.
  price :: a -> Double -> Double -> Double

instance Asay (Option Future) where
  -- NOTE(review): only 'European' exercise is matched; any other exercise
  -- style falls through to a pattern-match failure.
  price (Option (Future f) m European k t) r v =
    case m of
      Call -> f * cdf normal d1 - k * cdf normal d2
      Put -> k * cdf normal (-d2) - f * cdf normal (-d1)
    -- Standard d1/d2 with forward price f and strike k.
    where d1 = (log (f / k) + ((v * v) / 2) * t) / (v * sqrt t)
          d2 = d1 - v * sqrt t
          normal = Normal (0 :: Double) 1
| cokleisli/hqfl | src/Finance/Hqfl/Pricer/Asay.hs | apache-2.0 | 971 | 0 | 15 | 217 | 266 | 145 | 121 | 15 | 0 |
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS -Wall #-}
----------------------------------------------------------------------
-- |
-- Module : Data.ZoomCache.Multichannel.Internal
-- Copyright : Conrad Parker
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Conrad Parker <[email protected]>
-- Stability : unstable
-- Portability : unknown
--
-- ZoomCache multichannel API
----------------------------------------------------------------------
module Data.ZoomCache.Multichannel.Internal (
supportMultichannel
, identifyCodecMultichannel
, oneTrackMultichannel
, mkTrackSpecMultichannel
) where
import Data.ByteString (ByteString)
import Data.Functor.Identity
import qualified Data.IntMap as IM
import qualified Data.Iteratee as I
import Data.TypeLevel.Num hiding ((==))
import qualified Data.Iteratee.Offset as OffI
import Data.Iteratee.ZoomCache.Utils
import Data.Offset
import Data.ZoomCache.Common
import Data.ZoomCache.Multichannel.Common
import Data.ZoomCache.Multichannel.NList()
import Data.ZoomCache.NList
import Data.ZoomCache.Types
----------------------------------------------------------------------
-- | Extend a list of codec identifiers so that it also recognises
-- multichannel wrappings of those codecs. Note the knot: the extended list
-- is passed to 'identifyCodecMultichannel' itself, so multichannel streams
-- may be nested.
supportMultichannel :: [IdentifyCodec] -> [IdentifyCodec]
supportMultichannel = f
    where f x = x ++ [identifyCodecMultichannel (f x)]

-- | Run a pure (Identity-based) iteratee to completion and extract its
-- result.
runner1 :: Identity (I.Iteratee s Identity c) -> c
runner1 = runIdentity . I.run . runIdentity

-- | Identify a multichannel codec header: the 8-byte multichannel track
-- type, followed by the (big-endian) channel count, then the length and
-- identifier of the wrapped sub-codec.
identifyCodecMultichannel :: [IdentifyCodec] -> IdentifyCodec
identifyCodecMultichannel identifiers bs = runner1 $ I.enumPure1Chunk (Offset 0 bs) identifyMulti
    where
        identifyMulti :: (Functor m, Monad m) => I.Iteratee (Offset ByteString) m (Maybe Codec)
        identifyMulti = do
            mIdent <- OffI.takeBS 8
            if mIdent == trackTypeMultichannel
                then do
                    channels <- readInt32be
                    subIdentLength <- readInt32be
                    subCodec <- readCodec identifiers subIdentLength
                    return (fmap (foo channels) subCodec)
                else return Nothing

        -- Lift the sub-codec into an n-channel 'NList' codec, reifying the
        -- channel count at the type level.
        foo :: Int -> Codec -> Codec
        foo channels (Codec a) = reifyIntegral channels (\n -> Codec (NList n [a]))
----------------------------------------------------------------------
-- | Create a track map for a stream of a given type, as track no. 1
oneTrackMultichannel :: (ZoomReadable a)
                     => Int -> a -> Bool -> Bool -> SampleRateType -> Rational -> ByteString -> TrackMap
oneTrackMultichannel channels a delta zlib !drType !rate !name =
    IM.singleton 1 (mkTrackSpecMultichannel channels a delta zlib drType rate name)
{-# INLINABLE oneTrackMultichannel #-}
{-# DEPRECATED oneTrackMultichannel "Use setCodecMultichannel instead" #-}

-- | Build a 'TrackSpec' whose codec is an n-channel 'NList' wrapping of
-- the given codec, with the channel count reified at the type level.
mkTrackSpecMultichannel :: (ZoomReadable a)
                        => Int -> a -> Bool -> Bool -> SampleRateType -> Rational -> ByteString
                        -> TrackSpec
mkTrackSpecMultichannel channels a = reifyIntegral channels
    (\n -> TrackSpec (Codec (NList n [a])))
{-# DEPRECATED mkTrackSpecMultichannel "Use setCodecMultichannel instead" #-}
| kfish/zoom-cache | Data/ZoomCache/Multichannel/Internal.hs | bsd-2-clause | 3,084 | 0 | 16 | 617 | 625 | 348 | 277 | 50 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Web.Spock.Shared
(-- * Helpers for running Spock
runSpock, spockAsApp
-- * Action types
, SpockAction, SpockActionCtx, ActionT, W.ActionCtxT
-- * Handling requests
, request, header, rawHeader, cookie, reqMethod
, preferredFormat, ClientPreferredFormat(..)
, body, jsonBody, jsonBody'
, files, UploadedFile (..)
, params, param, param'
-- * Working with context
, getContext, runInContext
-- * Sending responses
, setStatus, setHeader, redirect, jumpNext, CookieSettings(..), defaultCookieSettings, CookieEOL(..), setCookie, deleteCookie, bytes, lazyBytes
, text, html, file, json, stream, response
-- * Middleware helpers
, middlewarePass, modifyVault, queryVault
-- * Configuration
, SpockCfg (..), defaultSpockCfg
-- * Database
, PoolOrConn (..), ConnBuilder (..), PoolCfg (..)
-- * Accessing Database and State
, HasSpock (runQuery, getState), SpockConn, SpockState, SpockSession
-- * Basic HTTP-Auth
, requireBasicAuth
-- * Sessions
, defaultSessionCfg, SessionCfg (..)
, defaultSessionHooks, SessionHooks (..)
, SessionPersistCfg(..), readShowSessionPersist
, SessionId
, getSessionId, readSession, writeSession
, modifySession, modifySession', modifyReadSession, mapAllSessions, clearAllSessions
-- * Internals for extending Spock
, getSpockHeart, runSpockIO, WebStateM, WebState
)
where
import Web.Spock.Internal.Monad
import Web.Spock.Internal.SessionManager
import Web.Spock.Internal.Types
import Web.Spock.Internal.CoreAction
import Control.Monad
import Control.Concurrent.STM (STM)
import System.Directory
import qualified Web.Spock.Internal.Wire as W
import qualified Network.Wai as Wai
import qualified Network.Wai.Handler.Warp as Warp
-- | Run a Spock application. Basically just a wrapper around @Warp.run@.
runSpock :: Warp.Port -> IO Wai.Middleware -> IO ()
runSpock port mw =
    do putStrLn ("Spock is running on port " ++ show port)
       spockAsApp mw >>= Warp.run port
-- | Convert a middleware to an application. All failing requests will
-- result in a 404 page.
spockAsApp :: IO Wai.Middleware -> IO Wai.Application
spockAsApp mkMiddleware = fmap W.middlewareToApp mkMiddleware
-- | Get the current user's sessionId. Note that this ID should only be
-- shown to its owner as otherwise sessions can be hijacked.
getSessionId :: SpockActionCtx ctx conn sess st SessionId
getSessionId = sm_getSessionId =<< getSessMgr

-- | Write to the current session. Note that all data is stored on the
-- server; the user only receives a sessionId to be identified.
writeSession :: sess -> SpockActionCtx ctx conn sess st ()
writeSession d =
    getSessMgr >>= \mgr -> sm_writeSession mgr d

-- | Modify the stored session
modifySession :: (sess -> sess) -> SpockActionCtx ctx conn sess st ()
modifySession f =
    modifySession' $ \sess -> (f sess, ())

-- | Modify the stored session and return a value
modifySession' :: (sess -> (sess, a)) -> SpockActionCtx ctx conn sess st a
modifySession' f =
    getSessMgr >>= \mgr -> sm_modifySession mgr f

-- | Modify the stored session and return the new value after modification
modifyReadSession :: (sess -> sess) -> SpockActionCtx ctx conn sess st sess
modifyReadSession f =
    modifySession' $ \sess -> let sess' = f sess in (sess', sess')

-- | Read the stored session
readSession :: SpockActionCtx ctx conn sess st sess
readSession = sm_readSession =<< getSessMgr

-- | Globally delete all existing sessions. This is useful for example if
-- you want to require all users to relogin
clearAllSessions :: SpockActionCtx ctx conn sess st ()
clearAllSessions = sm_clearAllSessions =<< getSessMgr

-- | Apply a transformation to all sessions. Be careful with this, as this
-- may cause many STM transaction retries.
mapAllSessions :: (sess -> STM sess) -> SpockActionCtx ctx conn sess st ()
mapAllSessions f =
    getSessMgr >>= \mgr -> sm_mapSessions mgr f
-- | Simple session persisting configuration. DO NOT USE IN PRODUCTION
--
-- Sessions are serialised with 'show' and parsed back with 'read', so a
-- corrupt or hand-edited file makes 'spc_load' throw a 'read' failure.
readShowSessionPersist :: (Read a, Show a) => FilePath -> SessionPersistCfg a
readShowSessionPersist fp =
    SessionPersistCfg
    { spc_load =
          do isThere <- doesFileExist fp
             if isThere
               then do str <- readFile fp
                       -- Force the whole string before returning: 'readFile'
                       -- is lazy and leaves the handle semi-closed, so a
                       -- later 'spc_store' (writeFile on the same path)
                       -- would otherwise fail with "resource busy".
                       length str `seq` return (read str)
               else return []
    , spc_store = writeFile fp . show
    }
| nmk/Spock | src/Web/Spock/Shared.hs | bsd-3-clause | 4,684 | 0 | 14 | 985 | 965 | 560 | 405 | 92 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Futhark.Optimise.Fusion.LoopKernel
( FusedKer(..)
, newKernel
, inputs
, setInputs
, arrInputs
, kernelType
, transformOutput
, attemptFusion
, SOAC
, MapNest
, toNestedSeqStream --not used!
)
where
import Control.Applicative
import Control.Arrow (first)
import Control.Monad
import qualified Data.HashSet as HS
import qualified Data.HashMap.Lazy as HM
import Data.Maybe
import Data.Monoid
import Data.List
import Prelude
import Futhark.Representation.SOACS hiding (SOAC(..))
import qualified Futhark.Representation.SOACS as Futhark
import Futhark.Transform.Rename (renameLambda)
import Futhark.Transform.Substitute
import Futhark.MonadFreshNames
import qualified Futhark.Analysis.HORepresentation.SOAC as SOAC
import qualified Futhark.Analysis.HORepresentation.MapNest as MapNest
import Futhark.Pass.ExtractKernels.ISRWIM (rwimPossible)
import Futhark.Optimise.Fusion.TryFusion
import Futhark.Optimise.Fusion.Composing
import Futhark.Construct
type SOAC = SOAC.SOAC SOACS
type MapNest = MapNest.MapNest SOACS
-- XXX: This function is very gross.
-- | Bind the outputs of @soac@ to @names@, applying the array transforms
-- @ts@ (outermost first) along the way. Each transform step binds fresh
-- intermediate identifiers; only the final values are bound to @names@.
transformOutput :: SOAC.ArrayTransforms -> [VName] -> SOAC
                -> Binder SOACS ()
transformOutput ts names soac = do
  validents <- zipWithM newIdent (map baseString names) $ SOAC.typeOf soac
  e <- SOAC.toExp soac
  letBind_ (basicPattern' [] validents) e
  descend ts validents
  where descend ts' validents =
          case SOAC.viewf ts' of
            SOAC.EmptyF ->
              -- No transforms left: copy the current values to the
              -- requested output names.
              forM_ (zip names validents) $ \(k, valident) ->
                letBindNames' [k] $ PrimOp $ SubExp $ Var $ identName valident
            t SOAC.:< ts'' -> do
              let es = map (applyTransform t) validents
                  mkPat (Ident nm tp) = Pattern [] [PatElem nm BindVar tp]
              opts <- concat <$> mapM primOpType es
              newIds <- forM (zip names opts) $ \(k, opt) ->
                newIdent (baseString k) opt
              zipWithM_ letBind (map mkPat newIds) $ map PrimOp es
              descend ts'' newIds
-- | Translate a single high-level array transform into the corresponding
-- 'PrimOp' applied to the given identifier.
applyTransform :: SOAC.ArrayTransform -> Ident -> PrimOp
applyTransform (SOAC.Rearrange cs perm) v =
  Rearrange cs perm $ identName v
applyTransform (SOAC.Reshape cs shape) v =
  Reshape cs shape $ identName v
applyTransform (SOAC.ReshapeOuter cs shape) v =
  -- Replace only the outer dimension(s); the rest of the shape is kept.
  let shapes = reshapeOuter shape 1 $ arrayShape $ identType v
  in Reshape cs shapes $ identName v
applyTransform (SOAC.ReshapeInner cs shape) v =
  -- Replace only the inner dimension(s); the rest of the shape is kept.
  let shapes = reshapeInner shape 1 $ arrayShape $ identType v
  in Reshape cs shapes $ identName v
applyTransform (SOAC.Replicate n) v =
  Replicate n $ Var $ identName v
-- | Strip the outermost transform from an input, returning it together with
-- the remaining input; 'Nothing' if the input carries no transforms.
inputToOutput :: SOAC.Input -> Maybe (SOAC.ArrayTransform, SOAC.Input)
inputToOutput (SOAC.Input ts ia iat) =
  case SOAC.viewf ts of
    t SOAC.:< ts' -> Just (t, SOAC.Input ts' ia iat)
    SOAC.EmptyF -> Nothing
-- | A kernel under construction by the fusion pass.
data FusedKer = FusedKer {
    fsoac :: SOAC
    -- ^ the SOAC expression, e.g., mapT( f(a,b), x, y )
  , inplace :: Names
    -- ^ every kernel maintains a set of variables
    -- that alias vars used in in-place updates,
    -- such that fusion is prevented to move
    -- a use of an
    -- NOTE(review): the comment above ends mid-sentence in the original;
    -- presumably "...a use of an aliased variable across the update".
  , fusedVars :: [VName]
    -- ^ whether at least a fusion has been performed.
  , kernelScope :: Scope SOACS
    -- ^ The names in scope at the kernel.
  , outputTransform :: SOAC.ArrayTransforms
    -- ^ transforms to apply to the kernel's outputs.
  , outNames :: [VName]
    -- ^ names the kernel's results are bound to.
  }
  deriving (Show)
-- | Construct a fresh kernel around a single SOAC with the given output
-- names and scope; no fusion has been performed yet.
newKernel :: SOAC -> [VName] -> Scope SOACS -> FusedKer
newKernel soac out_nms scope =
  FusedKer { fsoac = soac
           , inplace = HS.empty
           , fusedVars = []
           , outputTransform = SOAC.noTransforms
           , outNames = out_nms
           , kernelScope = scope
           }

-- | The set of array names that the kernel reads as input.
arrInputs :: FusedKer -> HS.HashSet VName
arrInputs = HS.fromList . map SOAC.inputArray . inputs

-- | The inputs of the kernel's SOAC.
inputs :: FusedKer -> [SOAC.Input]
inputs = SOAC.inputs . fsoac

-- | Replace the inputs of the kernel's SOAC.
setInputs :: [SOAC.Input] -> FusedKer -> FusedKer
setInputs inps ker = ker { fsoac = inps `SOAC.setInputs` fsoac ker }

-- | The result types of the kernel's SOAC.
kernelType :: FusedKer -> [Type]
kernelType = SOAC.typeOf . fsoac
-- | Try optimising the producer SOAC on its own (e.g. pulling transforms
-- out of it) before attempting fusion with the kernel.
tryOptimizeSOAC :: Names -> [VName] -> SOAC -> FusedKer
                -> TryFusion FusedKer
tryOptimizeSOAC unfus_nms outVars soac ker = do
  (soac', ots) <- optimizeSOAC Nothing soac mempty
  let ker' = map (SOAC.addTransforms ots) (inputs ker) `setInputs` ker
      outIdents = zipWith Ident outVars $ SOAC.typeOf soac'
      ker'' = fixInputTypes outIdents ker'
  applyFusionRules unfus_nms outVars soac' ker''

-- | Try optimising the consumer kernel before attempting fusion.
tryOptimizeKernel :: Names -> [VName] -> SOAC -> FusedKer
                  -> TryFusion FusedKer
tryOptimizeKernel unfus_nms outVars soac ker = do
  ker' <- optimizeKernel (Just outVars) ker
  applyFusionRules unfus_nms outVars soac ker'

-- | Try exposing the kernel's inputs (pushing its input transforms
-- elsewhere) so that fusion with the producer SOAC becomes possible.
tryExposeInputs :: Names -> [VName] -> SOAC -> FusedKer
                -> TryFusion FusedKer
tryExposeInputs unfus_nms outVars soac ker = do
  (ker', ots) <- exposeInputs outVars ker
  if SOAC.nullTransforms ots
    then fuseSOACwithKer unfus_nms outVars soac ker'
    else do
      -- Exposing left residual transforms; try pulling them into the
      -- producer's outputs instead.
      (soac', ots') <- pullOutputTransforms soac ots
      let outIdents = zipWith Ident outVars $ SOAC.typeOf soac'
          ker'' = fixInputTypes outIdents ker'
      if SOAC.nullTransforms ots'
        then applyFusionRules unfus_nms outVars soac' ker''
        else fail "tryExposeInputs could not pull SOAC transforms"
-- | Update the types of kernel inputs that refer to the given identifiers
-- (the producer's outputs may have changed type during optimisation).
fixInputTypes :: [Ident] -> FusedKer -> FusedKer
fixInputTypes outIdents ker =
  ker { fsoac = fixInputTypes' $ fsoac ker }
  where fixInputTypes' soac =
          map fixInputType (SOAC.inputs soac) `SOAC.setInputs` soac
        fixInputType (SOAC.Input ts v _)
          | Just v' <- find ((==v) . identName) outIdents =
            SOAC.Input ts v $ identType v'
        fixInputType inp = inp

-- | Attempt the fusion strategies in order, taking the first that succeeds
-- ('TryFusion' is an 'Alternative').
applyFusionRules :: Names -> [VName] -> SOAC -> FusedKer
                 -> TryFusion FusedKer
applyFusionRules unfus_nms outVars soac ker =
  tryOptimizeSOAC unfus_nms outVars soac ker <|>
  tryOptimizeKernel unfus_nms outVars soac ker <|>
  tryExposeInputs unfus_nms outVars soac ker <|>
  fuseSOACwithKer unfus_nms outVars soac ker

-- | Public entry point: try to fuse the SOAC (whose results are bound to
-- @outVars@) into the kernel; 'Nothing' if no fusion rule applies.
attemptFusion :: MonadFreshNames m =>
                 Names -> [VName] -> SOAC -> FusedKer
              -> m (Maybe FusedKer)
attemptFusion unfus_nms outVars soac ker =
  fmap removeUnusedParamsFromKer <$>
    tryFusion (applyFusionRules unfus_nms outVars soac ker)
              (kernelScope ker)
-- | Drop lambda parameters (and their corresponding inputs) that the fused
-- kernel's lambda no longer uses. Only applied for SOACs where inputs map
-- one-to-one to trailing lambda parameters.
removeUnusedParamsFromKer :: FusedKer -> FusedKer
removeUnusedParamsFromKer ker =
  case soac of
    SOAC.Map {} -> ker { fsoac = soac' }
    SOAC.Redomap {} -> ker { fsoac = soac' }
    SOAC.Scanomap {} -> ker { fsoac = soac' }
    _ -> ker
  where soac = fsoac ker
        l = SOAC.lambda soac
        inps = SOAC.inputs soac
        (l', inps') = removeUnusedParams l inps
        soac' = l' `SOAC.setLambda`
                (inps' `SOAC.setInputs` soac)

removeUnusedParams :: Lambda -> [SOAC.Input] -> (Lambda, [SOAC.Input])
removeUnusedParams l inps =
  (l { lambdaParams = accParams ++ ps' }, inps')
  where allParams = lambdaParams l
        -- Leading parameters are accumulators; only the trailing ones
        -- correspond to array inputs.
        (accParams, arrParams) =
          splitAt (length allParams - length inps) allParams
        pInps = zip arrParams inps
        -- Keep at least one parameter/input pair even if all are unused,
        -- so the SOAC stays well-formed.
        (ps', inps') = case (unzip $ filter (used . fst) pInps, pInps) of
                         (([], []), (p,inp):_) -> ([p], [inp])
                         ((ps_, inps_), _) -> (ps_, inps_)
        used p = paramName p `HS.member` freeVars
        freeVars = freeInBody $ lambdaBody l

-- | Check that the consumer uses at least one output of the producer
-- unmodified.
mapFusionOK :: [VName] -> FusedKer -> Bool
mapFusionOK outVars ker = any (`elem` inpIds) outVars
  where inpIds = mapMaybe SOAC.isVarishInput (inputs ker)

-- | Check that the consumer uses all the outputs of the producer unmodified.
mapWriteFusionOK :: [VName] -> FusedKer -> Bool
mapWriteFusionOK outVars ker = all (`elem` inpIds) outVars
  where inpIds = mapMaybe SOAC.isVarishInput (inputs ker)
-- | The brain of this module: Fusing a SOAC with a Kernel.
fuseSOACwithKer :: Names -> [VName] -> SOAC -> FusedKer
                -> TryFusion FusedKer
fuseSOACwithKer unfus_set outVars soac1 ker = do
  -- We are fusing soac1 into soac2, i.e, the output of soac1 is going
  -- into soac2.
  let soac2 = fsoac ker
      cs1 = SOAC.certificates soac1
      cs2 = SOAC.certificates soac2
      inp1_arr = SOAC.inputs soac1
      -- Horizontal fusion requires surviving (unfused) outputs and equal
      -- iteration widths of the two SOACs.
      horizFuse= not (HS.null unfus_set) &&
                 SOAC.width soac1 == SOAC.width soac2
      inp2_arr = SOAC.inputs soac2
      lam1 = SOAC.lambda soac1
      lam2 = SOAC.lambda soac2
      w = SOAC.width soac1
      returned_outvars = filter (`HS.member` unfus_set) outVars
      success res_outnms res_soac = do
        let fusedVars_new = fusedVars ker++outVars
        -- Avoid name duplication, because the producer lambda is not
        -- removed from the program until much later.
        uniq_lam <- renameLambda $ SOAC.lambda res_soac
        return $ ker { fsoac = uniq_lam `SOAC.setLambda` res_soac
                     , fusedVars = fusedVars_new
                     , outNames = res_outnms
                     }
  -- Fresh element-level names standing for the producer's per-iteration
  -- results inside the fused lambda.
  outPairs <- forM (zip outVars $ SOAC.typeOf soac1) $ \(outVar, t) -> do
    outVar' <- newVName $ baseString outVar ++ "_elem"
    return (outVar, Ident outVar' t)
  let mapLikeFusionCheck =
        let (res_lam, new_inp) = fuseMaps unfus_set lam1 inp1_arr outPairs lam2 inp2_arr
            (extra_nms,extra_rtps) = unzip $ filter ((`HS.member` unfus_set) . fst) $
                                     zip outVars $ map (stripArray 1) $ SOAC.typeOf soac1
            res_lam' = res_lam { lambdaReturnType = lambdaReturnType res_lam ++ extra_rtps }
        in (extra_nms, res_lam', new_inp)
  case (soac2, soac1) of
    ------------------------------
    -- Redomap-Redomap Fusions: --
    ------------------------------
    (SOAC.Map {}, SOAC.Map {})
      | mapFusionOK outVars ker || horizFuse -> do
          let (extra_nms, res_lam', new_inp) = mapLikeFusionCheck
          success (outNames ker ++ extra_nms) $
            SOAC.Map (cs1++cs2) w res_lam' new_inp
    (SOAC.Map {}, SOAC.Redomap _ _ comm1 lam11 _ nes _)
      | mapFusionOK (drop (length nes) outVars) ker || horizFuse -> do
          let (res_lam', new_inp) = fuseRedomap unfus_set outVars nes lam1 inp1_arr
                                    outPairs lam2 inp2_arr
              unfus_accs = take (length nes) outVars
              unfus_arrs = returned_outvars \\ unfus_accs
          success (unfus_accs ++ outNames ker ++ unfus_arrs) $
            SOAC.Redomap (cs1++cs2) w comm1 lam11 res_lam' nes new_inp
    (SOAC.Redomap _ _ comm2 lam2r _ nes2 _, SOAC.Redomap _ _ comm1 lam1r _ nes1 _)
      | mapFusionOK (drop (length nes1) outVars) ker || horizFuse -> do
          let (res_lam', new_inp) = fuseRedomap unfus_set outVars nes1 lam1 inp1_arr
                                    outPairs lam2 inp2_arr
              unfus_accs = take (length nes1) outVars
              unfus_arrs = returned_outvars \\ unfus_accs
              lamr = mergeReduceOps lam1r lam2r
          success (unfus_accs ++ outNames ker ++ unfus_arrs) $
            SOAC.Redomap (cs1++cs2) w (comm1<>comm2) lamr res_lam' (nes1++nes2) new_inp
    (SOAC.Redomap _ _ comm2 lam21 _ nes _, SOAC.Map {})
      | mapFusionOK outVars ker || horizFuse -> do
          let (res_lam, new_inp) = fuseMaps unfus_set lam1 inp1_arr outPairs lam2 inp2_arr
              (_,extra_rtps) = unzip $ filter ((`HS.member` unfus_set) . fst) $
                               zip outVars $ map (stripArray 1) $ SOAC.typeOf soac1
              res_lam' = res_lam { lambdaReturnType = lambdaReturnType res_lam ++ extra_rtps }
          success (outNames ker ++ returned_outvars) $
            SOAC.Redomap (cs1++cs2) w comm2 lam21 res_lam' nes new_inp
    ----------------------------
    -- Scanomap Fusions: --
    ----------------------------
    (SOAC.Scanomap _ _ lam2r _ nes2 _, SOAC.Scanomap _ _ lam1r _ nes1 _)
      | horizFuse -> do
          let (res_lam', new_inp) = fuseRedomap unfus_set outVars nes1 lam1 inp1_arr outPairs lam2 inp2_arr
              lamr = mergeReduceOps lam1r lam2r
              unfus_arrs = returned_outvars \\ unfus_accs
              unfus_accs = take (length nes1) outVars
          success (unfus_accs ++ outNames ker ++ unfus_arrs) $
            SOAC.Scanomap (cs1++cs2) w lamr res_lam' (nes1++nes2) new_inp
    -- Map -> Scanomap Fusion
    (SOAC.Scanomap _ _ lam21 _ nes _, SOAC.Map {})
      | mapFusionOK outVars ker || horizFuse -> do
          -- Create new inner reduction function
          let (res_lam, new_inp) = fuseMaps unfus_set lam1 inp1_arr outPairs lam2 inp2_arr
              -- Get the lists from soac1 that still need to be returned
              (_,extra_rtps) = unzip $ filter (\(nm,_)->nm `HS.member` unfus_set) $
                               zip outVars $ map (stripArray 1) $ SOAC.typeOf soac1
              res_lam' = res_lam { lambdaReturnType = lambdaReturnType res_lam ++ extra_rtps }
          success (outNames ker ++ returned_outvars) $
            SOAC.Scanomap (cs1++cs2) w lam21 res_lam' nes new_inp
    ------------------
    -- Write fusion --
    ------------------
    -- Map-write fusion.
    (SOAC.Write _cs _len _lam _ivs as,
     SOAC.Map {})
      | mapWriteFusionOK (outVars ++ map snd as) ker -> do
          let (extra_nms, res_lam', new_inp) = mapLikeFusionCheck
          success (outNames ker ++ extra_nms) $
            SOAC.Write (cs1++cs2) w res_lam' new_inp as
    -- Write-write fusion.
    (SOAC.Write _cs2 _len2 _lam2 ivs2 as2,
     SOAC.Write _cs1 _len1 _lam1 ivs1 as1)
      | horizFuse -> do
          -- Each write lambda returns indexes followed by values; zipW
          -- interleaves the two halves of both result lists accordingly.
          let zipW xs ys = ys1 ++ xs1 ++ ys2 ++ xs2
                where len = length xs `div` 2 -- same as with ys
                      xs1 = take len xs
                      xs2 = drop len xs
                      ys1 = take len ys
                      ys2 = drop len ys
          let (body1, body2) = (lambdaBody lam1, lambdaBody lam2)
          let body' = Body { bodyLore = bodyLore body1 -- body1 and body2 have the same lores
                           , bodyBindings = bodyBindings body1 ++ bodyBindings body2
                           , bodyResult = zipW (bodyResult body1) (bodyResult body2)
                           }
          let lam' = Lambda { lambdaParams = lambdaParams lam1 ++ lambdaParams lam2
                            , lambdaBody = body'
                            , lambdaReturnType = zipW (lambdaReturnType lam1) (lambdaReturnType lam2)
                            }
          success (outNames ker ++ returned_outvars) $
            SOAC.Write (cs1 ++ cs2) w lam' (ivs1 ++ ivs2) (as2 ++ as1)
    (SOAC.Write {}, _) ->
      fail "Cannot fuse a write with anything else than a write or a map"
    (_, SOAC.Write {}) ->
      fail "Cannot fuse a write with anything else than a write or a map"
    ----------------------------
    -- Stream-Stream Fusions: --
    ----------------------------
    (SOAC.Stream _ _ Sequential{} _ _, SOAC.Stream _ _ form1@Sequential{} _ _)
      | mapFusionOK (drop (length $ getStreamAccums form1) outVars) ker || horizFuse -> do
          -- fuse two SEQUENTIAL streams
          (res_nms, res_stream) <- fuseStreamHelper (outNames ker) unfus_set outVars outPairs soac2 soac1
          success res_nms res_stream
    (SOAC.Stream _ _ Sequential{} _ _, SOAC.Stream _ _ Sequential{} _ _) ->
      fail "Fusion conditions not met for two SEQ streams!"
    (SOAC.Stream _ _ Sequential{} _ _, SOAC.Stream{}) ->
      fail "Cannot fuse a parallel with a sequential Stream!"
    (SOAC.Stream{}, SOAC.Stream _ _ Sequential{} _ _) ->
      fail "Cannot fuse a parallel with a sequential Stream!"
    (SOAC.Stream{}, SOAC.Stream _ _ form1 _ _)
      | mapFusionOK (drop (length $ getStreamAccums form1) outVars) ker || horizFuse -> do
          -- fuse two PARALLEL streams
          (res_nms, res_stream) <- fuseStreamHelper (outNames ker) unfus_set outVars outPairs soac2 soac1
          success res_nms res_stream
    (SOAC.Stream{}, SOAC.Stream {}) ->
      fail "Fusion conditions not met for two PAR streams!"
    -------------------------------------------------------------------
    --- If one is a stream, translate the other to a stream as well.---
    --- This does not get in trouble (infinite computation) because ---
    --- scan's translation to Stream introduces a hindrance to ---
    --- (horizontal fusion), hence repeated application is for the---
    --- moment impossible. However, if with a dependence-graph rep---
    --- we could run in an infinite recursion, i.e., repeatedly ---
    --- fusing map o scan into an infinity of Stream levels! ---
    -------------------------------------------------------------------
    (SOAC.Stream _ _ form2 _ _, _) -> do
      -- If this rule is matched then soac1 is NOT a stream.
      -- To fuse a stream kernel, we transform soac1 to a stream, which
      -- borrows the sequential/parallel property of the soac2 Stream,
      -- and recursively perform stream-stream fusion.
      (soac1', newacc_ids) <- SOAC.soacToStream soac1
      soac1'' <- case form2 of
                   Sequential{} -> toSeqStream soac1'
                   _ -> return soac1'
      fuseSOACwithKer unfus_set (map identName newacc_ids++outVars) soac1'' ker
    (_, SOAC.Scan {}) -> do
      -- A Scan soac can be currently only fused as a (sequential) stream,
      -- hence it is first translated to a (sequential) Stream and then
      -- fusion with a kernel is attempted.
      (soac1', newacc_ids) <- SOAC.soacToStream soac1
      fuseSOACwithKer unfus_set (map identName newacc_ids++outVars) soac1' ker
    (_, SOAC.Stream _ _ form1 _ _) -> do
      -- If it reached this case then soac2 is NOT a Stream kernel,
      -- hence transform the kernel's soac to a stream and attempt
      -- stream-stream fusion recursivelly.
      -- The newly created stream corresponding to soac2 borrows the
      -- sequential/parallel property of the soac1 stream.
      (soac2', newacc_ids) <- SOAC.soacToStream soac2
      soac2'' <- case form1 of
                   Sequential _ -> toSeqStream soac2'
                   _ -> return soac2'
      fuseSOACwithKer unfus_set outVars soac1 $
        ker { fsoac = soac2'', outNames = map identName newacc_ids ++ outNames ker }
    ---------------------------------
    --- DEFAULT, CANNOT FUSE CASE ---
    ---------------------------------
    _ -> fail "Cannot fuse"
-- | Fuse two streams (producer into consumer) of the same stream order,
-- merging their chunk parameters, accumulators and stream forms.
fuseStreamHelper :: [VName] -> Names -> [VName] -> [(VName,Ident)]
                 -> SOAC -> SOAC -> TryFusion ([VName], SOAC)
fuseStreamHelper out_kernms unfus_set outVars outPairs
                 (SOAC.Stream cs2 w2 form2 lam2 inp2_arr)
                 (SOAC.Stream cs1 _ form1 lam1 inp1_arr) =
  if getStreamOrder form2 /= getStreamOrder form1
  then fail "fusion conditions not met!"
  else do -- very similar to redomap o redomap composition,
          -- but need to remove first the `chunk' and `i'
          -- parameters of streams' lambdas and put them
          -- back in the resulting stream lambda.
          let nes1 = getStreamAccums form1
              chunk1 = head $ lambdaParams lam1
              chunk2 = head $ lambdaParams lam2
              -- Unify the two chunk parameters under one name.
              hmnms = HM.fromList [(paramName chunk2, paramName chunk1)]
              lam20 = substituteNames hmnms lam2
              lam1' = lam1 { lambdaParams = tail $ lambdaParams lam1 }
              lam2' = lam20 { lambdaParams = tail $ lambdaParams lam20 }
              (res_lam', new_inp) = fuseRedomap unfus_set outVars nes1 lam1'
                                    inp1_arr outPairs lam2' inp2_arr
              res_lam'' = res_lam' { lambdaParams = chunk1 : lambdaParams res_lam' }
              unfus_accs = take (length nes1) outVars
              unfus_arrs = filter (`HS.member` unfus_set) outVars
          res_form <- mergeForms form2 form1
          return ( unfus_accs ++ out_kernms ++ unfus_arrs,
                   SOAC.Stream (cs1++cs2) w2 res_form res_lam'' new_inp )
  where -- Combine the stream forms of consumer and producer; sequential
        -- and parallel forms cannot be mixed.
        mergeForms (MapLike _) (MapLike o ) = return $ MapLike o
        mergeForms (MapLike _) (RedLike o comm lam0 acc0) = return $ RedLike o comm lam0 acc0
        mergeForms (RedLike o comm lam0 acc0) (MapLike _) = return $ RedLike o comm lam0 acc0
        mergeForms (Sequential acc2) (Sequential acc1) = return $ Sequential (acc1++acc2)
        mergeForms (RedLike _ comm2 lam2r acc2) (RedLike o1 comm1 lam1r acc1) =
          return $ RedLike o1 (comm1<>comm2) (mergeReduceOps lam1r lam2r) (acc1++acc2)
        mergeForms _ _ = fail "Fusing sequential to parallel stream disallowed!"
fuseStreamHelper _ _ _ _ _ _ = fail "Cannot Fuse Streams!"
-- | If a Stream is passed as argument then it converts it to a
-- Sequential Stream; otherwise it fails. An already-Sequential stream
-- is returned unchanged; a MapLike stream becomes Sequential with no
-- accumulators, and a RedLike stream keeps its accumulators.
toSeqStream :: SOAC -> TryFusion SOAC
toSeqStream s@(SOAC.Stream _ _ (Sequential _) _ _) = return s
toSeqStream (SOAC.Stream cs w (MapLike _) l inps) =
  return $ SOAC.Stream cs w (Sequential []) l inps
toSeqStream (SOAC.Stream cs w (RedLike _ _ _ acc) l inps) =
  return $ SOAC.Stream cs w (Sequential acc) l inps
-- Error message fixed: this function expects a stream, not a "string".
toSeqStream _ = fail "toSeqStream expects a stream, but was given a non-stream SOAC."
-- | This is not currently used, but it might be useful in the future,
-- so I am going to export it in order not to complain about it.
--
-- Wraps the given stream inside a new Sequential stream: the original
-- stream (renamed) becomes the body of the outer stream's lambda.
toNestedSeqStream :: SOAC -> TryFusion SOAC
--toNestedSeqStream s@(SOAC.Stream _ (Sequential _) _ _ _) = return s
toNestedSeqStream (SOAC.Stream cs w form lam arrs) = do
  innerlam <- renameLambda lam
  instrm_resids <- mapM (newIdent "res_instream") $ lambdaReturnType lam
  let inner_extlam = ExtLambda (lambdaParams innerlam)
                               (lambdaBody innerlam)
                               (staticShapes $ lambdaReturnType innerlam)
      nes = getStreamAccums form
      -- Skip the chunk parameter and the accumulator parameters; the
      -- remainder are the array inputs of the inner stream.
      instrm_inarrs = drop (1 + length nes) $ map paramName $ lambdaParams lam
      insoac = Futhark.Stream cs w form inner_extlam instrm_inarrs
      lam_bind = mkLet' [] instrm_resids $ Op insoac
      lam_body = mkBody [lam_bind] $ map (Futhark.Var . identName) instrm_resids
      lam' = lam { lambdaBody = lam_body }
  return $ SOAC.Stream cs w (Sequential nes) lam' arrs
toNestedSeqStream _ = fail "In toNestedSeqStream: Input paramter not a stream"
-- Here follows optimizations and transforms to expose fusability.

-- | Run 'optimizeSOAC' on a kernel's SOAC, seeding it with the kernel's
-- current output transforms, and store both results back in the kernel.
optimizeKernel :: Maybe [VName] -> FusedKer -> TryFusion FusedKer
optimizeKernel inp ker = do
  (soac', trans') <- optimizeSOAC inp (fsoac ker) (outputTransform ker)
  return ker { fsoac = soac', outputTransform = trans' }
-- | Fold every optimization in 'optimizations' over the SOAC, threading
-- the (possibly rewritten) SOAC and its output transforms through the
-- fold. Fails if no optimization applied at all.
optimizeSOAC :: Maybe [VName] -> SOAC -> SOAC.ArrayTransforms
             -> TryFusion (SOAC, SOAC.ArrayTransforms)
optimizeSOAC inp soac os = do
  res <- foldM comb (False, soac, os) optimizations
  case res of
    (False, _, _) -> fail "No optimisation applied"
    (True, soac', os') -> return (soac', os')
  -- BUG FIX: each optimization must see the transforms accumulated so
  -- far (os'), not the initial ones (os); otherwise a transform added
  -- by an earlier successful optimization was silently dropped.
  -- The fallback alternative is now explicitly parenthesised instead of
  -- relying on the layout parse-error rule.
  where comb (changed, soac', os') f =
          (do (soac'', os'') <- f inp soac' os'
              return (True, soac'', os''))
          <|> return (changed, soac', os')
-- | The shape of a SOAC optimization: it may consult the output names
-- (if known) and rewrites a SOAC together with its output transforms.
type Optimization = Maybe [VName]
                 -> SOAC
                 -> SOAC.ArrayTransforms
                 -> TryFusion (SOAC, SOAC.ArrayTransforms)
-- The optimizations attempted, in order, by 'optimizeSOAC'.
optimizations :: [Optimization]
optimizations = [iswim, scanToScanomap]
-- | ISWIM: interchange a scan-of-map (when 'rwimPossible' says the scan
-- lambda is really a map) into a map-of-scan, appending a Rearrange
-- (transpose) output transform to compensate for the interchange.
iswim :: Maybe [VName] -> SOAC -> SOAC.ArrayTransforms
      -> TryFusion (SOAC, SOAC.ArrayTransforms)
iswim _ (SOAC.Scan cs w scan_fun scan_input) ots
  | Just (map_pat, map_cs, map_w, map_fun) <- rwimPossible scan_fun,
    (nes, arrs) <- unzip scan_input,
    Just nes_names <- mapM subExpVar nes = do
      let nes_idents = zipWith Ident nes_names $ lambdaReturnType scan_fun
          nes' = map SOAC.identInput nes_idents
          -- Array inputs are transposed so the new outer map iterates
          -- over what used to be the inner dimension.
          map_arrs' = nes' ++ map (SOAC.transposeInput 0 1) arrs
          (scan_acc_params, scan_elem_params) =
            splitAt (length arrs) $ lambdaParams scan_fun
          map_params = map removeParamOuterDim scan_acc_params ++
                       map (setParamOuterDimTo w) scan_elem_params
          map_rettype = map (setOuterDimTo w) $ lambdaReturnType scan_fun
          map_fun' = Lambda map_params map_body map_rettype
          scan_params = lambdaParams map_fun
          scan_body = lambdaBody map_fun
          scan_rettype = lambdaReturnType map_fun
          scan_fun' = Lambda scan_params scan_body scan_rettype
          scan_input' = map (first Var) $
                        uncurry zip $ splitAt (length nes') $ map paramName map_params
          -- The new map body runs the (former inner) scan.
          map_body = mkBody [Let (setPatternOuterDimTo w map_pat) () $
                             Op $ Futhark.Scan cs w scan_fun' scan_input'] $
                            map Var $ patternNames map_pat
      -- Swap the two outermost dimensions of each result.
      let perm = case lambdaReturnType map_fun of
                   [] -> []
                   t:_ -> 1 : 0 : [2..arrayRank t]
      return (SOAC.Map map_cs map_w map_fun' map_arrs',
              ots SOAC.|> SOAC.Rearrange map_cs perm)
iswim _ _ _ =
  fail "ISWIM does not apply."
-- | Rewrite a Scan SOAC into the equivalent Scanomap form, reusing the
-- scan lambda as the map lambda. Output transforms pass through.
scanToScanomap :: Maybe [VName] -> SOAC -> SOAC.ArrayTransforms
               -> TryFusion (SOAC, SOAC.ArrayTransforms)
scanToScanomap _ (SOAC.Scan cs w scan_fun scan_input) ots =
  return (SOAC.Scanomap cs w scan_fun scan_fun nes arrs, ots)
  where (nes, arrs) = unzip scan_input
scanToScanomap _ _ _ =
  fail "Only turn scan into scanomaps"
-- | Drop the outer array dimension from a lambda parameter's type.
removeParamOuterDim :: LParam -> LParam
removeParamOuterDim param =
  param { paramAttr = rowType (paramType param) }

-- | Replace the outer dimension of a lambda parameter's type with @w@.
setParamOuterDimTo :: SubExp -> LParam -> LParam
setParamOuterDimTo w param =
  param { paramAttr = setOuterDimTo w (paramType param) }

-- | Replace the outer dimension of an identifier's type with @w@.
setIdentOuterDimTo :: SubExp -> Ident -> Ident
setIdentOuterDimTo w ident =
  ident { identType = setOuterDimTo w (identType ident) }

-- | Replace the outer dimension of a type with @w@.
setOuterDimTo :: SubExp -> Type -> Type
setOuterDimTo w t = rowType t `arrayOfRow` w

-- | Replace the outer dimension of every value identifier in a pattern.
setPatternOuterDimTo :: SubExp -> Pattern -> Pattern
setPatternOuterDimTo w pat =
  basicPattern' [] (map (setIdentOuterDimTo w) (patternValueIdents pat))
-- Now for fiddling with transpositions...

-- | Strip off transforms shared by all the "interesting" inputs, one
-- layer at a time, returning the common transform prefix and the inputs
-- with that prefix removed.
commonTransforms :: [VName] -> [SOAC.Input]
                 -> (SOAC.ArrayTransforms, [SOAC.Input])
commonTransforms interesting inps = commonTransforms' inps'
  where inps' = [ (SOAC.inputArray inp `elem` interesting, inp)
                | inp <- inps ]

-- Worker: each Bool marks whether that input participates. One call
-- peels at most one transform layer; recursion peels further layers.
commonTransforms' :: [(Bool, SOAC.Input)] -> (SOAC.ArrayTransforms, [SOAC.Input])
commonTransforms' inps =
  case foldM inspect (Nothing, []) inps of
    Just (Just mot, inps') -> first (mot SOAC.<|) $ commonTransforms' $ reverse inps'
    _ -> (SOAC.noTransforms, map snd inps)
  -- inspect fails (Nothing) as soon as two participating inputs
  -- disagree on their outermost transform.
  where inspect (mot, prev) (True, inp) =
          case (mot, inputToOutput inp) of
            (Nothing, Just (ot, inp')) -> Just (Just ot, (True, inp') : prev)
            (Just ot1, Just (ot2, inp'))
              | ot1 == ot2 -> Just (Just ot2, (True, inp') : prev)
            _ -> Nothing
        inspect (mot, prev) inp = Just (mot,inp:prev)
-- | Depth of a map nest: bounded both by the nesting levels and by the
-- smallest rank among the (innermost) return types.
mapDepth :: MapNest -> Int
mapDepth (MapNest.MapNest _ _ lam levels _) =
  min resDims (length levels) + 1
  where resDims = minDim $ case levels of
                            [] -> lambdaReturnType lam
                            nest:_ -> MapNest.nestingReturnType nest
        minDim [] = 0
        minDim (t:ts) = foldl min (arrayRank t) $ map arrayRank ts
-- | Move a Rearrange output transform "into" a map nest by applying the
-- same permutation to each input instead. Only possible when the
-- permutation touches no more dimensions than the nest's map depth.
pullRearrange :: SOAC -> SOAC.ArrayTransforms
              -> TryFusion (SOAC, SOAC.ArrayTransforms)
pullRearrange soac ots = do
  nest <- join $ liftMaybe <$> MapNest.fromSOAC soac
  -- Fails (pattern-match in TryFusion) unless the first output
  -- transform is a Rearrange.
  SOAC.Rearrange cs perm SOAC.:< ots' <- return $ SOAC.viewf ots
  if rearrangeReach perm <= mapDepth nest then do
    let -- Expand perm to cover the full extent of the input dimensionality
        perm' inp = perm ++ [length perm..SOAC.inputRank inp-1]
        addPerm inp = SOAC.addTransform (SOAC.Rearrange cs $ perm' inp) inp
        inputs' = map addPerm $ MapNest.inputs nest
    soac' <- MapNest.toSOAC $
      inputs' `MapNest.setInputs` rearrangeReturnTypes nest perm
    return (soac', ots')
  else fail "Cannot pull transpose"
-- | The dual of 'pullRearrange': remove a Rearrange from the inputs
-- named in @inpIds@ and compensate by appending the inverse permutation
-- as an output transform.
pushRearrange :: [VName] -> SOAC -> SOAC.ArrayTransforms
              -> TryFusion (SOAC, SOAC.ArrayTransforms)
pushRearrange inpIds soac ots = do
  nest <- join $ liftMaybe <$> MapNest.fromSOAC soac
  (perm, inputs') <- liftMaybe $ fixupInputs inpIds $ MapNest.inputs nest
  if rearrangeReach perm <= mapDepth nest then do
    let invertRearrange = SOAC.Rearrange [] $ rearrangeInverse perm
    soac' <- MapNest.toSOAC $
             inputs' `MapNest.setInputs`
             rearrangeReturnTypes nest perm
    return (soac', ots SOAC.|> invertRearrange)
  else fail "Cannot push transpose"
-- | Actually also rearranges indices.
-- Applies @perm@ to the nest's result types and propagates the
-- progressively row-stripped types down through the nesting levels.
rearrangeReturnTypes :: MapNest -> [Int] -> MapNest
rearrangeReturnTypes nest@(MapNest.MapNest cs w body nestings inps) perm =
  MapNest.MapNest cs w
  body
  (zipWith setReturnType
   nestings $
   drop 1 $ iterate (map rowType) ts)
  inps
  where origts = MapNest.typeOf nest
        ts = map (rearrangeType perm) origts
        setReturnType nesting t' =
          nesting { MapNest.nestingReturnType = t' }
-- | Find a Rearrange sitting innermost on one of the interesting
-- inputs, and cancel it on every input by appending the inverse
-- permutation. Nothing if no such Rearrange exists or some input has
-- too low a rank.
fixupInputs :: [VName] -> [SOAC.Input] -> Maybe ([Int], [SOAC.Input])
fixupInputs inpIds inps =
  case mapMaybe inputRearrange $ filter exposable inps of
    perm:_ -> do inps' <- mapM (fixupInput (rearrangeReach perm) perm) inps
                 return (perm, inps')
    _ -> Nothing
  where exposable = (`elem` inpIds) . SOAC.inputArray
        inputRearrange (SOAC.Input ts _ _)
          | _ SOAC.:> SOAC.Rearrange _ perm <- SOAC.viewl ts = Just perm
        inputRearrange _ = Nothing
        fixupInput d perm inp
          | SOAC.inputRank inp >= d =
              Just $ SOAC.addTransform (SOAC.Rearrange [] $ rearrangeInverse perm) inp
          | otherwise = Nothing
-- | Move a Reshape output transform into a Map by reshaping the inputs
-- instead and wrapping the map in one extra map level per new outer
-- dimension. Only applies when all results are of prim type.
pullReshape :: SOAC -> SOAC.ArrayTransforms -> TryFusion (SOAC, SOAC.ArrayTransforms)
pullReshape (SOAC.Map mapcs _ maplam inps) ots
  | SOAC.Reshape cs shape SOAC.:< ots' <- SOAC.viewf ots,
    all primType $ lambdaReturnType maplam = do
      -- The innermost map's width is the last of the new dimensions.
      let mapw' = case reverse $ newDims shape of
            [] -> intConst Int32 0
            d:_ -> d
          inputs' = map (SOAC.addTransform $ SOAC.ReshapeOuter cs shape) inps
          inputTypes = map SOAC.inputType inputs'
      -- Wrap @inner@ in one additional map of width @w@, whose body
      -- simply evaluates the inner SOAC on row-stripped parameters.
      let outersoac :: ([SOAC.Input] -> SOAC) -> (SubExp, [SubExp])
                    -> TryFusion ([SOAC.Input] -> SOAC)
          outersoac inner (w, outershape) = do
            let addDims t = arrayOf t (Shape outershape) NoUniqueness
                retTypes = map addDims $ lambdaReturnType maplam
            ps <- forM inputTypes $ \inpt ->
              newParam "pullReshape_param" $
                stripArray (length shape-length outershape) inpt
            inner_body <- runBodyBinder $
              eBody [SOAC.toExp $ inner $ map (SOAC.identInput . paramIdent) ps]
            let inner_fun = Lambda { lambdaParams = ps
                                   , lambdaReturnType = retTypes
                                   , lambdaBody = inner_body
                                   }
            return $ SOAC.Map [] w inner_fun
      -- Fold from the innermost map outwards over the new dimensions.
      op' <- foldM outersoac (SOAC.Map mapcs mapw' maplam) $
             zip (drop 1 $ reverse $ newDims shape) $
             drop 1 $ reverse $ drop 1 $ tails $ newDims shape
      return (op' inputs', ots')
pullReshape _ _ = fail "Cannot pull reshape"
-- We can make a Replicate output-transform part of a map SOAC simply
-- by adding another dimension to the SOAC.
pullReplicate :: SOAC -> SOAC.ArrayTransforms -> TryFusion (SOAC, SOAC.ArrayTransforms)
-- NOTE(review): the head pattern below had been mangled into an
-- email-like token ("soac@...") by a scrubber; restored to the
-- as-pattern the body's uses of `soac` require.
pullReplicate soac@SOAC.Map{} ots
  | SOAC.Replicate n SOAC.:< ots' <- SOAC.viewf ots = do
      let rettype = SOAC.typeOf soac
      -- The new outer map's (parameterless) body just evaluates the
      -- original map once per replication.
      body <- runBodyBinder $ do
        names <- letTupExp "pull_replicate" =<< SOAC.toExp soac
        resultBodyM $ map Var names
      let lam = Lambda { lambdaReturnType = rettype
                       , lambdaBody = body
                       , lambdaParams = []
                       }
      return (SOAC.Map [] n lam [], ots')
pullReplicate _ _ = fail "Cannot pull replicate"
-- Tie it all together in exposeInputs (for making inputs to a
-- consumer available) and pullOutputTransforms (for moving
-- output-transforms of a producer to its inputs instead).

-- | Try, in order: pushing a Rearrange off the interesting inputs,
-- pulling one in from the output transforms, or exposing as-is.
-- Succeeds only when every interesting input ends up transform-free.
exposeInputs :: [VName] -> FusedKer
             -> TryFusion (FusedKer, SOAC.ArrayTransforms)
exposeInputs inpIds ker = do
  let soac = fsoac ker
  (exposeInputs' =<< pushRearrange' soac) <|>
    (exposeInputs' =<< pullRearrange' soac) <|>
    exposeInputs' ker
  where ot = outputTransform ker
        pushRearrange' soac = do
          (soac', ot') <- pushRearrange inpIds soac ot
          return ker { fsoac = soac'
                     , outputTransform = ot'
                     }
        -- pullRearrange must consume ALL output transforms, otherwise
        -- we cannot clear the kernel's transform list.
        pullRearrange' soac = do
          (soac',ot') <- pullRearrange soac ot
          unless (SOAC.nullTransforms ot') $
            fail "pullRearrange was not enough"
          return ker { fsoac = soac'
                     , outputTransform = SOAC.noTransforms
                     }
        exposeInputs' ker' =
          case commonTransforms inpIds $ inputs ker' of
            (ot', inps') | all exposed inps' ->
              return (ker' { fsoac = inps' `SOAC.setInputs` fsoac ker'}, ot')
            _ -> fail "Cannot expose"
        -- An input is "exposed" if transform-free, or not interesting.
        exposed (SOAC.Input ts _ _)
          | SOAC.nullTransforms ts = True
        exposed inp = SOAC.inputArray inp `notElem` inpIds
-- The transform-pulling strategies tried, in order, by
-- 'pullOutputTransforms'.
outputTransformPullers :: [SOAC -> SOAC.ArrayTransforms -> TryFusion (SOAC, SOAC.ArrayTransforms)]
outputTransformPullers = [pullRearrange, pullReshape, pullReplicate]

-- | Repeatedly pull output transforms into the SOAC until none remain
-- (or no puller makes further progress).
-- NOTE: the trailing `<|> attempt ps soac ots` relies on GHC's layout
-- parse-error rule to act as a fallback for the whole do-block,
-- i.e. the next puller is tried when @p@ fails.
pullOutputTransforms :: SOAC -> SOAC.ArrayTransforms
                     -> TryFusion (SOAC, SOAC.ArrayTransforms)
pullOutputTransforms = attempt outputTransformPullers
  where attempt [] _ _ = fail "Cannot pull anything"
        attempt (p:ps) soac ots = do
          (soac',ots') <- p soac ots
          if SOAC.nullTransforms ots' then return (soac', SOAC.noTransforms)
          else pullOutputTransforms soac' ots' <|> return (soac', ots')
          <|> attempt ps soac ots
| mrakgr/futhark | src/Futhark/Optimise/Fusion/LoopKernel.hs | bsd-3-clause | 34,103 | 259 | 19 | 9,291 | 6,898 | 4,035 | 2,863 | 601 | 22 |
{-# OPTIONS_GHC
-XFlexibleInstances
-XOverlappingInstances
-XMultiParamTypeClasses
-XFlexibleContexts
-XUndecidableInstances
-XTemplateHaskell
-cpp #-}
module Text.RJson (TranslateField,
TranslateFieldD,
translateField,
ToJson,
ToJsonD,
toJson,
exclude,
arrayPrepend,
arrayAppend,
objectExtras,
genericToJson,
enumToJson,
JsonData(..),
FromJson,
FromJsonD,
objectDefaults,
parseJsonString,
parseJsonByteString,
fromJson,
fromJsonString,
fromJsonByteString,
genericFromJson,
enumFromJson,
stripInitialUnderscores,
toJsonString,
firstCharToUpper,
firstCharToLower,
Union(..), Union3, Union4, Union5, Union6,
Union7,Union8,Union9,Union10,
cond)
where
import Data.Generics.SYB.WithClass.Basics
import Data.Generics.SYB.WithClass.Instances
import Data.Generics.SYB.WithClass.Context
import Data.Generics.SYB.WithClass.Derive
import qualified Data.Map as M
import qualified Text.Printf as Printf
import Data.Char
import Data.Ratio
import Data.Array
import Data.Maybe
import Control.Monad.State.Strict
import Control.Monad.Trans
import Control.Monad.Error
import qualified Text.ParserCombinators.Parsec as P
import qualified Data.ByteString.Lazy as B
import System.IO.Unsafe
import qualified Control.Exception as E
import Codec.Text.IConv
import qualified Data.Word as W
-- | A Haskell representation of a JSON
-- data structure: strings, numbers (always Double), arrays, booleans,
-- null, and objects (as a Map from field name to value).
data JsonData = JDString String |
                JDNumber Double |
                JDArray [JsonData] |
                JDBool Bool |
                JDNull |
                JDObject (M.Map String JsonData)
-- | Intersperse a separator element between consecutive list elements
-- (like 'Data.List.intersperse').
listJoin :: a -> [a] -> [a]
listJoin sep (x:rest@(_:_)) = x : sep : listJoin sep rest
listJoin _ xs = xs

-- | Join strings with a separator string (like intercalate).
concatJoin :: String -> [String] -> String
concatJoin sep = concat . listJoin sep
-- | Render an association list of (key, already-rendered value) pairs
-- as a JSON object literal. Keys are escaped; values are assumed to be
-- valid JSON text already.
alistToJsonDict :: [(String, String)] -> String
alistToJsonDict pairs =
    "{" ++ concatJoin "," (map render pairs) ++ "}"
  where
    render (key, val) = escapeString key ++ ":" ++ val
-- Special characters which will be pretty printed.
-- Each value is the BODY of the two-character escape sequence; the
-- leading backslash is prepended by the caller (see escapeString').
escapeMap :: M.Map Char String
escapeMap = M.fromList [
    ('\\', "\\"), ('"', "\""), ('\'', "'"), ('\n', "n"),
    -- BUG FIX: '\b' previously mapped to "\b" (a literal backspace
    -- character), which made escapeString emit backslash + raw
    -- backspace instead of the JSON escape \b.
    ('\r', "r"), ('\f', "f"), ('\t', "t"), ('\b', "b")]
-- | Look up the short escape-sequence body for a character, if any.
escape :: Char -> Maybe String
escape = (`M.lookup` escapeMap)
-- Characters which can safely be printed as literals.
-- (o is the code point of c, precomputed by 'allowed'.)
-- BUG FIX: backslash was previously classified as printable, so
-- escapeString emitted a bare '\' inside JSON strings, producing
-- invalid JSON; it must be escaped like '"'.
allowed' :: Char -> Int -> Bool
allowed' c o
    | o > 127 = True -- Any unicode char is OK.
    | o >= 32 && o < 127 {- exclude DEL == 127 -} && c /= '"' && c /= '\\' = True
    | otherwise = False

allowed :: Char -> Bool
allowed c = allowed' c (ord c)
-- | Render a character as a JSON \uXXXX escape of its code point.
hexEscape :: Char -> String
hexEscape = Printf.printf "\\u%04x" . ord
-- | Escape the body of a JSON string and append the closing quote.
escapeString' :: String -> String
escapeString' "" = "\""
escapeString' (ch:rest)
    | allowed ch = ch : escapeString' rest
    | otherwise  = seqFor ch ++ escapeString' rest
  where
    -- Prefer the short two-character escape; fall back to \uXXXX.
    seqFor c = maybe (hexEscape c) ('\\' :) (escape c)

-- Produce a double-quoted, escaped JSON string literal.
escapeString s = '"' : escapeString' s
-- Serialize a JsonData value to JSON text. NOTE: 'show' here is used
-- as the serializer, not just for debugging.
instance Show JsonData where
    show (JDString s) = escapeString s
    show (JDNumber n)
        -- Show as an integer if possible, otherwise as a Double.
        -- TODO: Not sure if this is the proper way of testing whether a
        -- double is an integer value +/- epsilon.
        -- (floor n defaults to Integer here, so huge values don't wrap.)
        | (fromIntegral (floor n)) == n = show (floor n)
        | True = show n
    show (JDBool True) = "true"
    show (JDBool False) = "false"
    show (JDArray l) = "[" ++ concatJoin "," (map show l) ++ "]"
    show JDNull = "null"
    show (JDObject o) = alistToJsonDict (map (\(k,v) -> (k, show v)) (M.toList o))
--
-- TranslateField class.
--
class TranslateField a where
    -- | This method defines the mapping from Haskell record field names
    -- to JSON object field names. The default is to strip any initial
    -- underscores. Specialize this method to define a different behavior.
    translateField :: a -> String -> String

-- Explicit dictionary for TranslateField, as required by the
-- syb-with-class generic-programming machinery.
data TranslateFieldD a = TranslateFieldD { translateFieldD :: a -> String -> String }

-- Type proxy used only to select the dictionary; never forced.
translateFieldProxy :: Proxy TranslateFieldD
translateFieldProxy = error "'translateFieldProxy' value should never be evaluated!"

-- Reify the class method into the explicit dictionary.
instance (TranslateField t) => Sat (TranslateFieldD t) where
    dict = TranslateFieldD { translateFieldD = translateField }
-- | Removes initial underscores from a string. This is the default
-- Haskell-record-field to JSON-field name translation.
stripInitialUnderscores :: String -> String
stripInitialUnderscores ('_':s) = stripInitialUnderscores s
stripInitialUnderscores s = s
-- Default (overlappable) instance: every Typeable type translates
-- fields by stripping leading underscores.
instance Typeable a => TranslateField a where
    translateField _ x = stripInitialUnderscores x
--
-- ToJson class plus SYB boilerplate.
--
-- | New instances can be added to this class to customize certain aspects
-- of the way in which Haskell types are serialized to JSON.
class TranslateField a => ToJson a where
    toJson :: a -> JsonData
    -- For lists (same trick used by the Prelude to allow special
    -- handling of list types for Show).
    lToJson :: [a] -> JsonData
    lToJson l = JDArray (map toJson l)
    -- | Applies to record types only. You can specialize this method to
    -- prevent certain fields from being serialized.
    -- Given a Haskell field name, it should return True if that field is
    -- to be serialized, and False otherwise.
    exclude :: a -> String -> Bool
    exclude _ _ = False
    -- | Types that will be converted to JSON arrays can override
    -- this method to specify additional elements to be prepended to the array.
    arrayPrepend :: a -> [JsonData]
    arrayPrepend _ = []
    -- | Types that will be converted to JSON arrays can override
    -- this method to specify additional elements to be appended to the array.
    arrayAppend :: a -> [JsonData]
    arrayAppend _ = []
    -- | Types that will be converted to JSON objects can override
    -- this method to specify additional fields of the object.
    objectExtras :: a -> [(String, JsonData)]
    objectExtras _ = []
-- Note the inclusion of translateField from TranslateField.
-- Explicit dictionary for ToJson (syb-with-class pattern); one field
-- per class method, so generic code can call methods via 'dict'.
data ToJsonD a = ToJsonD { toJsonD :: a -> JsonData,
                           excludeD :: a -> String -> Bool,
                           arrayPrependD :: a -> [JsonData],
                           arrayAppendD :: a -> [JsonData],
                           objectExtrasD :: a -> [(String, JsonData)],
                           translateFieldD' :: a -> String -> String }

-- Type proxy used only to select the dictionary; never forced.
toJsonProxy :: Proxy ToJsonD
toJsonProxy = error "'toJsonProxy' value should never be evaluated!"

-- Again, note inclusion of translateField from TranslateField.
instance ToJson t => Sat (ToJsonD t) where
    dict = ToJsonD { toJsonD = toJson,
                     excludeD = exclude,
                     arrayPrependD = arrayPrepend,
                     arrayAppendD = arrayAppend,
                     objectExtrasD = objectExtras,
                     translateFieldD' = translateField }
--
-- Implementations of toJson for different data types.
--
instance ToJson Bool where
    toJson b = JDBool b
instance ToJson Int where
    toJson i = JDNumber (fromIntegral i)
instance ToJson Integer where
    toJson i = JDNumber (fromIntegral i)
--instance Json Float where
--    toJson i = JDNumber (floatToDouble i)
instance ToJson Double where
    toJson i = JDNumber i
-- Ratios are serialized as their (lossy) Double approximation.
instance (Integral a, TranslateField a, Typeable a) => ToJson (Ratio a) where
    toJson i = JDNumber $ (fromIntegral (numerator i)) / (fromIntegral (denominator i))
-- Strings become JSON strings via lToJson; a lone Char becomes a
-- one-character string.
instance ToJson Char where
    lToJson s = JDString s
    toJson c = JDString [c]
-- Just x serializes as x; Nothing as null.
instance (Typeable a, ToJson a) => ToJson (Maybe a) where
    toJson (Just c) = toJson c
    toJson Nothing = JDNull
instance (ToJson a, TranslateField a, Data TranslateFieldD (M.Map String a))
    => ToJson (M.Map String a) where
    toJson x = JDObject (M.map toJson x)
instance (ToJson a, TranslateField a, Typeable a) => ToJson [a] where
    toJson = lToJson
-- TODO: Add instances for the other array types supported by GHC.
instance (ToJson a, TranslateField a, Typeable a, Typeable i, Ix i) => ToJson (Array i a) where
    toJson a = toJson (elems a)
-- | This type can be used for merging two or more records together into a single
-- JSON object. By default, a structure such as (Union X Y) is serialized as follows.
-- First, X and Y are serialized, and a runtime error is signalled if the result of
-- serialization is not a JSON object in both cases. The key/value pairs of the
-- two JSON objects are then merged to form a single object.
data Union a b = Union a b deriving Show
$(derive[''Union]) -- In order to derive (Typeable2 Union).
                   -- It seems that we get away with overwriting the instance
                   -- of Data that this creates (if we didn't, we could always
                   -- instantiate Typeable manually for Union).
-- | Nested Unions are left-branching by convention (since this is what you get
-- by using the constructor as an infix operator).
type Union3 a b c = (Union (Union a b) c)
type Union4 a b c d = (Union (Union3 a b c) d)
type Union5 a b c d e = (Union (Union4 a b c d) e)
type Union6 a b c d e f = (Union (Union5 a b c d e) f)
type Union7 a b c d e f g = (Union (Union6 a b c d e f) g)
type Union8 a b c d e f g h = (Union (Union7 a b c d e f g) h)
type Union9 a b c d e f g h i = (Union (Union8 a b c d e f g h) i)
type Union10 a b c d e f g h i j = (Union (Union9 a b c d e f g h i) j)
-- Used by the (ToJson Union) instance below.
-- True iff the value is a JSON object.
isJDObject (JDObject _) = True
isJDObject _ = False
-- Partial: extracts the field map; callers must check isJDObject first.
jdObjectMap (JDObject m) = m
-- Serialize both halves and merge their object fields. Raises a
-- runtime error when either half does not serialize to an object.
-- NOTE: M.union is left-biased, so on duplicate keys the first
-- component of the Union wins.
instance (ToJson a, ToJson b, TranslateField a, TranslateField b, Typeable a, Typeable b, Typeable2 Union) => ToJson (Union a b) where
    toJson (Union x y) =
        let jx = toJson x
            jy = toJson y
        in
          if isJDObject jx && isJDObject jy
          then JDObject (M.union (jdObjectMap jx) (jdObjectMap jy))
          else error "Bad toJson conversion: Attempt to unify JSON values which aren't both objects"
-- Record-field names of the value's constructor ([] for non-records).
getFields :: Data ToJsonD a => a -> [String]
getFields = constrFields . (toConstr toJsonProxy)
-- Name of the value's datatype, for error messages.
typename x = dataTypeName (dataTypeOf toJsonProxy x)
-- | This function is used as the the implementation of 'toJson' for the
-- generic instance declaration.
-- It's useful to be able to use the same implentation for
-- other instance declarations which override the default implementations
-- of other methods of the ToJson class.
--
-- Records (constructors with field names) become JSON objects, with
-- field names filtered by 'exclude' and renamed by 'translateField';
-- other constructors become JSON arrays (or the bare value, when the
-- constructor has exactly one argument). Primitive (non-algebraic)
-- types raise a runtime error.
genericToJson :: (Data ToJsonD a, ToJson a, TranslateField a) => a -> JsonData
genericToJson x
    | isAlgType (dataTypeOf toJsonProxy x) =
        case getFields x of
          [] ->
              case gmapQ toJsonProxy (toJsonD dict) x of
                [v] -> v -- Special default behavior for algebraic constructors with one field.
                vs -> JDArray $ (arrayPrependD dict x) ++ vs ++ (arrayAppendD dict x)
          fs ->
              let
                  translatedFsToInclude =
                      map (translateFieldD' dict x) (filter (not . (excludeD dict x)) (getFields x))
              in
                JDObject $ M.fromList (objectExtrasD dict x ++ (zip translatedFsToInclude (gmapQ toJsonProxy (toJsonD dict) x)))
    | True =
        error $ "Unable to serialize the primitive type '" ++ typename x ++ "'"
-- | This function can be used as an implementation of 'toJson' for simple enums.
-- It converts an enum value to a string determined by the name of the constructor,
-- after being fed through the (String -> String) function given as the first argument.
enumToJson :: (Data ToJsonD a, ToJson a, TranslateField a) => (String -> String) -> a -> JsonData
enumToJson transform x
    | isAlgType (dataTypeOf toJsonProxy x) = JDString (transform (showConstr (toConstr toJsonProxy x)))
    | True = error "Passed non-algebraic type to enumToJson"
-- Catch-all (overlappable) instance: any Data type serializes via the
-- generic SYB implementation unless a more specific instance exists.
instance (Data ToJsonD t, TranslateField t) => ToJson t where
    toJson = genericToJson
-- Instances for tuples up to n=7 (this limit it is set by the non-existence of Typeable8).
-- Tuples are converted to (heterogenous) JSON lists.
-- The CPP macro I(x) abbreviates the per-component constraints.
#define I(x) ToJson x, Typeable x
instance (I(a), I(b)) => ToJson (a, b) where
    toJson (a,b) = JDArray [toJson a, toJson b]
instance (I(a), I(b), I(c)) => ToJson (a,b,c) where
    toJson (a,b,c) = JDArray [toJson a, toJson b, toJson c]
instance (I(a), I(b), I(c), I(d)) => ToJson (a,b,c,d) where
    toJson (a,b,c,d) = JDArray [toJson a, toJson b, toJson c, toJson d]
instance (I(a), I(b), I(c), I(d), I(e)) => ToJson (a,b,c,d,e) where
    toJson (a,b,c,d,e) = JDArray [toJson a, toJson b, toJson c, toJson d, toJson e]
instance (I(a), I(b), I(c), I(d), I(e), I(f)) =>
    ToJson (a,b,c,d,e,f) where
        toJson (a,b,c,d,e,f) = JDArray [toJson a, toJson b, toJson c, toJson d, toJson e,
                                        toJson f]
instance (I(a), I(b), I(c), I(d), I(e), I(f), I(g)) =>
    ToJson (a,b,c,d,e,f,g) where
        toJson (a,b,c,d,e,f,g) = JDArray [toJson a, toJson b, toJson c, toJson d, toJson e,
                                          toJson f, toJson g]
#undef I
--
-- FromJson
--
-- | Deserialization class. The first argument of each method is a
-- dummy value used only to select the instance, never evaluated.
class TranslateField a => FromJson a where
    fromJson :: a -> JsonData -> Either String a
    -- For lists (same trick used by the Prelude to allow special
    -- handling of list types for Show).
    -- NOTE(review): the default lFromJson has no clause for non-array
    -- input, so it is partial unless an instance overrides it.
    lFromJson :: a -> JsonData -> Either String [a]
    lFromJson dummy (JDArray l) = mapM (fromJson dummy) l
    -- | To specify default values for the required fields of a JSON object,
    -- specialize this method in the instance definition for the relevant
    -- datatype.
    objectDefaults :: a -> M.Map String JsonData
    objectDefaults _ = M.empty
-- Explicit dictionary for FromJson (syb-with-class pattern).
data FromJsonD a = FromJsonD { fromJsonD :: a -> JsonData -> Either String a,
                               objectDefaultsD :: a -> M.Map String JsonData,
                               translateFieldD'' :: a -> String -> String }

-- Type proxy used only to select the dictionary; never forced.
fromJsonProxy :: Proxy FromJsonD
fromJsonProxy = error "'fromJsonProxy' should never be evaluated!"

-- Note inclusion of translateField from TranslateField.
instance FromJson t => Sat (FromJsonD t) where
    dict = FromJsonD { fromJsonD = fromJson,
                       objectDefaultsD = objectDefaults,
                       translateFieldD'' = translateField }
-- A Char parses from a one-character JSON string; a String (via
-- lFromJson) from any JSON string.
instance FromJson Char where
    fromJson _ (JDString [c]) = Right c
    fromJson _ _ = Left "Bad fromJson conversion: JSON string not of length 1 to 'Char'"
    lFromJson _ (JDString s) = Right s
    lFromJson _ _ = Left "Bad fromJson conversion: Non-string to 'String'"
-- null parses to Nothing; anything else to Just via the inner instance.
instance (FromJson a, TranslateField a, Typeable a) => FromJson (Maybe a) where
    fromJson _ JDNull = Right Nothing
    fromJson _ y =
        case fromJson undefined y of
          Left err -> Left err
          Right v -> Right $ Just v
instance (FromJson a, TranslateField a, Typeable a) => FromJson [a] where
    fromJson _ x = lFromJson undefined x
-- Integral types accept only numbers that are (exactly) whole.
instance FromJson Int where
    fromJson _ (JDNumber n)
        | (fromIntegral (floor n)) == n = Right (floor n)
        | True =
            Left "Bad fromJson conversion: number does not approximate an integer ('Int')"
    fromJson _ _ = Left "Bad fromJson conversion: Non-numeric to 'Int'"
instance FromJson Integer where
    fromJson _ (JDNumber n)
        | (fromIntegral (floor n)) == n = Right (floor n)
        | True =
            Left "Bad fromJson conversion: number does not approximate an integer ('Integer')"
    fromJson _ _ = Left "Bad fromJson conversion: Non-numeric to 'Integer'"
instance FromJson Double where
    fromJson _ (JDNumber d) = Right d
    fromJson _ _ = Left "Bad fromJson conversion: Non-numeric to 'Double'"
-- Ratios are recovered from the Double representation (lossy).
instance (Typeable a, Integral a) => FromJson (Ratio a) where
    fromJson _ (JDNumber i) = Right (fromRational (toRational i))
    fromJson _ _ = Left "Bad fromJson conversion: Non-numeric to instance of 'Ratio'"
instance FromJson Bool where
    fromJson _ (JDBool b) = Right b
    fromJson _ _ = Left "Bad fromJson conversion: Non-boolean to 'Bool'"
-- TODO: Use monads instead of 'ifs' if possible (funky type errors
-- which I haven't figured out yet, something to do with monomorphism
-- in let bindings vs. lambda abstraction?).
-- Parse the SAME object into both halves of the Union; both parses
-- must succeed.
instance (FromJson a, FromJson b, Typeable a, Typeable b, TranslateField a, TranslateField b) => FromJson (Union a b) where
    fromJson _ o@(JDObject _) =
        let r1 = fromJson undefined o
            r2 = fromJson undefined o
        in
          if isRight r1 && isRight r2
          then Right $ Union (fromRight r1) (fromRight r2)
          else Left "Bad fromJson conversion: error constructing subpart of union (did not serialize to object)"
    fromJson _ _ = Left "Bad fromJson conversion: attempt to convert non-object to Union"
-- | Standard error value for a failed n-tuple conversion.
tuperror :: Int -> Either String a
tuperror n =
    Left (Printf.printf "Bad fromJson conversion: attempt to convert something that was not a list of length %i to a %i-tuple" n n)
-- Tuples parse from JSON arrays of exactly matching length; the CPP
-- macro I(x) abbreviates the per-component constraints.
#define I(x) FromJson x, Typeable x, TranslateField x
instance (I(a), I(b)) => FromJson (a,b) where
    fromJson _ (JDArray [x1,x2]) = do
      r1 <- fromJson undefined x1
      r2 <- fromJson undefined x2
      return (r1,r2)
    fromJson _ _ = tuperror 2
instance (I(a), I(b), I(c)) => FromJson (a,b,c) where
    fromJson _ (JDArray [x1,x2,x3]) = do
      r1 <- fromJson undefined x1
      r2 <- fromJson undefined x2
      r3 <- fromJson undefined x3
      return (r1,r2,r3)
    fromJson _ _ = tuperror 3
instance (I(a), I(b), I(c), I(d)) => FromJson(a,b,c,d) where
    fromJson _ (JDArray [x1,x2,x3,x4]) = do
      r1 <- fromJson undefined x1
      r2 <- fromJson undefined x2
      r3 <- fromJson undefined x3
      r4 <- fromJson undefined x4
      return (r1,r2,r3,r4)
    fromJson _ _ = tuperror 4
instance (I(a), I(b), I(c), I(d), I(e)) => FromJson (a,b,c,d,e) where
    fromJson _ (JDArray [x1,x2,x3,x4,x5]) = do
      r1 <- fromJson undefined x1
      r2 <- fromJson undefined x2
      r3 <- fromJson undefined x3
      r4 <- fromJson undefined x4
      r5 <- fromJson undefined x5
      return (r1,r2,r3,r4,r5)
    fromJson _ _ = tuperror 5
instance (I(a), I(b), I(c), I(d), I(e), I(f)) =>
    FromJson (a,b,c,d,e,f) where
        fromJson _ (JDArray [x1,x2,x3,x4,x5,x6]) = do
          r1 <- fromJson undefined x1
          r2 <- fromJson undefined x2
          r3 <- fromJson undefined x3
          r4 <- fromJson undefined x4
          r5 <- fromJson undefined x5
          r6 <- fromJson undefined x6
          return (r1,r2,r3,r4,r5,r6)
        fromJson _ _ = tuperror 6
instance (I(a), I(b), I(c), I(d), I(e), I(f), I(g)) =>
    FromJson (a,b,c,d,e,f,g) where
        fromJson _ (JDArray [x1,x2,x3,x4,x5,x6,x7]) = do
          r1 <- fromJson undefined x1
          r2 <- fromJson undefined x2
          r3 <- fromJson undefined x3
          r4 <- fromJson undefined x4
          r5 <- fromJson undefined x5
          r6 <- fromJson undefined x6
          r7 <- fromJson undefined x7
          return (r1,r2,r3,r4,r5,r6,r7)
        fromJson _ _ = tuperror 7
#undef I
-- | Look up every key in order; Nothing as soon as any key is missing.
-- (The previous hand-rolled recursion was exactly mapM over Maybe.)
elemsOfMap :: Ord k => M.Map k v -> [k] -> Maybe [v]
elemsOfMap m = mapM (`M.lookup` m)
-- Error monad stacked over State, used while rebuilding values.
type ErrorWithState e s a = ErrorT e (State s) a
-- TODO: Not a very descriptive name. Oh well...
-- Consume the next JsonData from the state list and parse it into the
-- constructor argument currently being filled by fromConstrM.
m1 :: (Data FromJsonD a) => ErrorWithState String [JsonData] a
m1 = do
  jvl <- lift get
  (case jvl of
     [] -> throwError "Bad fromJson conversion: Not enough elements in JSON array to satisfy constructor"
     (jv:jvs) -> do
       lift $ put jvs
       (case fromJsonD dict (undefined :: a) jv of
          Left e -> throwError e
          Right x -> return x))
-- TODO: Again, uninformative name.
-- TODO: Some code duplication here.
-- Fill one record field: take the next field name from the state,
-- translate it, fetch the value from the JSON object map (falling back
-- to the supplied defaults), and parse it.
m2 :: (Data FromJsonD a, TranslateField a) => M.Map String JsonData -> (String -> String) -> a -> ErrorWithState String (M.Map String JsonData, [String]) a
m2 defaults transFunc dummy = do
  (m, sl) <- lift get
  (case sl of
     [] -> throwError "Bad fromJson conversion: Not enough fields in JSON object to satisfy constructor"
     (f:fs) -> do
       lift $ put (m, fs)
       let stripped = transFunc f
       (case M.lookup stripped m of
          Nothing ->
              case M.lookup stripped defaults of
                Nothing -> throwError $ "Bad fromJson conversion: Required field not present in JSON object: " ++ stripped
                Just v ->
                    case fromJsonD dict dummy v of
                      Left e -> throwError e
                      Right x -> return x
          Just v ->
              case fromJsonD dict dummy v of
                Left e -> throwError e
                Right x -> return x))
-- TODO: Another uninformative name.
-- "Automatic boxing": feed the whole JSON value to the constructor's
-- single argument. The Int state counts arguments filled so far, so a
-- second argument triggers an error.
m3 :: (Data FromJsonD a, TranslateField a) => JsonData -> a -> ErrorWithState String Int a
m3 jsondata dummy = do
  s <- get
  if s > 0
    then throwError "Bad fromJson conversion: Expecting JSON object or array; did not attempt automatic boxing because constructor takes more than one argument."
    else do
      put (s + 1)
      case fromJsonD dict dummy jsondata of
        Left e -> throwError e
        Right x -> return x
-- | Generic SYB-based deserializer, dispatching on the JSON shape:
-- arrays fill constructor arguments positionally, objects fill record
-- fields, and any other value is "auto-boxed" into single-constructor
-- types.
-- NOTE(review): for arrays, 'es' parses from `tryTail l`, i.e. the
-- first array element appears to be skipped -- presumably reserved for
-- arrayPrepend data; confirm against genericToJson.
genericFromJson :: (Data FromJsonD a, FromJson a, TranslateField a) => a -> JsonData -> Either String a
genericFromJson dummy (JDArray l) =
    case datarep (dataTypeOf fromJsonProxy dummy) of
      AlgRep ccs@(_:_) -> evalArrayConstr ccs
          where
            -- Try each constructor; first successful parse wins.
            evalArrayConstr = tryHead err . dropWhile isLeft . map es
            es :: (Data FromJsonD a, FromJson a) => Constr -> Either String a
            es c = evalState (runErrorT (fromConstrM fromJsonProxy m1 c)) (tryTail l)
            tryTail = cond null (const []) tail
            tryHead def = cond null (const def) head
            err = Left "Bad fromJson conversion: Type with no constructors!"
      AlgRep _ -> Left "Bad fromJson conversion: Type with no constructors!"
      _ -> Left "Bad fromJson conversion: Non-algebraic datatype given to 'genericFromJson'"
genericFromJson dummy (JDObject m) =
    case datarep (dataTypeOf fromJsonProxy dummy) of
      AlgRep cs@(_:_) -> evalConstrs dummy m cs
      _ -> Left "Bad fromJson conversion: Non-algebraic datatype given to 'genericFromJson'"
-- This equation matches every remaining JSON value, so the former
-- final catch-all equation was unreachable; its error message now
-- lives in the previously-missing non-AlgRep case alternative below.
genericFromJson dummy jsondata =
    case datarep (dataTypeOf fromJsonProxy dummy) of
      AlgRep [c] -> evalState (runErrorT (gmapM fromJsonProxy (m3 jsondata) (fromConstr fromJsonProxy c))) 0
      AlgRep _ -> Left "Bad fromJson conversion: Expecting JSON object or array; did not attempt automatic boxing because type has more than one constructor."
      _ -> Left "Bad fromJson conversion: Expecting JSON object or array"
-- Try each record constructor against the JSON object's field map;
-- first successful parse wins.
evalConstrs :: (Data FromJsonD a, FromJson a) => a -> M.Map String JsonData -> [Constr] -> Either [Char] a
evalConstrs dummy m = tryHead err . dropWhile isLeft . map (evalConstr dummy m)
    where
      tryHead def = cond null (const def) head
      err = Left "Bad fromJson conversion: Type with no constructors!"
-- | Try to build a value with one specific record constructor from the
-- field map of a JSON object. Non-record constructors are rejected.
evalConstr :: (Data FromJsonD a, FromJson a) => a -> M.Map String JsonData -> Constr -> Either [Char] a
evalConstr dummy m c = case constrFields c of
  -- Error message typo fixed: "JDObect" -> "JDObject".
  [] -> Left $ "Bad fromJson conversion: Attempt to convert JDObject to a non-record algebraic type"
  -- TODO:
  -- Can't use fromConstrM because we need to get dummy values of the
  -- appropriate type for each argument of the constructor. This is unfortunate,
  -- because it means that we get runtime errors for records with strict fields.
  fs -> evalState (runErrorT (gmapM fromJsonProxy (m2 (objectDefaultsD dict dummy) (translateFieldD'' dict dummy)) (fromConstr fromJsonProxy c))) (m, fs)
-- Names of all constructors of the dummy value's datatype.
constrNames :: (Data FromJsonD a, Data TranslateFieldD a) => a -> [String]
constrNames x = map showConstr (dataTypeConstrs (dataTypeOf fromJsonProxy x))
-- | The counterpart of 'enumToJson'.
-- The JSON string is fed through @transform@ to recover a constructor
-- name, which must be nullary (built here with no arguments).
enumFromJson :: (Data FromJsonD a, Data TranslateFieldD a) => (String -> String) -> a -> JsonData -> Either String a
enumFromJson transform dummy (JDString s) =
    let cname = (transform s) in
      if elem cname (constrNames dummy)
      then
        case fromConstrM fromJsonProxy Nothing (mkConstr (dataTypeOf fromJsonProxy dummy) cname [] Prefix ) of
          Nothing -> Left "Error in enumFromJson"
          Just x -> Right x
      else Left "Constructor name not recognized in enumFromJson"
enumFromJson _ _ _ = Left "Non-string given to enumFromJson"
instance (Data FromJsonD t, TranslateField t) => FromJson t where
fromJson = genericFromJson
--
-- JSON parser.
--
-- Determine the unicode encoding of a byte stream on the assumption
-- that it begins with two ASCII characters, by inspecting which of the
-- first four bytes are zero.
getEncoding :: B.ByteString -> EncodingName
getEncoding s
    -- If the string is shorter than 4 bytes, we have no way of
    -- determining the encoding.
    | B.length s < 4 = "UTF-8"
    -- Little endian UTF 32/16.
    | b1 /= 0 && b2 == 0 && b3 == 0 && b4 == 0 = "UTF-32LE"
    | b1 /= 0 && b2 == 0 && b3 /= 0 && b4 == 0 = "UTF-16LE"
    -- Big endian UTF 32/16.
    | b1 == 0 && b2 == 0 && b3 == 0 && b4 /= 0 = "UTF-32BE"
    | b1 == 0 && b2 /= 0 && b3 == 0 && b4 /= 0 = "UTF-16BE"
    -- All four bytes nonzero: UTF-8 (BOM allowed but not required).
    | b1 /= 0 && b2 /= 0 && b3 /= 0 && b4 /= 0 = "UTF-8"
    -- If we can't figure it out, guess at UTF-8.
    | otherwise = "UTF-8"
    where
      b1 = B.index s 0
      b2 = B.index s 1
      b3 = B.index s 2
      b4 = B.index s 3
-- Converts a ByteString to a String of unicode code points by first
-- transcoding to UTF-16LE and then pairing bytes into code units.
toHaskellString :: EncodingName -> B.ByteString -> String
toHaskellString enc source =
    stripBOM $ map chr (pairBytes (B.unpack bs))
    where
      -- Combine little-endian byte pairs into UTF-16 code units.
      pairBytes :: [W.Word8] -> [Int]
      pairBytes [] = []
      -- A lone trailing byte cannot form a UTF-16 code unit; drop it
      -- rather than crashing (the previous definition was partial and
      -- failed on odd-length input).
      pairBytes [_] = []
      pairBytes (c:c':cs) = ((fromIntegral c) + (fromIntegral c')*256) : (pairBytes cs)
      bs = convertFuzzy Discard enc "UTF-16LE" source
      -- Strip any byte-order mark left at the front of the decoded text.
      stripBOM :: String -> String
      stripBOM ('\0':'\0':'\xFE':'\xFF':cs) = cs
      stripBOM ('\xFF':'\xFE':'\0':'\0':cs) = cs
      stripBOM ('\xFE':'\xFF':cs) = cs
      stripBOM ('\xFF':'\xFE':cs) = cs
      stripBOM ('\xEF':'\xBB':'\xBF':cs) = cs
      stripBOM cs = cs
(<|>) = (P.<|>)
-- | Converts a ByteString to an instance of JsonData (unicode encoding
-- is detected automatically).
parseJsonByteString :: B.ByteString -> Either String JsonData
parseJsonByteString bs = either (Left . show) Right result
  where
    result = P.runParser (ws >> jsonValue) () "" decoded
    decoded = toHaskellString (getEncoding bs) bs
-- | Converts a String (interpreted as a true unicode String) to an
-- instance of JsonData.
parseJsonString :: String -> Either String JsonData
parseJsonString s = either (Left . show) Right (P.runParser (ws >> jsonValue) () "" s)
-- Lift a pure function over a monadic action ('fmap' restricted to
-- Monad, to keep the original inferred type).
apply f p = p >>= \r -> return (f r)
-- Run two list-producing actions in sequence and append their results.
pconcat p1 p2 = p1 >>= \xs -> p2 >>= \ys -> return (xs ++ ys)
-- Wrap a parser's result in a singleton list.
listify :: P.Parser x -> P.Parser [x]
listify p = fmap (\x -> [x]) p
ws = P.many (P.oneOf [' ','\r','\n','\t','\f','\v'])
-- Could use the ParsecToken module, but trying a floating point number
-- then an integer is a bit inefficient (especially since integers will
-- be more common).
-- Parses an optionally-negated decimal number into 'JDNumber'.
-- NOTE(review): only a lowercase 'e' exponent marker and a '-'
-- exponent sign are accepted; JSON also allows 'E' and '+' -- confirm
-- whether this gap is intended.
number :: P.Parser JsonData
number = do
  neg <- (P.char '-' >> return True) <|> return False
  i <- P.many1 P.digit
  point <- P.option Nothing (apply Just (P.char '.' >> P.many1 P.digit))
  exponent <- P.option Nothing (apply Just (P.char 'e' >> pconcat (P.option "" (listify (P.char '-'))) (P.many1 P.digit)))
  -- The 'fromJust' calls below are guarded by the matching '== Nothing'
  -- checks, so they cannot fail.
  let n = if point == Nothing && exponent == Nothing
          then read i :: Double
          else read (i ++ (if point == Nothing then "" else "." ++ fromJust point) ++
                     (if exponent == Nothing then "" else "e" ++ fromJust exponent)) :: Double
  return . JDNumber $ if neg then negate n else n
-- | Parse one character of a string literal delimited by @opener@,
-- handling backslash escapes; any other escaped character stands for
-- itself ("\X" == "X").
stringChar :: Char -> P.Parser Char
stringChar opener = do
  -- Fail immediately on either single or double quotes or
  -- on control characters.
  c <- P.satisfy (\c -> c /= opener && (ord c) > 31)
  (case c of
     '\\' ->
       (P.char '"' >> return '"') <|>
       (P.char '\'' >> return '\'') <|>
       (P.char 'b' >> return '\b') <|>
       (P.char 'f' >> return '\f') <|>
       (P.char 'n' >> return '\n') <|>
       (P.char 'r' >> return '\r') <|>
       (P.char 't' >> return '\t') <|>
       (do
          P.char 'u'
          ds <- P.count 4 P.hexDigit
          return $ chr (read ("0x" ++ ds) :: Int)) <|>
       -- "\X" == "X" by default (redundant '>>= return' removed).
       P.satisfy allowed
     -- Renamed from a shadowing 'c' binding for clarity.
     other -> return other)
-- Parse a quoted string literal; the closing quote must match the
-- opening one.
string :: P.Parser String
string = do
  opener <- P.char '"' <|> P.char '\'' -- JSON spec requires double quotes, but we'll be lenient.
  cs <- P.many (stringChar opener)
  P.char opener
  return cs
jsonString = apply JDString string
-- Parse one @"key" : value@ pair of a JSON object.
kvp :: P.Parser (String, JsonData)
kvp = do
  key <- string
  ws
  P.char ':'
  ws
  val <- jsonValue
  return (key, val)
-- Run a parser, then skip any trailing whitespace.
lexeme :: P.Parser a -> P.Parser a
lexeme p = p >>= \result -> ws >> return result
-- Parse a JSON array: '[' value (',' value)* ']' with optional
-- whitespace around elements.
jsonArray :: P.Parser JsonData
jsonArray = do
  P.char '['
  ws
  elements <- P.sepBy (lexeme jsonValue) (P.char ',' >> ws)
  ws
  P.char ']'
  return (JDArray elements)
-- Parse a JSON object: '{' kvp (',' kvp)* '}' with optional whitespace,
-- collecting the pairs into a map.
object :: P.Parser JsonData
object = do
  P.char '{'
  ws
  pairs <- P.sepBy (lexeme kvp) (P.char ',' >> ws)
  ws
  P.char '}'
  return (JDObject (M.fromList pairs))
-- Parse the literals "true" / "false" into 'JDBool'.
boolean :: P.Parser JsonData
boolean = (P.try (P.string "true") >> return (JDBool True)) <|>
          (P.string "false" >> return (JDBool False))
-- Parse the literal "null".
jsonNull :: P.Parser JsonData
jsonNull = P.string "null" >> return JDNull
-- Any JSON value; surrounding whitespace is handled by the callers.
jsonValue = number <|> jsonString <|> jsonArray <|> object <|> boolean <|> jsonNull
--
-- Some other utilities.
--
-- | Converts a JSON String (interpreted as a true unicode string) to
-- a value of the type given by the first (dummy) argument.
fromJsonString :: FromJson a => a -> String -> Either String a
fromJsonString dummy s =
    case parseJsonString s of
      -- 'parseJsonString' already returns a String error; the previous
      -- 'Left (show e)' show-ed that String again, wrapping the message
      -- in an extra layer of quotes and escapes.
      Left e -> Left e
      Right js -> fromJson dummy js
-- | Converts a JSON ByteString (with unicode encoding automatically detected)
-- to a value of the type given by the first (dummy) argument.
fromJsonByteString :: FromJson a => a -> B.ByteString -> Either String a
fromJsonByteString dummy s =
    case parseJsonByteString s of
      -- The error is already a String; re-'show'ing it (as before)
      -- added spurious quotes and escapes to the message.
      Left e -> Left e
      Right js -> fromJson dummy js
-- | Converts a value to an ASCII-only JSON String.
-- (The rendering itself is 'JsonData's 'Show' instance.)
toJsonString :: ToJson a => a -> String
toJsonString = show . toJson
--
-- A couple of utility functions.
--
-- | Converts the first character of a string to upper case.
firstCharToUpper :: String -> String
firstCharToUpper s = case s of
  [] -> []
  (c:cs) -> toUpper c : cs
-- | Converts the first character of a string to lower case.
firstCharToLower :: String -> String
firstCharToLower s = case s of
  [] -> []
  (c:cs) -> toLower c : cs
-- Is this 'Either' a 'Left'?
isLeft :: Either a b -> Bool
isLeft = either (const True) (const False)
-- Is this 'Either' a 'Right'?
isRight :: Either a b -> Bool
isRight = either (const False) (const True)
-- Extract the 'Left' value. Partial: calls 'error' with an explicit
-- message (rather than an anonymous pattern-match failure) on 'Right'.
fromLeft :: Either a b -> a
fromLeft (Left x) = x
fromLeft (Right _) = error "fromLeft: Right value"
-- Extract the 'Right' value. Partial: calls 'error' with an explicit
-- message (rather than an anonymous pattern-match failure) on 'Left'.
fromRight :: Either a b -> b
fromRight (Right x) = x
fromRight (Left _) = error "fromRight: Left value"
-- Choose between two functions based on a predicate over the argument.
cond :: (a -> Bool) -> (a -> b) -> (a -> b) -> a -> b
cond p th el a
  | p a = th a
  | otherwise = el a
| addrummond/RJson | Text/RJson.hs | bsd-3-clause | 31,901 | 304 | 37 | 8,781 | 9,536 | 5,084 | 4,452 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Types
( Digit
, zero
, one
, two
, three
, four
, five
, six
, seven
, eight
, nine
, Account
, fromList
, Verified
, verify
, verified
) where
newtype Digit = Digit { unDigit :: Int } deriving (Enum, Eq, Ord)
instance Show Digit where
show = show . unDigit
zero, one, two, three, four, five, six, seven, eight, nine :: Digit
zero = Digit 0
one = Digit 1
two = Digit 2
three = Digit 3
four = Digit 4
five = Digit 5
six = Digit 6
seven = Digit 7
eight = Digit 8
nine = Digit 9
newtype Account = Account { account :: [Digit] } deriving (Eq)
instance Show Account where
show = concatMap show . account
-- An account number is exactly nine digits; anything else is rejected.
fromList :: [Digit] -> Maybe Account
fromList ds = if length ds == 9 then Just (Account ds) else Nothing
newtype Verified = Verified { verified :: Account } deriving (Eq)
instance Show Verified where
show = show . verified
-- Wrap an account in 'Verified' exactly when its checksum holds.
verify :: Account -> Maybe Verified
verify a = if isValid a then Just (Verified a) else Nothing
-- Checksum rule: the digit sum weighted 9,8,...,1 must be divisible by 11.
isValid :: Account -> Bool
isValid (Account ds) = checksum `mod` 11 == 0
  where
    checksum = sum (zipWith (*) [9,8..1] (map unDigit ds))
| mbeidler/kata-bank-ocr | src/Types.hs | bsd-3-clause | 1,215 | 0 | 11 | 348 | 445 | 251 | 194 | 47 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.SemVer.Range (
-- * Types
RangeOp(..)
, RangeSpec(..)
, Version(..)
, version
, VersionRange
, range
, Identifier(..)
, LatticeSyntax(..)
, (/\)
, (\/)
-- * Parsers
, parseVersion
, parseVersionRange
) where
import Control.Applicative
import Control.DeepSeq
import Control.Monad
import Control.Monad.Trans.State
import Data.Data
import Data.Foldable
import Data.String
import Data.Char
import Data.Traversable
import GHC.Generics
import Prelude hiding (or, all)
import Text.Regex.Applicative as RE
import Text.Regex.Applicative.Common as RE
-- Drop the error side of an 'Either'.
eitherToMaybe :: Either e a -> Maybe a
eitherToMaybe = either (const Nothing) Just
-- Semver
data Version = Version
{ _versionMajor :: !Int
, _versionMinor :: !Int
, _versionPatch :: !Int
, _versionRelease :: [Identifier]
} deriving (Eq, Ord, Show, Typeable, Data, Generic)
-- todo hashable
version :: Int -> Int -> Int -> Version
version x y z = Version x y z []
versionR :: RE Char Version
versionR = uncurry4 Version <$> threeR
parseVersion :: String -> Maybe Version
parseVersion = RE.match versionR
data Identifier = INum !Int
| IStr !String
deriving (Eq, Ord, Show, Data, Typeable, Generic)
-- TODO: hashable
-- | All-digit strings become 'INum'; everything else -- including the
-- empty string -- becomes 'IStr'.
instance IsString Identifier where
  fromString str
    -- The 'not (null str)' guard is required: 'all isDigit ""' is True,
    -- and the previous code then crashed on 'read ""'.
    | not (null str) && all isDigit str = INum $ read str
    | otherwise = IStr str
identifiers :: RE Char [Identifier]
identifiers = maybe [] id <$> optional (identifiers')
identifiers' :: RE Char [Identifier]
identifiers' = (:) <$ sym '-' <*> identifier <*> many (sym '.' *> identifier)
identifier :: RE Char Identifier
identifier = INum <$> decimal <|> IStr <$> many (psym (flip Prelude.elem $ '-' : ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9']))
---
data RangeOp = ROLT -- ^ @<@
| ROLE -- ^ @<=@
| ROGT -- ^ @>@
| ROGE -- ^ @>=@
| ROEQ -- ^ @=@
deriving (Eq, Ord, Show, Read, Typeable, Data, Generic)
instance NFData RangeOp
data LatticeSyntax a = LVar a
| LBound Bool
| LJoin (LatticeSyntax a) (LatticeSyntax a)
| LMeet (LatticeSyntax a) (LatticeSyntax a)
deriving (Eq, Ord, Read, Show, Functor, Foldable, Traversable, Typeable, Data)
infixr 3 /\
infixr 2 \/
-- | Infix version of 'LMeet'
(/\) :: LatticeSyntax a -> LatticeSyntax a -> LatticeSyntax a
(/\) = LMeet
-- | Infix version of 'LJoin'
(\/) :: LatticeSyntax a -> LatticeSyntax a -> LatticeSyntax a
(\/) = LJoin
instance Applicative LatticeSyntax where
pure = return
(<*>) = ap
instance Monad LatticeSyntax where
return = LVar
LVar x >>= f = f x
LBound b >>= _ = LBound b
LJoin a b >>= f = LJoin (a >>= f) (b >>= f)
LMeet a b >>= f = LMeet (a >>= f) (b >>= f)
freeVars :: LatticeSyntax a -> [a]
freeVars = toList
-- | De Morgan dual: swap meets with joins and negate the bounds.
dual :: LatticeSyntax a -> LatticeSyntax a
dual expr = case expr of
  LVar v    -> LVar v
  LBound t  -> LBound (not t)
  LJoin a b -> LMeet (dual a) (dual b)
  LMeet a b -> LJoin (dual a) (dual b)
-- | Test for equivalence.
--
-- >>> equivalent (LMeet (LVar 'a') (LVar 'b')) (LMeet (LVar 'b') (LVar 'a'))
-- True
--
-- >>> equivalent (LVar 'a') (LMeet (LVar 'a') (LVar 'a'))
-- True
--
-- >>> equivalent (LMeet (LVar 'a') (LVar 'b')) (LMeet (LVar 'b') (LVar 'b'))
-- False
equivalent :: Eq a => LatticeSyntax a -> LatticeSyntax a -> Bool
equivalent a b = all (uncurry (==)) . runEval $ p
where p = (,) <$> evalLattice a <*> evalLattice b
-- | Test for preorder.
--
-- @ a ≤ b ⇔ a ∨ b ≡ b ⇔ a ≡ a ∧ b @
--
-- >>> preorder (LVar 'a' `LMeet` LVar 'b') (LVar 'a')
-- True
--
-- >>> preorder (LVar 'a') (LVar 'a' `LMeet` LVar 'b')
-- False
preorder :: Eq a => LatticeSyntax a -> LatticeSyntax a -> Bool
preorder a b = (a `LJoin` b) `equivalent` b
-- | Return `True` if for some variable assigment expression evaluates to `True`.
satisfiable :: Eq a => LatticeSyntax a -> Bool
satisfiable = or . runEval . evalLattice
newtype Eval v a = Eval { unEval :: StateT [(v, Bool)] [] a }
deriving (Functor, Applicative, Alternative, Monad, MonadPlus)
runEval :: Eval v a -> [a]
runEval act = evalStateT (unEval act) []
evalLattice :: Eq v => LatticeSyntax v -> Eval v Bool
evalLattice (LVar v) = guess v
evalLattice (LBound b) = return b
evalLattice (LJoin a b) = evalLattice a ||^ evalLattice b
evalLattice (LMeet a b) = evalLattice a &&^ evalLattice b
-- Look up the variable's truth value in the current assignment; if it
-- is unbound, nondeterministically (via the underlying list monad's
-- '<|>') try True first and then False, recording the choice so later
-- occurrences of the same variable agree.
guess :: Eq v => v -> Eval v Bool
guess v = Eval $ do
  st <- get
  let remember b = put ((v, b) : st) >> return b
  case lookup v st of
    Just b -> return b
    Nothing -> remember True <|> remember False
-- From Control.Monad.Extra of extra
-- | Like @if@, but where the test can be monadic.
ifM :: Monad m => m Bool -> m a -> m a -> m a
ifM mb t f = mb >>= \b -> if b then t else f
-- | The lazy '||' operator lifted to a monad. If the first
-- argument evaluates to 'True' the second argument will not
-- be evaluated.
--
-- > Just True ||^ undefined == Just True
-- > Just False ||^ Just True == Just True
-- > Just False ||^ Just False == Just False
(||^) :: Monad m => m Bool -> m Bool -> m Bool
(||^) a b = a >>= \x -> if x then return True else b
-- | The lazy '&&' operator lifted to a monad. If the first
-- argument evaluates to 'False' the second argument will not
-- be evaluated.
--
-- > Just False &&^ undefined == Just False
-- > Just True &&^ Just True == Just True
-- > Just True &&^ Just False == Just False
(&&^) :: Monad m => m Bool -> m Bool -> m Bool
(&&^) a b = a >>= \x -> if x then b else return False
--
data RangeSpec = RS !RangeOp !Version
deriving (Eq, Ord, Show, Typeable, Data, Generic)
type VersionRange = LatticeSyntax RangeSpec
range :: RangeOp -> Version -> VersionRange
range op v = pure (RS op v)
fullRange :: VersionRange
fullRange = range ROGE (version 0 0 0)
-- Range parser
scalarRangeR :: RE Char VersionRange
scalarRangeR = ge <|> gt <|> lt <|> le <|> eq
where ge = LVar . RS ROGE <$ RE.string ">=" <*> versionR
gt = LVar . RS ROGT <$ RE.string ">" <*> versionR
le = LVar . RS ROLE <$ RE.string "<=" <*> versionR
lt = LVar . RS ROLT <$ RE.string "<" <*> versionR
eq = LVar . RS ROEQ <$> versionR
separatedBy :: (a -> a -> a) -> RE c a -> RE c () -> RE c a
separatedBy f re sep = foldl' f <$> re <*> many (sep *> re)
ws :: RE Char ()
ws = void $ some $ psym isSpace
conR :: RE Char VersionRange
conR = separatedBy (/\) scalarRangeR ws
disR :: RE Char VersionRange
disR = separatedBy (\/) conR (ws *> string "||" *> ws)
starR :: RE Char VersionRange
starR = fullRange <$ string "*"
xRange1R :: RE Char VersionRange
xRange1R = f <$> RE.decimal <* sym '.' <*> RE.decimal <* string ".x"
where f x y = range ROGE (version x y 0) /\ range ROLT (version x (y + 1) 0)
xRange2R :: RE Char VersionRange
xRange2R = f <$> RE.decimal <* string ".x"
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
xRange3R :: RE Char VersionRange
xRange3R = f <$> RE.decimal <* string ".x.x"
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
threeR :: RE Char (Int, Int, Int, [Identifier])
threeR = (,,,) <$> RE.decimal <* sym '.' <*> RE.decimal <* sym '.' <*> RE.decimal <*> identifiers
-- Apply a 4-argument function to the components of a 4-tuple.
uncurry4 :: (a -> b -> c -> d -> e) -> (a, b, c, d) -> e
uncurry4 f (a,b,c,d) = f a b c d
-- The previous "{- INLINE uncurry4 -}" was a plain comment, not a
-- pragma; proper pragma syntax is {-# ... #-}.
{-# INLINE uncurry4 #-}
twoR :: RE Char (Int, Int)
twoR = (,) <$> RE.decimal <* sym '.' <*> RE.decimal
oneR :: RE Char Int
oneR = RE.decimal
partial1R :: RE Char VersionRange
partial1R = uncurry f <$> twoR
where f x y = range ROGE (version x y 0) /\ range ROLT (version x (y + 1) 0)
partial2R :: RE Char VersionRange
partial2R = f <$> oneR
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
tilde1R :: RE Char VersionRange
tilde1R = uncurry4 f <$ sym '~' <*> threeR
where f 0 0 z i = range ROGE (Version 0 0 z i) /\ range ROLT (version 0 0 (z + 1))
f 0 y z i = range ROGE (Version 0 y z i) /\ range ROLT (version 0 (y + 1) 0)
f x y z i = range ROGE (Version x y z i) /\ range ROLT (version x (y + 1) 0)
tilde2R :: RE Char VersionRange
tilde2R = uncurry f <$ sym '~' <*> twoR
where f x y = range ROGE (version x y 0) /\ range ROLT (version x (y + 1) 0)
tilde3R :: RE Char VersionRange
tilde3R = f <$ sym '~' <*> oneR
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
caret1R :: RE Char VersionRange
caret1R = uncurry4 f <$ sym '^' <*> threeR
where f 0 0 z i = range ROGE (Version 0 0 z i) /\ range ROLT (version 0 0 (z + 1))
f 0 y z i = range ROGE (Version 0 y z i) /\ range ROLT (version 0 (y + 1) 0)
f x y z i = range ROGE (Version x y z i) /\ range ROLT (version (x + 1) 0 0)
caret2R :: RE Char VersionRange
caret2R = uncurry f <$ sym '^' <*> twoR <* optional (string ".x")
where f 0 y = range ROGE (version 0 y 0) /\ range ROLT (version 0 (y + 1) 0)
f x y = range ROGE (version x y 0) /\ range ROLT (version (x + 1) 0 0)
caret3R :: RE Char VersionRange
caret3R = f <$ sym '^' <*> oneR <* string ".x"
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
-- Try each alternative in order; an empty list yields 'empty'.
choose :: Alternative f => [f a] -> f a
choose [] = empty
choose (p:ps) = p <|> choose ps
hyphenR :: RE Char VersionRange
hyphenR = (/\) <$> hyphenLoR <* ws <* sym '-' <* ws <*> hyphenHiR
hyphenLoR :: RE Char VersionRange
hyphenLoR = h1 <|> h2 <|> h3
where h1 = range ROGE <$> versionR
h2 = uncurry (\x y -> range ROGE (version x y 0)) <$> twoR
h3 = (\x -> range ROGE (version x 0 0)) <$> oneR
hyphenHiR :: RE Char VersionRange
hyphenHiR = h1 <|> h2 <|> h3
where h1 = range ROLE <$> versionR
h2 = uncurry (\x y -> range ROLT (version x (y + 1) 0)) <$> twoR
h3 = (\x -> range ROLT (version (x + 1) 0 0)) <$> oneR
-- One of the "advanced" node-semver range forms: x-ranges, partial
-- versions, tilde ranges, caret ranges, and hyphen ranges.
-- NOTE(review): the name is misspelled ("advanded"); renaming would
-- also require updating its caller 'rangeR', so it is left as-is here.
advandedRangeR :: RE Char VersionRange
advandedRangeR = choose
  [ xRange1R
  , xRange2R
  , xRange3R
  , partial1R
  , partial2R
  , tilde1R
  , tilde2R
  , tilde3R
  , caret1R
  , caret2R
  , caret3R
  , hyphenR
  ]
rangeR :: RE Char VersionRange
rangeR = disR <|> starR <|> advandedRangeR <|> pure fullRange
parseVersionRange :: String -> Maybe VersionRange
parseVersionRange = RE.match rangeR
| phadej/semver-range | src/Data/SemVer/Range.hs | bsd-3-clause | 10,363 | 0 | 15 | 2,465 | 4,066 | 2,108 | 1,958 | 240 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE OverloadedStrings #-}
module MathFlow.Core where
import GHC.TypeLits
import Data.Singletons
import Data.Singletons.TH
import Data.Promotion.Prelude
-- |IsSubSamp // Subsampling constraint
--
-- * (f :: [Nat]) // strides for subsampling
-- * (m :: [Nat]) // dimensions of original tensor
-- * (n :: [Nat]) // dimensions of subsampled tensor
-- * :: Bool
type family IsSubSamp (f :: [Nat]) (m :: [Nat]) (n :: [Nat]) :: Bool where
IsSubSamp (1:fs) (m:ms) (n:ns) = IsSubSamp fs ms ns
IsSubSamp (f:fs) (m:ms) (n:ns) = ((n * f) :== m) :&& (IsSubSamp fs ms ns)
IsSubSamp '[] '[] '[] = 'True
IsSubSamp _ _ _ = 'False
-- |IsMatMul // A constraint for matrix multiplication
--
-- * (m :: [Nat]) // dimensions of a[..., i, k]
-- * (o :: [Nat]) // dimensions of b[..., k, j]
-- * (n :: [Nat]) // dimensions of output[..., i, j] = sum_k (a[..., i, k] * b[..., k, j]), for all indices i, j.
-- * :: Bool
type family IsMatMul (m :: [Nat]) (o :: [Nat]) (n :: [Nat]) :: Bool where
IsMatMul m o n =
Last n :== Last o :&&
Last m :== Head (Tail (Reverse o)) :&&
(Tail (Reverse n)) :== (Tail (Reverse m)) :&&
(Tail (Tail (Reverse n))) :== (Tail (Tail (Reverse o)))
-- |IsConcat // A constraint for concatination of tensor
--
-- * (m :: [Nat]) // dimensions of a[..., i, ...]
-- * (o :: [Nat]) // dimensions of b[..., k, ...]
-- * (n :: [Nat]) // dimensions of output[..., i+k, ...] = concat (a,b)
-- * :: Bool
type family IsConcat (m :: [Nat]) (o :: [Nat]) (n :: [Nat]) :: Bool where
IsConcat (m:mx) (o:ox) (n:nx) = (m :== o :&& m:== n :|| m + o :== n) :&& IsConcat mx ox nx
IsConcat '[] '[] '[] = 'True
IsConcat _ _ _ = 'False
-- |IsSameProduct // A constraint for reshaping tensor
--
-- * (m :: [Nat]) // dimensions of original tensor
-- * (n :: [Nat]) // dimensions of reshaped tensor
-- * :: Bool
type family IsSameProduct (m :: [Nat]) (n :: [Nat]) :: Bool where
IsSameProduct (m:mx) (n:nx) = m :== n :&& (Product mx :== Product nx)
IsSameProduct mx nx = Product mx :== Product nx
-- |Dependently typed tensor model
--
-- This model includes basic arithmetic operators and tensorflow functions.
data Tensor (n::[Nat]) t a =
(Num t) => TScalar t -- ^ Scalar value
| Tensor a -- ^ Transform a value to dependently typed value
| TAdd (Tensor n t a) (Tensor n t a) -- ^ + of Num
| TSub (Tensor n t a) (Tensor n t a) -- ^ - of Num
| TMul (Tensor n t a) (Tensor n t a) -- ^ * of Num
| TAbs (Tensor n t a) -- ^ abs of Num
| TSign (Tensor n t a) -- ^ signum of Num
| TRep (Tensor (Tail n) t a) -- ^ vector wise operator
| TTr (Tensor (Reverse n) t a) -- ^ tensor tansporse operator
| forall o m. (SingI o,SingI m,SingI n,IsMatMul m o n ~ 'True) => TMatMul (Tensor m t a) (Tensor o t a) -- ^ matrix multiply
| forall o m. (SingI o,SingI m,SingI n,IsConcat m o n ~ 'True) => TConcat (Tensor m t a) (Tensor o t a) -- ^ concat operator
| forall m. (SingI m,IsSameProduct m n ~ 'True) => TReshape (Tensor m t a) -- ^ reshape function
| forall o m.
(SingI o,SingI m,
Last n ~ Last o,
Last m ~ Head (Tail (Reverse o)),
(Tail (Reverse n)) ~ (Tail (Reverse m))
) =>
TConv2d (Tensor m t a) (Tensor o t a) -- ^ conv2d function
| forall f m. (SingI f, SingI m,IsSubSamp f m n ~ 'True) => TMaxPool (Sing f) (Tensor m t a) -- ^ max pool
| TSoftMax (Tensor n t a)
| TReLu (Tensor n t a)
| TNorm (Tensor n t a)
| forall f m. (SingI f,SingI m,IsSubSamp f m n ~ 'True) => TSubSamp (Sing f) (Tensor m t a) -- ^ subsampling function
| forall m t2. TApp (Tensor n t a) (Tensor m t2 a)
| TFunc String (Tensor n t a)
| TSym String
| TArgT String (Tensor n t a)
| TArgS String String
| TArgI String Integer
| TArgF String Float
| TArgD String Double
| forall f. (SingI f) => TArgSing String (Sing (f::[Nat]))
| TLabel String (Tensor n t a) -- ^ When generating code, this label is used.
-- | Attach a second tensor expression to the first (via 'TApp'); the
-- result keeps the dimensions and element type of the left operand.
(<+>) :: forall n t a m t2. (Tensor n t a) -> (Tensor m t2 a) -> (Tensor n t a)
(<+>) = TApp
infixr 4 <+>
-- | Arithmetic on tensors builds syntax-tree nodes; nothing is
-- evaluated here. 'negate' falls back to the default definition in
-- terms of '(-)' and 'fromInteger'.
instance (Num t) => Num (Tensor n t a) where
  (+) = TAdd
  (-) = TSub
  (*) = TMul
  abs = TAbs
  signum = TSign
  fromInteger = TScalar . fromInteger
-- | get dimension from tensor
--
-- >>> dim (Tensor 1 :: Tensor '[192,10] Float Int)
-- [192,10]
class Dimension a where
  dim :: a -> [Integer]
-- The dimensions live in the type-level list @n@, not in the value, so
-- the tensor argument is only used to fix the type for 'sing'.
instance (SingI n) => Dimension (Tensor n t a) where
  dim t = dim $ ty t
    where
      ty :: (SingI n) => Tensor n t a -> Sing n
      ty _ = sing
-- A singleton of a type-level dimension list demotes directly.
instance Dimension (Sing (n::[Nat])) where
  dim t = fromSing t
-- | Wrap a plain value as a tensor whose dimensions are named by the
-- singleton argument (used only to pin down @n@).
toValue :: forall n t a. Sing (n::[Nat]) -> a -> Tensor n t a
toValue _ a = Tensor a
-- | Matrix multiplication; the 'IsMatMul' constraint checks dimension
-- compatibility at the type level.
(%*) :: forall o m n t a. (SingI o,SingI m,SingI n,IsMatMul m o n ~ 'True)
     => Tensor m t a -> Tensor o t a -> Tensor n t a
(%*) a b = TMatMul a b
-- | Attach a label to a tensor expression ('TLabel'); used when
-- generating code.
(<--) :: SingI n => String -> Tensor n t a -> Tensor n t a
(<--) = TLabel
-- | Back-end interface for tensor expressions.
-- NOTE(review): the meaning of the @(Int, String, String)@ result of
-- 'run' is not evident here -- presumably exit code plus captured
-- output streams; confirm against an instance.
class FromTensor a where
  fromTensor :: Tensor n t a -> a
  toString :: Tensor n t a -> String
  run :: Tensor n t a -> IO (Int,String,String)
| junjihashimoto/mathflow | src/MathFlow/Core.hs | bsd-3-clause | 5,390 | 0 | 18 | 1,257 | 2,022 | 1,121 | 901 | -1 | -1 |
{-# LANGUAGE TupleSections #-}
module FilePaths
( historyFilePath
, historyFileName
, lastRunStateFilePath
, lastRunStateFileName
, configFileName
, xdgName
, locateConfig
, xdgSyntaxDir
, syntaxDirName
, Script(..)
, locateScriptPath
, getAllScripts
)
where
import Prelude ()
import Prelude.MH
import Data.Text ( unpack )
import System.Directory ( doesFileExist
, doesDirectoryExist
, getDirectoryContents
, getPermissions
, executable
)
import System.Environment.XDG.BaseDir ( getUserConfigFile
, getAllConfigFiles
, getUserConfigDir
)
import System.FilePath ( (</>), takeBaseName )
xdgName :: String
xdgName = "matterhorn"
historyFileName :: FilePath
historyFileName = "history.txt"
-- Per-team JSON file name recording the last run state.
lastRunStateFileName :: Text -> FilePath
lastRunStateFileName teamId = concat ["last_run_state_", unpack teamId, ".json"]
configFileName :: FilePath
configFileName = "config.ini"
historyFilePath :: IO FilePath
historyFilePath = getUserConfigFile xdgName historyFileName
lastRunStateFilePath :: Text -> IO FilePath
lastRunStateFilePath teamId =
getUserConfigFile xdgName (lastRunStateFileName teamId)
-- | Get the XDG path to the user-specific syntax definition directory.
-- The path does not necessarily exist.
xdgSyntaxDir :: IO FilePath
xdgSyntaxDir = (</> syntaxDirName) <$> getUserConfigDir xdgName
syntaxDirName :: FilePath
syntaxDirName = "syntax"
-- | Find a specified configuration file by looking in all of the
-- supported locations, in order: the current directory, the XDG config
-- paths, and finally /etc/matterhorn. Returns the first that exists.
locateConfig :: FilePath -> IO (Maybe FilePath)
locateConfig filename = do
  xdgLocations <- getAllConfigFiles xdgName filename
  let candidates = ("./" <> filename)
                   : xdgLocations
                  ++ ["/etc/matterhorn/" <> filename]
  listToMaybe <$> filterM doesFileExist candidates
scriptDirName :: FilePath
scriptDirName = "scripts"
data Script
= ScriptPath FilePath
| NonexecScriptPath FilePath
| ScriptNotFound
deriving (Eq, Read, Show)
-- Classify a file as an executable or non-executable script based on
-- its permission bits.
toScript :: FilePath -> IO (Script)
toScript fp = do
    perm <- getPermissions fp
    if executable perm
        then return (ScriptPath fp)
        else return (NonexecScriptPath fp)
-- Does the file have its executable permission bit set?
isExecutable :: FilePath -> IO Bool
isExecutable fp = executable <$> getPermissions fp
-- Resolve a script name to its location, searching the XDG script
-- directories and then /etc/matterhorn/scripts. Names beginning with a
-- dot are rejected outright.
locateScriptPath :: FilePath -> IO Script
-- The previous guard used 'head name', which crashed on an empty
-- name; this pattern match is total.
locateScriptPath ('.':_) = return ScriptNotFound
locateScriptPath name = do
    xdgLocations <- getAllConfigFiles xdgName scriptDirName
    let cmdLocations = [ xdgLoc ++ "/" ++ name
                       | xdgLoc <- xdgLocations
                       ] ++ [ "/etc/matterhorn/scripts/" <> name ]
    existingFiles <- filterM doesFileExist cmdLocations
    executables <- mapM toScript existingFiles
    -- First hit wins; an existing-but-non-executable file is still
    -- reported (as NonexecScriptPath) so callers can warn about it.
    return $ case executables of
        (path:_) -> path
        _ -> ScriptNotFound
-- | This returns a list of valid scripts, and a list of non-executable
-- scripts. Both lists contain base names, with hidden entries removed.
getAllScripts :: IO ([FilePath], [FilePath])
getAllScripts = do
  xdgLocations <- getAllConfigFiles xdgName scriptDirName
  let cmdLocations = xdgLocations ++ ["/etc/matterhorn/scripts"]
  -- List a directory's entries as full paths; a missing directory
  -- contributes nothing.
  let getCommands dir = do
        exists <- doesDirectoryExist dir
        if exists
          then map ((dir ++ "/") ++) `fmap` getDirectoryContents dir
          else return []
  -- Reject names starting with '.' (this also covers the "." and ".."
  -- entries returned by getDirectoryContents) and empty names.
  let isNotHidden f = case f of
        ('.':_) -> False
        [] -> False
        _ -> True
  allScripts <- concat `fmap` mapM getCommands cmdLocations
  execs <- filterM isExecutable allScripts
  nonexecs <- filterM (fmap not . isExecutable) allScripts
  -- Hidden-name filtering is applied to the base names, after the
  -- executable/non-executable split.
  return ( filter isNotHidden $ map takeBaseName execs
         , filter isNotHidden $ map takeBaseName nonexecs
         )
| aisamanra/matterhorn | src/FilePaths.hs | bsd-3-clause | 3,926 | 0 | 17 | 1,020 | 925 | 479 | 446 | 99 | 4 |
{-# OPTIONS -fglasgow-exts #-}
module GisServer.Data.S57 () where
import Data.Binary
import Data.Binary.Get
import Data.Bits
import Data.Char
import Data.Maybe
import Data.Tree
import qualified Data.Map as M
import Int
import Data.ByteString.Lazy
import GisServer.Data.Common
import GisServer.Data.ISO8211
data S57Data = UnsignedInt Int
| SignedInt Int
| ExplicitPoint Double
| ImplicitPoint Int
| CharData String
| BitField ByteString
-- Read an n-character string and parse it as a Double.
-- NOTE(review): 'read' will crash on malformed field contents --
-- consider a total parse if the input is untrusted.
getExplicitPoint :: Int -> Get S57Data
getExplicitPoint n =
  do v <- getStringN (lexLevel 0) n
     return $ ExplicitPoint $ read v
-- Read an n-byte integer field as an implicit point value.
getImplicitPoint :: Int -> Get S57Data
getImplicitPoint n =
  do v <- getIntN n
     return $ ImplicitPoint v
-- Read an n-byte signed integer field.
-- NOTE(review): 'getInt' is called with False here and with True in
-- getUnsignedInt below -- confirm which Boolean the flag denotes.
getSignedInt n =
  do v <- getInt False n
     return $ SignedInt v
-- Read an n-byte unsigned integer field.
getUnsignedInt n =
  do v <- getInt True n
     -- Previously wrapped in 'SignedInt' -- an apparent copy-paste slip
     -- that tagged unsigned fields with the wrong constructor.
     return $ UnsignedInt v
-- Read a character-data field: exactly @i@ characters when a length is
-- given, otherwise everything up to the record terminator.
getCharData :: LexicalLevel -> Maybe Int -> Get S57Data
getCharData l (Just i) =
  do s <- getStringN l i
     return $ CharData s
getCharData l Nothing =
  do s <- getStringTill l recordTermChar
     return $ CharData s
-- Read a bit field, rounding the bit count up to whole bytes.
getBitField bits = do
    bs <- getLazyByteString (fromIntegral paddedBytes)
    return (BitField bs)
  where
    (wholeBytes, leftover) = bits `divMod` 8
    paddedBytes = if leftover == 0 then wholeBytes else wholeBytes + 1
-- Map each field tag of a logical record to a parser for its data.
-- NOTE(review): this looks like an unfinished stub -- 'keys' and
-- 'field' are bound but unused, and the result is always the empty
-- map.
fieldParser :: LogicRecord -> M.Map String (Get S57Data)
fieldParser r =
  let keys = M.keys $ dir
      dir = lr_directory r
      field k = snd $ fromJust $ M.lookup k dir
  in M.fromList []
| alios/gisserver | GisServer/Data/S57.hs | bsd-3-clause | 1,560 | 0 | 11 | 422 | 518 | 262 | 256 | 52 | 2 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnSource]{Main pass of renamer}
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
module RnTypes (
-- Type related stuff
rnHsType, rnLHsType, rnLHsTypes, rnContext,
rnHsKind, rnLHsKind, rnLHsMaybeKind,
rnHsSigType, rnLHsInstType, rnConDeclFields,
newTyVarNameRn, rnLHsTypeWithWildCards,
rnHsSigTypeWithWildCards,
        -- Precedence related stuff
mkOpAppRn, mkNegAppRn, mkOpFormRn, mkConOpPatRn,
checkPrecMatch, checkSectionPrec,
-- Binding related stuff
warnContextQuantification, warnUnusedForAlls,
bindSigTyVarsFV, bindHsTyVars, rnHsBndrSig,
extractHsTyRdrTyVars, extractHsTysRdrTyVars,
extractRdrKindSigVars, extractDataDefnKindVars,
filterInScope
) where
import {-# SOURCE #-} RnSplice( rnSpliceType )
import DynFlags
import HsSyn
import RnHsDoc ( rnLHsDoc, rnMbLHsDoc )
import RnEnv
import TcRnMonad
import RdrName
import PrelNames
import TysPrim ( funTyConName )
import Name
import SrcLoc
import NameSet
import Util
import BasicTypes ( compareFixity, funTyFixity, negateFixity,
Fixity(..), FixityDirection(..) )
import Outputable
import FastString
import Maybes
import Data.List ( nub, nubBy, deleteFirstsBy )
import Control.Monad ( unless, when )
#if __GLASGOW_HASKELL__ < 709
import Data.Monoid ( mappend, mempty, mconcat )
#endif
#include "HsVersions.h"
{-
These type renamers are in a separate module, rather than in (say) RnSource,
to break several loops.
*********************************************************
* *
\subsection{Renaming types}
* *
*********************************************************
-}
-- | Rename a source-language type signature.  Such signatures use
-- *implicit* universal quantification of their free type variables.
rnHsSigType :: SDoc -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnHsSigType doc_str = rnLHsType (TypeSigCtx doc_str)
-- | Rename the head type of an instance or standalone-deriving
-- declaration, reporting an error when the head is not a class.
rnLHsInstType :: SDoc -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnLHsInstType doc_str ty
  = do { (ty', fvs) <- rnLHsType (GenericCtx doc_str) ty
       ; unless good_inst_ty (addErrAt (getLoc ty) (badInstTy ty))
       ; return (ty', fvs) }
  where
    -- A well-formed instance head splits into pieces whose head is a
    -- type-constructor occurrence.
    good_inst_ty
      = case splitLHsInstDeclTy_maybe (flattenTopLevelLHsForAllTy ty) of
          Just (_, _, L _ cls, _) -> isTcOcc (rdrNameOcc cls)
          Nothing                 -> False
-- | Error message emitted by 'rnLHsInstType' for a malformed instance head.
badInstTy :: LHsType RdrName -> SDoc
badInstTy ty = ptext (sLit "Malformed instance:") <+> ppr ty
{-
rnHsType is here because we call it from loadInstDecl, and I didn't
want a gratuitous knot.
Note [Context quantification]
-----------------------------
Variables in type signatures are implicitly quantified
when (1) they are in a type signature not beginning
with "forall" or (2) in any qualified type T => R.
We are phasing out (2) since it leads to inconsistencies
(Trac #4426):
data A = A (a -> a) is an error
data A = A (Eq a => a -> a) binds "a"
data A = A (Eq a => a -> b) binds "a" and "b"
data A = A (() => a -> b) binds "a" and "b"
f :: forall a. a -> b is an error
f :: forall a. () => a -> b is an error
f :: forall a. a -> (() => b) binds "a" and "b"
The -fwarn-context-quantification flag warns about
this situation. See rnHsTyKi for case HsForAllTy Qualified.
-}
-- | Located wrapper around 'rnHsTyKi': set the source span, rename the
-- payload, and re-attach the original location.
rnLHsTyKi :: Bool -- True <=> renaming a type, False <=> a kind
          -> HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnLHsTyKi is_type doc (L sp body)
  = setSrcSpan sp $ do
      (body', fvs) <- rnHsTyKi is_type doc body
      return (L sp body', fvs)
-- | Rename a located type.
rnLHsType :: HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnLHsType = rnLHsTyKi True
-- | Rename a located kind.
rnLHsKind :: HsDocContext -> LHsKind RdrName -> RnM (LHsKind Name, FreeVars)
rnLHsKind = rnLHsTyKi False
-- | Rename an optional kind annotation (e.g. a kind signature that may
-- be absent), threading free variables through.
rnLHsMaybeKind :: HsDocContext -> Maybe (LHsKind RdrName)
               -> RnM (Maybe (LHsKind Name), FreeVars)
rnLHsMaybeKind _ Nothing
  = return (Nothing, emptyFVs)
rnLHsMaybeKind doc (Just kind)
  = do { (kind', fvs) <- rnLHsKind doc kind
       ; return (Just kind', fvs) }
-- | Rename an unlocated type.
rnHsType :: HsDocContext -> HsType RdrName -> RnM (HsType Name, FreeVars)
rnHsType = rnHsTyKi True
-- | Rename an unlocated kind.
rnHsKind :: HsDocContext -> HsKind RdrName -> RnM (HsKind Name, FreeVars)
rnHsKind = rnHsTyKi False
-- | Workhorse for renaming one 'HsType' (or kind).  The Bool is True
-- when renaming a type, False when renaming a kind; forms that are only
-- legal in types are guarded with @ASSERT( isType )@.  Several cases
-- also validate extensions (TypeOperators, DataKinds, KindSignatures).
rnHsTyKi :: Bool -> HsDocContext -> HsType RdrName -> RnM (HsType Name, FreeVars)
rnHsTyKi isType doc ty@HsForAllTy{}
  = rnHsTyKiForAll isType doc (flattenTopLevelHsForAllTy ty)
rnHsTyKi isType _ (HsTyVar rdr_name)
  = do { name <- rnTyVar isType rdr_name
       ; return (HsTyVar name, unitFV name) }
-- If we see (forall a . ty), without foralls on, the forall will give
-- a sensible error message, but we don't want to complain about the dot too
-- Hence the jiggery pokery with ty1
rnHsTyKi isType doc ty@(HsOpTy ty1 (wrapper, L loc op) ty2)
  = ASSERT( isType ) setSrcSpan loc $
    do  { ops_ok <- xoptM Opt_TypeOperators
        ; op' <- if ops_ok
                 then rnTyVar isType op
                 else do { addErr (opTyErr op ty)
                         ; return (mkUnboundName op) }  -- Avoid double complaint
        ; let l_op' = L loc op'
        ; fix <- lookupTyFixityRn l_op'
        ; (ty1', fvs1) <- rnLHsType doc ty1
        ; (ty2', fvs2) <- rnLHsType doc ty2
        ; res_ty <- mkHsOpTyRn (\t1 t2 -> HsOpTy t1 (wrapper, l_op') t2)
                               op' fix ty1' ty2'
        ; return (res_ty, (fvs1 `plusFV` fvs2) `addOneFV` op') }
rnHsTyKi isType doc (HsParTy ty)
  = do { (ty', fvs) <- rnLHsTyKi isType doc ty
       ; return (HsParTy ty', fvs) }
rnHsTyKi isType doc (HsBangTy b ty)
  = ASSERT( isType )
    do { (ty', fvs) <- rnLHsType doc ty
       ; return (HsBangTy b ty', fvs) }
-- Record syntax is only legal in data/newtype declarations, which are
-- renamed elsewhere; meeting it here is an error, but we rename the
-- fields anyway to report any further errors inside them.
rnHsTyKi _ doc ty@(HsRecTy flds)
  = do { addErr (hang (ptext (sLit "Record syntax is illegal here:"))
                    2 (ppr ty))
       ; (flds', fvs) <- rnConDeclFields doc flds
       ; return (HsRecTy flds', fvs) }
rnHsTyKi isType doc (HsFunTy ty1 ty2)
  = do { (ty1', fvs1) <- rnLHsTyKi isType doc ty1
        -- Might find a for-all as the arg of a function type
       ; (ty2', fvs2) <- rnLHsTyKi isType doc ty2
        -- Or as the result.  This happens when reading Prelude.hi
        -- when we find return :: forall m. Monad m -> forall a. a -> m a
        -- Check for fixity rearrangements
       ; res_ty <- if isType
                   then mkHsOpTyRn HsFunTy funTyConName funTyFixity ty1' ty2'
                   else return (HsFunTy ty1' ty2')
       ; return (res_ty, fvs1 `plusFV` fvs2) }
rnHsTyKi isType doc listTy@(HsListTy ty)
  = do { data_kinds <- xoptM Opt_DataKinds
       ; unless (data_kinds || isType) (addErr (dataKindsErr isType listTy))
       ; (ty', fvs) <- rnLHsTyKi isType doc ty
       ; return (HsListTy ty', fvs) }
rnHsTyKi isType doc (HsKindSig ty k)
  = ASSERT( isType )
    do { kind_sigs_ok <- xoptM Opt_KindSignatures
       ; unless kind_sigs_ok (badSigErr False doc ty)
       ; (ty', fvs1) <- rnLHsType doc ty
       ; (k', fvs2) <- rnLHsKind doc k
       ; return (HsKindSig ty' k', fvs1 `plusFV` fvs2) }
rnHsTyKi isType doc (HsPArrTy ty)
  = ASSERT( isType )
    do { (ty', fvs) <- rnLHsType doc ty
       ; return (HsPArrTy ty', fvs) }
-- Unboxed tuples are allowed to have poly-typed arguments.  These
-- sometimes crop up as a result of CPR worker-wrappering dictionaries.
rnHsTyKi isType doc tupleTy@(HsTupleTy tup_con tys)
  = do { data_kinds <- xoptM Opt_DataKinds
       ; unless (data_kinds || isType) (addErr (dataKindsErr isType tupleTy))
       ; (tys', fvs) <- mapFvRn (rnLHsTyKi isType doc) tys
       ; return (HsTupleTy tup_con tys', fvs) }
-- Ensure that a type-level integer is nonnegative (#8306, #8412)
rnHsTyKi isType _ tyLit@(HsTyLit t)
  = do { data_kinds <- xoptM Opt_DataKinds
       ; unless data_kinds (addErr (dataKindsErr isType tyLit))
       ; when (negLit t) (addErr negLitErr)
       ; return (HsTyLit t, emptyFVs) }
  where
    negLit (HsStrTy _ _) = False
    negLit (HsNumTy _ i) = i < 0
    negLitErr = ptext (sLit "Illegal literal in type (type literals must not be negative):") <+> ppr tyLit
rnHsTyKi isType doc (HsAppTy ty1 ty2)
  = do { (ty1', fvs1) <- rnLHsTyKi isType doc ty1
       ; (ty2', fvs2) <- rnLHsTyKi isType doc ty2
       ; return (HsAppTy ty1' ty2', fvs1 `plusFV` fvs2) }
rnHsTyKi isType doc (HsIParamTy n ty)
  = ASSERT( isType )
    do { (ty', fvs) <- rnLHsType doc ty
       ; return (HsIParamTy n ty', fvs) }
rnHsTyKi isType doc (HsEqTy ty1 ty2)
  = ASSERT( isType )
    do { (ty1', fvs1) <- rnLHsType doc ty1
       ; (ty2', fvs2) <- rnLHsType doc ty2
       ; return (HsEqTy ty1' ty2', fvs1 `plusFV` fvs2) }
rnHsTyKi isType _ (HsSpliceTy sp k)
  = ASSERT( isType )
    rnSpliceType sp k
rnHsTyKi isType doc (HsDocTy ty haddock_doc)
  = ASSERT( isType )
    do { (ty', fvs) <- rnLHsType doc ty
       ; haddock_doc' <- rnLHsDoc haddock_doc
       ; return (HsDocTy ty' haddock_doc', fvs) }
rnHsTyKi isType _ (HsCoreTy ty)
  = ASSERT( isType )
    return (HsCoreTy ty, emptyFVs)
    -- The emptyFVs probably isn't quite right
    -- but I don't think it matters
rnHsTyKi _ _ (HsWrapTy {})
  = panic "rnHsTyKi"
rnHsTyKi isType doc ty@(HsExplicitListTy k tys)
  = ASSERT( isType )
    do { data_kinds <- xoptM Opt_DataKinds
       ; unless data_kinds (addErr (dataKindsErr isType ty))
       ; (tys', fvs) <- rnLHsTypes doc tys
       ; return (HsExplicitListTy k tys', fvs) }
rnHsTyKi isType doc ty@(HsExplicitTupleTy kis tys)
  = ASSERT( isType )
    do { data_kinds <- xoptM Opt_DataKinds
       ; unless data_kinds (addErr (dataKindsErr isType ty))
       ; (tys', fvs) <- rnLHsTypes doc tys
       ; return (HsExplicitTupleTy kis tys', fvs) }
-- An anonymous wild card gets a fresh internal name "_".
rnHsTyKi isType _doc (HsWildCardTy (AnonWildCard PlaceHolder))
  = ASSERT( isType )
    do { loc <- getSrcSpanM
       ; uniq <- newUnique
       ; let name = mkInternalName uniq (mkTyVarOcc "_") loc
       ; return (HsWildCardTy (AnonWildCard name), emptyFVs) }
         -- emptyFVs: this occurrence does not refer to a
         --           binding, so don't treat it as a free variable
rnHsTyKi isType doc (HsWildCardTy (NamedWildCard rdr_name))
  = ASSERT( isType )
    do { not_in_scope <- isNothing `fmap` lookupOccRn_maybe rdr_name
       ; when not_in_scope $
         -- When the named wild card is not in scope, it means it shouldn't be
         -- there in the first place, i.e. rnHsSigTypeWithWildCards wasn't
         -- used, so fail.
         failWith $ text "Unexpected wild card:" <+> quotes (ppr rdr_name) $$
                    docOfHsDocContext doc
       ; name <- rnTyVar isType rdr_name
       ; return (HsWildCardTy (NamedWildCard name), emptyFVs) }
         -- emptyFVs: this occurrence does not refer to a
         --           binding, so don't treat it as a free variable
--------------
-- | Rename a (flattened) HsForAllTy.  The three cases correspond to the
-- three 'HsExplicitFlag's: implicit quantification, qualified types
-- (context but no forall; being phased out, see Note [Context
-- quantification]), and an explicit forall.
rnHsTyKiForAll :: Bool -> HsDocContext -> HsType RdrName
               -> RnM (HsType Name, FreeVars)
rnHsTyKiForAll isType doc (HsForAllTy Implicit extra _ lctxt@(L _ ctxt) ty)
  = ASSERT( isType ) do
        -- Implicit quantifiction in source code (no kinds on tyvars)
        -- Given the signature  C => T  we universally quantify
        -- over FV(T) \ {in-scope-tyvars}
    rdr_env <- getLocalRdrEnv
    loc <- getSrcSpanM
    let
        (forall_kvs, forall_tvs) = filterInScope rdr_env $
                                   extractHsTysRdrTyVars (ty:ctxt)
           -- In for-all types we don't bring in scope
           -- kind variables mentioned in kind signatures
           -- (Well, not yet anyway....)
           --    f :: Int -> T (a::k)    -- Not allowed
           -- The filterInScope is to ensure that we don't quantify over
           -- type variables that are in scope; when GlasgowExts is off,
           -- there usually won't be any, except for class signatures:
           --   class C a where { op :: a -> a }
        tyvar_bndrs = userHsTyVarBndrs loc forall_tvs
    rnForAll doc Implicit extra forall_kvs (mkHsQTvs tyvar_bndrs) lctxt ty
rnHsTyKiForAll isType doc
               fulltype@(HsForAllTy Qualified extra _ lctxt@(L _ ctxt) ty)
  = ASSERT( isType ) do
    rdr_env <- getLocalRdrEnv
    loc <- getSrcSpanM
    let
        (forall_kvs, forall_tvs) = filterInScope rdr_env $
                                   extractHsTysRdrTyVars (ty:ctxt)
        tyvar_bndrs = userHsTyVarBndrs loc forall_tvs
        in_type_doc = ptext (sLit "In the type") <+> quotes (ppr fulltype)
    -- See Note [Context quantification]
    warnContextQuantification (in_type_doc $$ docOfHsDocContext doc) tyvar_bndrs
    rnForAll doc Implicit extra forall_kvs (mkHsQTvs tyvar_bndrs) lctxt ty
rnHsTyKiForAll isType doc
               ty@(HsForAllTy Explicit extra forall_tyvars lctxt@(L _ ctxt) tau)
  = ASSERT( isType ) do {      -- Explicit quantification.
         -- Check that the forall'd tyvars are actually
         -- mentioned in the type, and produce a warning if not
         let (kvs, mentioned) = extractHsTysRdrTyVars (tau:ctxt)
             in_type_doc = ptext (sLit "In the type") <+> quotes (ppr ty)
       ; warnUnusedForAlls (in_type_doc $$ docOfHsDocContext doc)
                           forall_tyvars mentioned
       ; traceRn (text "rnHsTyKiForAll:Exlicit" <+> vcat
            [ppr forall_tyvars, ppr lctxt,ppr tau ])
       ; rnForAll doc Explicit extra kvs forall_tyvars lctxt tau }
-- The following should never happen but keeps the completeness checker happy
rnHsTyKiForAll isType doc ty = rnHsTyKi isType doc ty
--------------
-- | Look up a variable occurrence in the type namespace or the kind
-- namespace, depending on what we are renaming.
rnTyVar :: Bool -> RdrName -> RnM Name
rnTyVar is_type = if is_type then lookupTypeOccRn else lookupKindOccRn
--------------
-- | Rename a list of located types, unioning their free variables.
rnLHsTypes :: HsDocContext -> [LHsType RdrName]
           -> RnM ([LHsType Name], FreeVars)
rnLHsTypes doc = mapFvRn (rnLHsType doc)
-- | Rename a quantified type: bind the kind and type variables, then
-- rename the context and body under those bindings.  When there is
-- nothing to quantify at all, the forall is dropped entirely.
rnForAll :: HsDocContext -> HsExplicitFlag
         -> Maybe SrcSpan           -- Location of an extra-constraints wildcard
         -> [RdrName]               -- Kind variables
         -> LHsTyVarBndrs RdrName   -- Type variables
         -> LHsContext RdrName -> LHsType RdrName
         -> RnM (HsType Name, FreeVars)
rnForAll doc exp extra kvs forall_tyvars ctxt ty
  | null kvs, null (hsQTvBndrs forall_tyvars), null (unLoc ctxt), isNothing extra
  = rnHsType doc (unLoc ty)
        -- One reason for this case is that a type like Int#
        -- starts off as (HsForAllTy Implicit Nothing [] Int), in case
        -- there is some quantification.  Now that we have quantified
        -- and discovered there are no type variables, it's nicer to turn
        -- it into plain Int.  If it were Int# instead of Int, we'd actually
        -- get an error, because the body of a genuine for-all is
        -- of kind *.
  | otherwise
  = bindHsTyVars doc Nothing kvs forall_tyvars $ \ new_tyvars ->
    do { (new_ctxt, fvs1) <- rnContext doc ctxt
       ; (new_ty, fvs2) <- rnLHsType doc ty
       ; return (HsForAllTy exp extra new_tyvars new_ctxt new_ty, fvs1 `plusFV` fvs2) }
        -- Retain the same implicit/explicit flag as before
        -- so that we can later print it correctly
---------------
-- | Bring the type variables of a separate type signature into scope
-- just before renaming the corresponding function definition.  Without
-- -XScopedTypeVariables this is a no-op.
bindSigTyVarsFV :: [Name]
                -> RnM (a, FreeVars)
                -> RnM (a, FreeVars)
bindSigTyVarsFV tvs thing_inside
  = do { scoped_ok <- xoptM Opt_ScopedTypeVariables
       ; if scoped_ok
         then bindLocalNamesFV tvs thing_inside
         else thing_inside }
---------------
-- | Bring a set of kind and type variable binders into scope and run a
-- computation under them, validating PolyKinds/KindSignatures usage and
-- checking for duplicate, shadowed, or kind/type-overlapping binders.
bindHsTyVars :: HsDocContext
             -> Maybe a                 -- Just _  => an associated type decl
             -> [RdrName]               -- Kind variables from scope
             -> LHsTyVarBndrs RdrName   -- Type variables
             -> (LHsTyVarBndrs Name -> RnM (b, FreeVars))
             -> RnM (b, FreeVars)
-- (a) Bring kind variables into scope
--     both (i)  passed in (kv_bndrs)
--     and  (ii) mentioned in the kinds of tv_bndrs
-- (b) Bring type variables into scope
bindHsTyVars doc mb_assoc kv_bndrs tv_bndrs thing_inside
  = do { rdr_env <- getLocalRdrEnv
       ; let tvs = hsQTvBndrs tv_bndrs
             kvs_from_tv_bndrs = [ kv | L _ (KindedTyVar _ kind) <- tvs
                                      , let (_, kvs) = extractHsTyRdrTyVars kind
                                      , kv <- kvs ]
             all_kvs' = nub (kv_bndrs ++ kvs_from_tv_bndrs)
             all_kvs  = filterOut (`elemLocalRdrEnv` rdr_env) all_kvs'
             overlap_kvs = [ kv | kv <- all_kvs, any ((==) kv . hsLTyVarName) tvs ]
                -- These variables appear both as kind and type variables
                -- in the same declaration; eg  type family  T (x :: *) (y :: x)
                -- We disallow this: too confusing!
       ; poly_kind <- xoptM Opt_PolyKinds
       ; unless (poly_kind || null all_kvs)
                (addErr (badKindBndrs doc all_kvs))
       ; unless (null overlap_kvs)
                (addErr (overlappingKindVars doc overlap_kvs))
       ; loc <- getSrcSpanM
       ; kv_names <- mapM (newLocalBndrRn . L loc) all_kvs
       ; bindLocalNamesFV kv_names $
    do { let tv_names_w_loc = hsLTyVarLocNames tv_bndrs
             rn_tv_bndr :: LHsTyVarBndr RdrName -> RnM (LHsTyVarBndr Name, FreeVars)
             rn_tv_bndr (L loc (UserTyVar rdr))
               = do { nm <- newTyVarNameRn mb_assoc rdr_env loc rdr
                    ; return (L loc (UserTyVar nm), emptyFVs) }
             rn_tv_bndr (L loc (KindedTyVar (L lv rdr) kind))
               = do { sig_ok <- xoptM Opt_KindSignatures
                    ; unless sig_ok (badSigErr False doc kind)
                    ; nm <- newTyVarNameRn mb_assoc rdr_env loc rdr
                    ; (kind', fvs) <- rnLHsKind doc kind
                    ; return (L loc (KindedTyVar (L lv nm) kind'), fvs) }
       -- Check for duplicate or shadowed tyvar bindrs
       ; checkDupRdrNames tv_names_w_loc
       ; when (isNothing mb_assoc) (checkShadowedRdrNames tv_names_w_loc)
       ; (tv_bndrs', fvs1) <- mapFvRn rn_tv_bndr tvs
       ; (res, fvs2) <- bindLocalNamesFV (map hsLTyVarName tv_bndrs') $
                        do { inner_rdr_env <- getLocalRdrEnv
                           ; traceRn (text "bhtv" <+> vcat
                                 [ ppr tvs, ppr kv_bndrs, ppr kvs_from_tv_bndrs
                                 , ppr $ map (`elemLocalRdrEnv` rdr_env) all_kvs'
                                 , ppr $ map (getUnique . rdrNameOcc) all_kvs'
                                 , ppr all_kvs, ppr rdr_env, ppr inner_rdr_env ])
                           ; thing_inside (HsQTvs { hsq_tvs = tv_bndrs', hsq_kvs = kv_names }) }
       ; return (res, fvs1 `plusFV` fvs2) } }
-- | Make a Name for a type-variable binder.  Inside an associated type
-- declaration (mb_assoc is Just) we reuse the Name already bound by the
-- parent class declaration, when the environment has one; otherwise we
-- make a fresh local binder.
newTyVarNameRn :: Maybe a -> LocalRdrEnv -> SrcSpan -> RdrName -> RnM Name
newTyVarNameRn mb_assoc rdr_env loc rdr
  = case (mb_assoc, lookupLocalRdrEnv rdr_env rdr) of
      (Just _, Just n) -> return n
      _                -> newLocalBndrRn (L loc rdr)
--------------------------------
-- | Rename a pattern type signature (HsWithBndrs): bind any kind and
-- type variables not already in scope, rename the signature (including
-- wild cards), and run the continuation under those bindings.  Requires
-- -XScopedTypeVariables.
rnHsBndrSig :: HsDocContext
            -> HsWithBndrs RdrName (LHsType RdrName)
            -> (HsWithBndrs Name (LHsType Name) -> RnM (a, FreeVars))
            -> RnM (a, FreeVars)
rnHsBndrSig doc (HsWB { hswb_cts = ty@(L loc _) }) thing_inside
  = do { sig_ok <- xoptM Opt_ScopedTypeVariables
       ; unless sig_ok (badSigErr True doc ty)
       ; let (kv_bndrs, tv_bndrs) = extractHsTyRdrTyVars ty
       ; name_env <- getLocalRdrEnv
       -- Only bind variables that are not already in scope.
       ; tv_names <- newLocalBndrsRn [L loc tv | tv <- tv_bndrs
                                               , not (tv `elemLocalRdrEnv` name_env) ]
       ; kv_names <- newLocalBndrsRn [L loc kv | kv <- kv_bndrs
                                               , not (kv `elemLocalRdrEnv` name_env) ]
       ; bindLocalNamesFV kv_names $
         bindLocalNamesFV tv_names $
    do { (ty', fvs1, wcs) <- rnLHsTypeWithWildCards doc ty
       ; (res, fvs2) <- thing_inside (HsWB { hswb_cts = ty', hswb_kvs = kv_names,
                                             hswb_tvs = tv_names, hswb_wcs = wcs })
       ; return (res, fvs1 `plusFV` fvs2) } }
-- | Error: the same variable is used as both a kind and a type variable
-- in one declaration (see 'bindHsTyVars').
overlappingKindVars :: HsDocContext -> [RdrName] -> SDoc
overlappingKindVars doc kvs
  = vcat [ ptext (sLit "Kind variable") <> plural kvs <+>
           ptext (sLit "also used as type variable") <> plural kvs
           <> colon <+> pprQuotedList kvs
         , docOfHsDocContext doc ]
-- | Error: kind variables used without -XPolyKinds.
badKindBndrs :: HsDocContext -> [RdrName] -> SDoc
badKindBndrs doc kvs
  = vcat [ hang (ptext (sLit "Unexpected kind variable") <> plural kvs
                 <+> pprQuotedList kvs)
              2 (ptext (sLit "Perhaps you intended to use PolyKinds"))
         , docOfHsDocContext doc ]
-- | Error: a type (kind) signature used without the extension that
-- enables it; suggests ScopedTypeVariables or KindSignatures.
badSigErr :: Bool -> HsDocContext -> LHsType RdrName -> TcM ()
badSigErr is_type doc (L loc ty)
  = setSrcSpan loc $ addErr $
    vcat [ hang (ptext (sLit "Illegal") <+> what
                 <+> ptext (sLit "signature:") <+> quotes (ppr ty))
              2 (ptext (sLit "Perhaps you intended to use") <+> flag)
         , docOfHsDocContext doc ]
  where
    what | is_type   = ptext (sLit "type")
         | otherwise = ptext (sLit "kind")
    flag | is_type   = ptext (sLit "ScopedTypeVariables")
         | otherwise = ptext (sLit "KindSignatures")
-- | Error: promoted (data-kind) syntax used without -XDataKinds.
dataKindsErr :: Bool -> HsType RdrName -> SDoc
dataKindsErr is_type thing
  = hang (ptext (sLit "Illegal") <+> what <> colon <+> quotes (ppr thing))
       2 (ptext (sLit "Perhaps you intended to use DataKinds"))
  where
    what | is_type   = ptext (sLit "type")
         | otherwise = ptext (sLit "kind")
--------------------------------
-- | Variant of @rnHsSigType@ that supports wild cards.  Also returns
-- the wild cards to bind.
rnHsSigTypeWithWildCards :: SDoc -> LHsType RdrName
                         -> RnM (LHsType Name, FreeVars, [Name])
rnHsSigTypeWithWildCards doc_str ty
  = rnLHsTypeWithWildCards (TypeSigCtx doc_str) ty
-- | Variant of @rnLHsType@ that supports wild cards. The third element of the
-- tuple consists of the freshly generated names of the anonymous wild cards
-- occurring in the type, as well as the names of the named wild cards in the
-- type that are not yet in scope.
rnLHsTypeWithWildCards  :: HsDocContext -> LHsType RdrName
                        -> RnM (LHsType Name, FreeVars, [Name])
rnLHsTypeWithWildCards doc ty
  = do { -- When there is a wild card at the end of the context, remove it and
         -- add its location as the extra-constraints wild card in the
         -- HsForAllTy.
         let ty' = extractExtraCtsWc `fmap` flattenTopLevelLHsForAllTy ty
       ; checkValidPartialType doc ty'
       ; rdr_env <- getLocalRdrEnv
       -- Filter out named wildcards that are already in scope
       ; let (_, wcs) = collectWildCards ty'
             nwcs = [L loc n | L loc (NamedWildCard n) <- wcs
                             , not (elemLocalRdrEnv n rdr_env) ]
       ; bindLocatedLocalsRn nwcs $ \nwcs' -> do {
         (ty'', fvs) <- rnLHsType doc ty'
       -- Add the anonymous wildcards that have been given names during
       -- renaming
       ; let (_, wcs') = collectWildCards ty''
             awcs = filter (isAnonWildCard . unLoc) wcs'
       ; return (ty'', fvs, nwcs' ++ map (HsSyn.wildCardName . unLoc) awcs) } }
  where
    -- Turn a trailing anonymous wild card in the context into the
    -- extra-constraints wild card slot of the HsForAllTy.
    extractExtraCtsWc (HsForAllTy flag _ bndrs (L l ctxt) ty)
      | Just (ctxt', ct) <- snocView ctxt
      , L lx (HsWildCardTy (AnonWildCard _)) <- ignoreParens ct
      = HsForAllTy flag (Just lx) bndrs (L l ctxt') ty
    extractExtraCtsWc ty = ty
-- | Extract all wild cards from a type. The named and anonymous
-- extra-constraints wild cards are returned separately to be able to give
-- more accurate error messages.
collectWildCards
  :: Eq name => LHsType name
  -> ([Located (HsWildCardInfo name)],  -- extra-constraints wild cards
      [Located (HsWildCardInfo name)])  -- wild cards
collectWildCards lty = (nubBy sameWildCard extra, nubBy sameWildCard wcs)
  where
    (extra, wcs) = go lty
    -- Walk the type structurally, accumulating both lists via the pair
    -- Monoid instance.
    go (L loc ty) = case ty of
      HsAppTy ty1 ty2         -> go ty1 `mappend` go ty2
      HsFunTy ty1 ty2         -> go ty1 `mappend` go ty2
      HsListTy ty             -> go ty
      HsPArrTy ty             -> go ty
      HsTupleTy _ tys         -> gos tys
      HsOpTy ty1 _ ty2        -> go ty1 `mappend` go ty2
      HsParTy ty              -> go ty
      HsIParamTy _ ty         -> go ty
      HsEqTy ty1 ty2          -> go ty1 `mappend` go ty2
      HsKindSig ty kind       -> go ty `mappend` go kind
      HsDocTy ty _            -> go ty
      HsBangTy _ ty           -> go ty
      HsRecTy flds            -> gos $ map (cd_fld_type . unLoc) flds
      HsExplicitListTy _ tys  -> gos tys
      HsExplicitTupleTy _ tys -> gos tys
      HsWrapTy _ ty           -> go (L loc ty)
      -- Interesting cases
      HsWildCardTy wc         -> ([], [L loc wc])
      HsForAllTy _ _ _ (L _ ctxt) ty -> ctxtWcs `mappend` go ty
        where
          ctxt' = map ignoreParens ctxt
          extraWcs  = [L l wc | L l (HsWildCardTy wc) <- ctxt']
          (_, wcs) = gos ctxt'
          -- Remove extra-constraints wild cards from wcs
          ctxtWcs = (extraWcs, deleteFirstsBy sameWildCard
                               (nubBy sameWildCard wcs) extraWcs)
      -- HsQuasiQuoteTy, HsSpliceTy, HsCoreTy, HsTyLit
      _ -> mempty
    gos = mconcat . map go
-- | Check the validity of a partial type signature. The following things are
-- checked:
--
-- * Named extra-constraints wild cards aren't allowed,
-- e.g. invalid: @(Show a, _x) => a -> String@.
--
-- * There is only one extra-constraints wild card in the context and it must
-- come last, e.g. invalid: @(_, Show a) => a -> String@
-- or @(_, Show a, _) => a -> String@.
--
-- * There should be no unnamed wild cards in the context.
--
-- * An extra-constraints wild card can only occur in the top-level context.
-- This would be invalid: @(Eq a, _) => a -> (Num a, _) => a -> Bool@.
--
-- * Named wild cards occurring in the context must also occur in the monotype.
--
-- When an invalid wild card is found, we fail with an error.
checkValidPartialType :: HsDocContext -> LHsType RdrName -> RnM ()
checkValidPartialType doc lty
  = do { whenNonEmpty isNamedWildCard inExtra $ \(L loc _) ->
         failAt loc $ typeDoc $$
         text "An extra-constraints wild card cannot be named" $$
         docOfHsDocContext doc
       ; whenNonEmpty isAnonWildCard extraTopLevel $ \(L loc _) ->
         failAt loc $ typeDoc $$
         -- If there was a valid extra-constraints wild card, it should have
         -- already been removed and its location should be stored in the
         -- HsForAllTy
         (if isJust extra
          then text "Only a single extra-constraints wild card is allowed"
          else fcat [ text "An extra-constraints wild card must occur"
                    , text "at the end of the constraints" ]) $$
         docOfHsDocContext doc
       ; whenNonEmpty isAnonWildCard inCtxt $ \(L loc _) ->
         failAt loc $ typeDoc $$
         text "Anonymous wild cards are not allowed in constraints" $$
         docOfHsDocContext doc
       ; whenNonEmpty isAnonWildCard nestedExtra $ \(L loc _) ->
         failAt loc $ typeDoc $$
         fcat [ text "An extra-constraints wild card is only allowed"
              , text "in the top-level context" ] $$
         docOfHsDocContext doc
       ; whenNonEmpty isNamedWildCard inCtxtNotInTau $ \(L loc name) ->
         failAt loc $ typeDoc $$
         fcat [ text "The named wild card" <+> quotes (ppr name) <> space
              , text "is only allowed in the constraints"
              , text "when it also occurs in the rest of the type" ] $$
         docOfHsDocContext doc }
  where
    typeDoc               = hang (text "Invalid partial type:") 2 (ppr lty)
    (extra, ctxt, tau)    = splitPartialType lty
    (inExtra, _)          = collectWildCards lty
    (nestedExtra, inTau)  = collectWildCards tau
    (_, inCtxt)           = mconcat $ map collectWildCards ctxt
    inCtxtNotInTau        = deleteFirstsBy sameWildCard inCtxt inTau
    extraTopLevel         = deleteFirstsBy sameWildCard inExtra nestedExtra
    splitPartialType (L _ (HsForAllTy _ extra _ (L _ ctxt) ty))
      = (extra, map ignoreParens ctxt, ty)
    splitPartialType ty = (Nothing, [], ty)
    -- Run f on the first wild card (if any) satisfying the predicate.
    whenNonEmpty test wcs f
      = whenIsJust (listToMaybe $ filter (test . unLoc) wcs) f
{-
*********************************************************
* *
\subsection{Contexts and predicates}
* *
*********************************************************
-}
-- | Rename the fields of a record-style constructor declaration,
-- unioning their free variables.
rnConDeclFields :: HsDocContext -> [LConDeclField RdrName]
                -> RnM ([LConDeclField Name], FreeVars)
rnConDeclFields doc = mapFvRn (rnField doc)
-- | Rename one record field declaration: its selector names, its type,
-- and any attached Haddock documentation.
rnField :: HsDocContext -> LConDeclField RdrName
        -> RnM (LConDeclField Name, FreeVars)
rnField doc (L l (ConDeclField names ty mb_doc)) = do
    names'      <- mapM lookupLocatedTopBndrRn names
    (ty', fvs)  <- rnLHsType doc ty
    mb_doc'     <- rnMbLHsDoc mb_doc
    return (L l (ConDeclField names' ty' mb_doc'), fvs)
-- | Rename a located context (a list of predicate types).
rnContext :: HsDocContext -> LHsContext RdrName -> RnM (LHsContext Name, FreeVars)
rnContext doc (L loc preds) = do
    (preds', fvs) <- rnLHsTypes doc preds
    return (L loc preds', fvs)
{-
************************************************************************
* *
Fixities and precedence parsing
* *
************************************************************************
@mkOpAppRn@ deals with operator fixities. The argument expressions
are assumed to be already correctly arranged. It needs the fixities
recorded in the OpApp nodes, because fixity info applies to the things
the programmer actually wrote, so you can't find it out from the Name.
Furthermore, the second argument is guaranteed not to be another
operator application. Why? Because the parser parses all
operator applications left-associatively, EXCEPT negation, which
we need to handle specially.
Infix types are read in a *right-associative* way, so that
a `op` b `op` c
is always read in as
a `op` (b `op` c)
mkHsOpTyRn rearranges where necessary. The two arguments
have already been renamed and rearranged. It's made rather tiresome
by the presence of ->, which is a separate syntactic construct.
-}
---------------
-- | Build (ty1 `op1` (ty21 `op2` ty22)), rearranging for fixity where
-- necessary.  The two arguments have already been renamed and
-- rearranged; -> is handled as a pseudo-operator via funTyFixity.
mkHsOpTyRn :: (LHsType Name -> LHsType Name -> HsType Name)
           -> Name -> Fixity -> LHsType Name -> LHsType Name
           -> RnM (HsType Name)
mkHsOpTyRn mk1 pp_op1 fix1 ty1 (L loc2 (HsOpTy ty21 (w2, op2) ty22))
  = do  { fix2 <- lookupTyFixityRn op2
        ; mk_hs_op_ty mk1 pp_op1 fix1 ty1
                      (\t1 t2 -> HsOpTy t1 (w2, op2) t2)
                      (unLoc op2) fix2 ty21 ty22 loc2 }
mkHsOpTyRn mk1 pp_op1 fix1 ty1 (L loc2 (HsFunTy ty21 ty22))
  = mk_hs_op_ty mk1 pp_op1 fix1 ty1
                HsFunTy funTyConName funTyFixity ty21 ty22 loc2
mkHsOpTyRn mk1 _ _ ty1 ty2              -- Default case, no rearrangment
  = return (mk1 ty1 ty2)
---------------
-- | Compare the fixities of the two operators and either keep the
-- right-associated form, rebracket to the left, or report a
-- precedence-parsing error (keeping the right-associated form).
mk_hs_op_ty :: (LHsType Name -> LHsType Name -> HsType Name)
            -> Name -> Fixity -> LHsType Name
            -> (LHsType Name -> LHsType Name -> HsType Name)
            -> Name -> Fixity -> LHsType Name -> LHsType Name -> SrcSpan
            -> RnM (HsType Name)
mk_hs_op_ty mk1 op1 fix1 ty1
            mk2 op2 fix2 ty21 ty22 loc2
  | nofix_error     = do { precParseErr (op1,fix1) (op2,fix2)
                         ; return (mk1 ty1 (L loc2 (mk2 ty21 ty22))) }
  | associate_right = return (mk1 ty1 (L loc2 (mk2 ty21 ty22)))
  | otherwise       = do { -- Rearrange to ((ty1 `op1` ty21) `op2` ty22)
                           new_ty <- mkHsOpTyRn mk1 op1 fix1 ty1 ty21
                         ; return (mk2 (noLoc new_ty) ty22) }
  where
    (nofix_error, associate_right) = compareFixity fix1 fix2
---------------------------
-- | Build an operator application, rearranging for fixity.  The left
-- operand is already correctly arranged; the right operand is never an
-- OpApp (the parser left-associates), but may be a NegApp.  Note the
-- guard clauses deliberately fall through to later equations when
-- neither guard fires.
mkOpAppRn :: LHsExpr Name             -- Left operand; already rearranged
          -> LHsExpr Name -> Fixity   -- Operator and fixity
          -> LHsExpr Name             -- Right operand (not an OpApp, but might
                                      -- be a NegApp)
          -> RnM (HsExpr Name)
-- (e11 `op1` e12) `op2` e2
mkOpAppRn e1@(L _ (OpApp e11 op1 fix1 e12)) op2 fix2 e2
  | nofix_error
  = do precParseErr (get_op op1,fix1) (get_op op2,fix2)
       return (OpApp e1 op2 fix2 e2)
  | associate_right = do
    new_e <- mkOpAppRn e12 op2 fix2 e2
    return (OpApp e11 op1 fix1 (L loc' new_e))
  where
    loc'= combineLocs e12 e2
    (nofix_error, associate_right) = compareFixity fix1 fix2
---------------------------
--      (- neg_arg) `op` e2
mkOpAppRn e1@(L _ (NegApp neg_arg neg_name)) op2 fix2 e2
  | nofix_error
  = do precParseErr (negateName,negateFixity) (get_op op2,fix2)
       return (OpApp e1 op2 fix2 e2)
  | associate_right
  = do new_e <- mkOpAppRn neg_arg op2 fix2 e2
       return (NegApp (L loc' new_e) neg_name)
  where
    loc' = combineLocs neg_arg e2
    (nofix_error, associate_right) = compareFixity negateFixity fix2
---------------------------
--      e1 `op` - neg_arg
mkOpAppRn e1 op1 fix1 e2@(L _ (NegApp _ _))     -- NegApp can occur on the right
  | not associate_right                 -- We *want* right association
  = do precParseErr (get_op op1, fix1) (negateName, negateFixity)
       return (OpApp e1 op1 fix1 e2)
  where
    (_, associate_right) = compareFixity fix1 negateFixity
---------------------------
--      Default case
mkOpAppRn e1 op fix e2                  -- Default case, no rearrangment
  = ASSERT2( right_op_ok fix (unLoc e2),
             ppr e1 $$ text "---" $$ ppr op $$ text "---" $$ ppr fix $$ text "---" $$ ppr e2
    )
    return (OpApp e1 op fix e2)
----------------------------
-- | Extract the Name from an operator expression.
-- An unbound name could be either HsVar or HsUnboundVar
-- See RnExpr.rnUnboundVar
get_op :: LHsExpr Name -> Name
get_op (L _ (HsVar n))          = n
get_op (L _ (HsUnboundVar occ)) = mkUnboundName (mkRdrUnqual occ)
get_op other                    = pprPanic "get_op" (ppr other)
-- The parser left-associates everything, but derived instances may have
-- correctly-associated operator applications in the right operand, so
-- we merely check that the right operand is acceptably associated.
right_op_ok :: Fixity -> HsExpr Name -> Bool
right_op_ok fix1 (OpApp _ _ fix2 _)
  = let (error_please, associate_right) = compareFixity fix1 fix2
    in not error_please && associate_right
right_op_ok _ _ = True
-- Parser initially makes negation bind more tightly than any other operator
-- And "deriving" code should respect this (use HsPar if not)
-- | Build a negation application; the argument must not itself be an
-- operator application (asserted).
mkNegAppRn :: LHsExpr id -> SyntaxExpr id -> RnM (HsExpr id)
mkNegAppRn neg_arg neg_name
  = ASSERT( not_op_app (unLoc neg_arg) )
    return (NegApp neg_arg neg_name)
-- | True iff the expression is not an operator application.
not_op_app :: HsExpr id -> Bool
not_op_app e = case e of
  OpApp {} -> False
  _        -> True
---------------------------
-- | Build an arrow-notation operator form, rearranging for fixity
-- (the command analogue of 'mkOpAppRn').
mkOpFormRn :: LHsCmdTop Name            -- Left operand; already rearranged
           -> LHsExpr Name -> Fixity    -- Operator and fixity
           -> LHsCmdTop Name            -- Right operand (not an infix)
           -> RnM (HsCmd Name)
-- (e11 `op1` e12) `op2` e2
mkOpFormRn a1@(L loc (HsCmdTop (L _ (HsCmdArrForm op1 (Just fix1) [a11,a12])) _ _ _))
           op2 fix2 a2
  | nofix_error
  = do precParseErr (get_op op1,fix1) (get_op op2,fix2)
       return (HsCmdArrForm op2 (Just fix2) [a1, a2])
  | associate_right
  = do new_c <- mkOpFormRn a12 op2 fix2 a2
       return (HsCmdArrForm op1 (Just fix1)
               [a11, L loc (HsCmdTop (L loc new_c)
               placeHolderType placeHolderType [])])
        -- TODO: locs are wrong
  where
    (nofix_error, associate_right) = compareFixity fix1 fix2
--      Default case
mkOpFormRn arg1 op fix arg2                     -- Default case, no rearrangment
  = return (HsCmdArrForm op (Just fix) [arg1, arg2])
--------------------------------------
-- | Build an infix constructor pattern, rearranging for fixity
-- (the pattern analogue of 'mkOpAppRn').
mkConOpPatRn :: Located Name -> Fixity -> LPat Name -> LPat Name
             -> RnM (Pat Name)
mkConOpPatRn op2 fix2 p1@(L loc (ConPatIn op1 (InfixCon p11 p12))) p2
  = do  { fix1 <- lookupFixityRn (unLoc op1)
        ; let (nofix_error, associate_right) = compareFixity fix1 fix2
        ; if nofix_error then do
                { precParseErr (unLoc op1,fix1) (unLoc op2,fix2)
                ; return (ConPatIn op2 (InfixCon p1 p2)) }
          else if associate_right then do
                { new_p <- mkConOpPatRn op2 fix2 p12 p2
                ; return (ConPatIn op1 (InfixCon p11 (L loc new_p))) } -- XXX loc right?
          else return (ConPatIn op2 (InfixCon p1 p2)) }
mkConOpPatRn op _ p1 p2                         -- Default case, no rearrangment
  = ASSERT( not_op_pat (unLoc p2) )
    return (ConPatIn op (InfixCon p1 p2))
-- | True iff the pattern is not an infix constructor application.
not_op_pat :: Pat Name -> Bool
not_op_pat p = case p of
  ConPatIn _ (InfixCon _ _) -> False
  _                         -> True
--------------------------------------
-- | Check precedence of a function binding written infix,
--   eg  a `op` b `C` c = ...
-- See comments with rnExpr (OpApp ...) about "deriving"
checkPrecMatch :: Name -> MatchGroup Name body -> RnM ()
checkPrecMatch op (MG { mg_alts = ms })
  = mapM_ check ms
  where
    check (L _ (Match _ (L l1 p1 : L l2 p2 :_) _ _))
      = setSrcSpan (combineSrcSpans l1 l2) $
        do checkPrec op p1 False
           checkPrec op p2 True
    check _ = return ()
        -- This can happen.  Consider
        --      a `op` True = ...
        --      op          = ...
        -- The infix flag comes from the first binding of the group
        -- but the second eqn has no args (an error, but not discovered
        -- until the type checker).  So we don't want to crash on the
        -- second eqn.
-- | Check that an infix-constructor argument pattern's operator binds
-- tightly enough; the Bool says whether the pattern is the right operand.
checkPrec :: Name -> Pat Name -> Bool -> IOEnv (Env TcGblEnv TcLclEnv) ()
checkPrec op (ConPatIn op1 (InfixCon _ _)) right = do
    op_fix@(Fixity op_prec op_dir) <- lookupFixityRn op
    op1_fix@(Fixity op1_prec op1_dir) <- lookupFixityRn (unLoc op1)
    let
        inf_ok = op1_prec > op_prec ||
                 (op1_prec == op_prec &&
                  (op1_dir == InfixR && op_dir == InfixR && right ||
                   op1_dir == InfixL && op_dir == InfixL && not right))
        info  = (op,        op_fix)
        info1 = (unLoc op1, op1_fix)
        (infol, infor) = if right then (info, info1) else (info1, info)
    unless inf_ok (precParseErr infol infor)
checkPrec _ _ _
  = return ()
-- Check precedence of (arg op) or (op arg) respectively
-- If arg is itself an operator application, then either
--   (a) its precedence must be higher than that of op
--   (b) its precedency & associativity must be the same as that of op
checkSectionPrec :: FixityDirection -> HsExpr RdrName
        -> LHsExpr Name -> LHsExpr Name -> RnM ()
checkSectionPrec direction section op arg
  = case unLoc arg of
        OpApp _ op fix _ -> go_for_it (get_op op) fix
        NegApp _ _       -> go_for_it negateName negateFixity
        _                -> return ()
  where
    op_name = get_op op
    go_for_it arg_op arg_fix@(Fixity arg_prec assoc) = do
          op_fix@(Fixity op_prec _) <- lookupFixityRn op_name
          unless (op_prec < arg_prec
                  || (op_prec == arg_prec && direction == assoc))
                 (sectionPrecErr (op_name, op_fix)
                                 (arg_op, arg_fix) section)
-- Precedence-related error messages

precParseErr :: (Name, Fixity) -> (Name, Fixity) -> RnM ()
-- ^ Report two operators of incompatible fixity mixed without
-- parentheses.  Unbound operator names are ignored to avoid an
-- error cascade after a scope error.
precParseErr op1@(n1,_) op2@(n2,_)
  | isUnboundName n1 || isUnboundName n2
  = return ()     -- Avoid error cascade
  | otherwise
  = addErr $ hang (ptext (sLit "Precedence parsing error"))
      4 (hsep [ptext (sLit "cannot mix"), ppr_opfix op1, ptext (sLit "and"),
               ppr_opfix op2,
               ptext (sLit "in the same infix expression")])
sectionPrecErr :: (Name, Fixity) -> (Name, Fixity) -> HsExpr RdrName -> RnM ()
-- ^ Report a section whose operator does not bind more loosely than its
-- operand.  Unbound names are ignored to avoid an error cascade.
sectionPrecErr op@(n1,_) arg_op@(n2,_) section
  | isUnboundName n1 || isUnboundName n2
  = return ()     -- Avoid error cascade
  | otherwise
  = addErr $ vcat [ptext (sLit "The operator") <+> ppr_opfix op <+> ptext (sLit "of a section"),
                   nest 4 (sep [ptext (sLit "must have lower precedence than that of the operand,"),
                                nest 2 (ptext (sLit "namely") <+> ppr_opfix arg_op)]),
                   nest 4 (ptext (sLit "in the section:") <+> quotes (ppr section))]
-- | Render an operator together with its fixity, e.g. @`op` [infixl 5]@.
-- Prefix negation is shown specially since it has no quotable name.
ppr_opfix :: (Name, Fixity) -> SDoc
ppr_opfix (op, fixity) = pp_op <+> brackets (ppr fixity)
  where
    pp_op = if op == negateName
            then ptext (sLit "prefix `-'")
            else quotes (ppr op)
{-
*********************************************************
* *
\subsection{Errors}
* *
*********************************************************
-}
warnUnusedForAlls :: SDoc -> LHsTyVarBndrs RdrName -> [RdrName] -> TcM ()
-- ^ Under -fwarn-unused-matches, warn about explicitly forall'd type
-- variables that are never mentioned in the rest of the type.
warnUnusedForAlls in_doc bound mentioned_rdrs
  = whenWOptM Opt_WarnUnusedMatches $
    mapM_ add_warn bound_but_not_used
  where
    bound_names        = hsLTyVarLocNames bound
    bound_but_not_used = filterOut ((`elem` mentioned_rdrs) . unLoc) bound_names
    -- Warn at the binder's own source location
    add_warn (L loc tv)
      = addWarnAt loc $
        vcat [ ptext (sLit "Unused quantified type variable") <+> quotes (ppr tv)
             , in_doc ]
warnContextQuantification :: SDoc -> [LHsTyVarBndr RdrName] -> TcM ()
-- ^ Under -fwarn-context-quantification, warn about type variables that
-- are implicitly quantified only because they appear in a context.
warnContextQuantification in_doc tvs
  = whenWOptM Opt_WarnContextQuantification $
    mapM_ add_warn tvs
  where
    add_warn (L loc tv)
      = addWarnAt loc $
        vcat [ ptext (sLit "Variable") <+> quotes (ppr tv) <+>
               ptext (sLit "is implicitly quantified due to a context") $$
               ptext (sLit "Use explicit forall syntax instead.") $$
               ptext (sLit "This will become an error in GHC 7.12.")
             , in_doc ]
opTyErr :: RdrName -> HsType RdrName -> SDoc
-- ^ Error message for an infix operator in a type without TypeOperators.
-- Suggests the extension, or explicit-forall syntax when the "operator"
-- is actually the dot of a (pseudo) forall.
opTyErr op ty@(HsOpTy ty1 _ _)
  = hang (ptext (sLit "Illegal operator") <+> quotes (ppr op) <+> ptext (sLit "in type") <+> quotes (ppr ty))
         2 extra
  where
    extra | op == dot_tv_RDR && forall_head ty1
          = perhapsForallMsg
          | otherwise
          = ptext (sLit "Use TypeOperators to allow operators in types")
    -- Is the head of the left operand the type variable "forall"?
    forall_head (L _ (HsTyVar tv))   = tv == forall_tv_RDR
    forall_head (L _ (HsAppTy ty _)) = forall_head ty
    forall_head _other               = False
opTyErr _ ty = pprPanic "opTyErr: Not an op" (ppr ty)
{-
************************************************************************
* *
Finding the free type variables of a (HsType RdrName)
* *
************************************************************************
Note [Kind and type-variable binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a type signature we may implicitly bind type variables and, more
recently, kind variables. For example:
* f :: a -> a
f = ...
Here we need to find the free type variables of (a -> a),
so that we know what to quantify
* class C (a :: k) where ...
This binds 'k' in ..., as well as 'a'
* f (x :: a -> [a]) = ....
Here we bind 'a' in ....
* f (x :: T a -> T (b :: k)) = ...
Here we bind both 'a' and the kind variable 'k'
* type instance F (T (a :: Maybe k)) = ...a...k...
Here we want to constrain the kind of 'a', and bind 'k'.
In general we want to walk over a type, and find
* Its free type variables
* The free kind variables of any kind signatures in the type
Hence we return a pair (kind-vars, type-vars)
See also Note [HsBSig binder lists] in HsTypes
-}
-- | Free variables of a type, as (kind variables, type variables).
-- See Note [Kind and type-variable binders].
type FreeKiTyVars = ([RdrName], [RdrName])
-- | Drop from both components any variable already bound in the
-- local 'RdrName' environment.
filterInScope :: LocalRdrEnv -> FreeKiTyVars -> FreeKiTyVars
filterInScope rdr_env (kvs, tvs) = (keep kvs, keep tvs)
  where
    keep = filterOut (`elemLocalRdrEnv` rdr_env)
extractHsTyRdrTyVars :: LHsType RdrName -> FreeKiTyVars
-- extractHsTyRdrNames finds the free (kind, type) variables of a HsType
--                     or the free (sort, kind) variables of a HsKind
-- It's used when making the for-alls explicit.
-- See Note [Kind and type-variable binders]
extractHsTyRdrTyVars ty
  = case extract_lty ty ([],[]) of
      (kvs, tvs) -> (nub kvs, nub tvs)   -- de-duplicate the accumulated lists
extractHsTysRdrTyVars :: [LHsType RdrName] -> FreeKiTyVars
-- ^ Free (kind, type) variables of a list of types, de-duplicated.
-- See Note [Kind and type-variable binders]
extractHsTysRdrTyVars tys =
  let (kvs, tvs) = extract_ltys tys ([], [])
  in (nub kvs, nub tvs)
-- | Free kind variables of an optional kind signature (empty if absent).
extractRdrKindSigVars :: Maybe (LHsKind RdrName) -> [RdrName]
extractRdrKindSigVars = maybe [] (\k -> nub (fst (extract_lkind k ([], []))))
extractDataDefnKindVars :: HsDataDefn RdrName -> [RdrName]
-- Get the scoped kind variables mentioned free in the constructor decls
-- Eg    data T a = T1 (S (a :: k)) | forall (b::k). T2 (S b)
-- Here k should scope over the whole definition
extractDataDefnKindVars (HsDataDefn { dd_ctxt = ctxt, dd_kindSig = ksig
                                    , dd_cons = cons, dd_derivs = derivs })
  = fst $ extract_lctxt ctxt $
          extract_mb extract_lkind ksig $
          extract_mb (extract_ltys . unLoc) derivs $
          foldr (extract_con . unLoc) ([],[]) cons
  where
    -- GADT-style constructors carry a complete standalone signature,
    -- so they contribute no scoped kind variables here.
    extract_con (ConDecl { con_res = ResTyGADT {} }) acc = acc
    extract_con (ConDecl { con_res = ResTyH98, con_qvars = qvs
                         , con_cxt = ctxt, con_details = details }) acc
      = extract_hs_tv_bndrs qvs acc $
        extract_lctxt ctxt $
        extract_ltys (hsConDeclArgTys details) ([],[])
-- | Collect free variables from every type in a class/instance context.
extract_lctxt :: LHsContext RdrName -> FreeKiTyVars -> FreeKiTyVars
extract_lctxt ctxt acc = extract_ltys (unLoc ctxt) acc
-- | Thread the accumulator through a list of types, right to left.
extract_ltys :: [LHsType RdrName] -> FreeKiTyVars -> FreeKiTyVars
extract_ltys []       acc = acc
extract_ltys (ty:tys) acc = extract_lty ty (extract_ltys tys acc)
-- | Apply an extraction function to an optional value, threading the
-- accumulator through unchanged when the value is absent.
extract_mb :: (a -> FreeKiTyVars -> FreeKiTyVars) -> Maybe a -> FreeKiTyVars -> FreeKiTyVars
extract_mb f mb acc = maybe acc (\x -> f x acc) mb
extract_lkind :: LHsType RdrName -> FreeKiTyVars -> FreeKiTyVars
-- ^ Extract the free variables of a kind.  Variables free in a kind are
-- kind variables, so the walk runs with the kind-variable accumulator in
-- the type-variable position, and the type-variable accumulator passes
-- straight through.
extract_lkind kind (acc_kvs, acc_tvs) = case extract_lty kind ([], acc_kvs) of
    (_, res_kvs) -> (res_kvs, acc_tvs)
        -- Kinds shouldn't have sort signatures!
extract_lty :: LHsType RdrName -> FreeKiTyVars -> FreeKiTyVars
-- ^ Walk one type, accumulating its free kind and type variables.
-- See Note [Kind and type-variable binders]
extract_lty (L _ ty) acc
  = case ty of
      HsTyVar tv                -> extract_tv tv acc
      HsBangTy _ ty             -> extract_lty ty acc
      HsRecTy flds              -> foldr (extract_lty . cd_fld_type . unLoc) acc
                                         flds
      HsAppTy ty1 ty2           -> extract_lty ty1 (extract_lty ty2 acc)
      HsListTy ty               -> extract_lty ty acc
      HsPArrTy ty               -> extract_lty ty acc
      HsTupleTy _ tys           -> extract_ltys tys acc
      HsFunTy ty1 ty2           -> extract_lty ty1 (extract_lty ty2 acc)
      HsIParamTy _ ty           -> extract_lty ty acc
      HsEqTy ty1 ty2            -> extract_lty ty1 (extract_lty ty2 acc)
      -- An infix operator is itself a free variable of the type
      HsOpTy ty1 (_, (L _ tv)) ty2 -> extract_tv tv (extract_lty ty1 (extract_lty ty2 acc))
      HsParTy ty                -> extract_lty ty acc
      HsCoreTy {}               -> acc  -- The type is closed
      HsSpliceTy {}             -> acc  -- Type splices mention no type variables
      HsDocTy ty _              -> extract_lty ty acc
      HsExplicitListTy _ tys    -> extract_ltys tys acc
      HsExplicitTupleTy _ tys   -> extract_ltys tys acc
      HsTyLit _                 -> acc
      -- HsWrapTy only appears after typechecking; unreachable here
      HsWrapTy _ _              -> panic "extract_lty"
      HsKindSig ty ki           -> extract_lty ty (extract_lkind ki acc)
      HsForAllTy _ _ tvs cx ty  -> extract_hs_tv_bndrs tvs acc $
                                   extract_lctxt cx $
                                   extract_lty ty ([],[])
      -- We deal with these separately in rnLHsTypeWithWildCards
      HsWildCardTy _            -> acc
extract_hs_tv_bndrs :: LHsTyVarBndrs RdrName -> FreeKiTyVars
                    -> FreeKiTyVars -> FreeKiTyVars
-- ^ Combine the free variables of a binder group's body with an outer
-- accumulator, removing the variables bound by the group itself.
extract_hs_tv_bndrs (HsQTvs { hsq_tvs = tvs })
                    (acc_kvs, acc_tvs)   -- Note accumulator comes first
                    (body_kvs, body_tvs)
  | null tvs
  = (body_kvs ++ acc_kvs, body_tvs ++ acc_tvs)
  | otherwise
  = (acc_kvs ++ filterOut (`elem` local_kvs) body_kvs,
     acc_tvs ++ filterOut (`elem` local_tvs) body_tvs)
  where
    local_tvs = map hsLTyVarName tvs
    -- Kind variables mentioned in the binders' kind signatures
    (_, local_kvs) = foldr extract_lty ([], []) [k | L _ (KindedTyVar _ k) <- tvs]
                     -- These kind variables are bound here if not bound further out
-- | Record one name in the type-variable accumulator; names that are
-- not type variables (e.g. type constructors) are dropped.
extract_tv :: RdrName -> FreeKiTyVars -> FreeKiTyVars
extract_tv tv acc@(kvs, tvs)
  | isRdrTyVar tv = (kvs, tv : tvs)
  | otherwise     = acc
| urbanslug/ghc | compiler/rename/RnTypes.hs | bsd-3-clause | 49,512 | 0 | 22 | 14,312 | 12,655 | 6,450 | 6,205 | -1 | -1 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DataKinds #-}
{-# OPTIONS_GHC -Wall #-}
module Main where
import Tower.Prelude
import Tower.N
import Test.Tasty (TestName, TestTree, testGroup, defaultMain, localOption)
import Test.Tasty.QuickCheck
import Test.DocTest
-- import Test.QuickCheck
-- | Run the doctests for the examples module, then the law test-trees.
main :: IO ()
main = doctest ["src/Tower/Examples.hs"] >> defaultMain tests
-- | A law's property function, tagged by its arity over a single type @a@.
data LawArity a =
    Nonary Bool |                 -- ^ constant proposition
    Unary (a -> Bool) |           -- ^ one-argument law
    Binary (a -> a -> Bool) |     -- ^ two-argument law
    Ternary (a -> a -> a -> Bool) |      -- ^ three-argument law
    Ornary (a -> a -> a -> a -> Bool) |  -- ^ four-argument law
    Failiary (a -> Property)      -- ^ a law expected to fail (e.g. via expectFailure)
-- | A law's property function over two types (typically a module @a@
-- and its scalar @b@), tagged by arity and argument pattern.
data LawArity2 a b =
    Unary2 (a -> Bool) |
    Binary2 (a -> b -> Bool) |
    Ternary2 (a -> a -> b -> Bool) |
    Ternary2' (a -> b -> b -> Bool) |
    Failiary2 (a -> Property)     -- ^ a law expected to fail
-- | A named law over one type.
type Law a = (TestName, LawArity a)

-- | A named law over a pair of types.
type Law2 a b = (TestName, LawArity2 a b)
-- | Turn a law into a tasty property test.  The list argument exists
-- only to pin the tested type @a@ and is never evaluated.
testLawOf :: (Arbitrary a, Show a) => [a] -> Law a -> TestTree
testLawOf _ (name, arity) =
  case arity of
    Nonary p   -> testProperty name p
    Unary p    -> testProperty name p
    Binary p   -> testProperty name p
    Ternary p  -> testProperty name p
    Ornary p   -> testProperty name p
    Failiary p -> testProperty name p
-- | Turn a two-type law into a tasty property test.  The list argument
-- exists only to pin the pair of tested types and is never evaluated.
testLawOf2 :: (Arbitrary a, Show a, Arbitrary b, Show b) =>
              [(a,b)] -> Law2 a b -> TestTree
testLawOf2 _ (name, arity) =
  case arity of
    Unary2 p    -> testProperty name p
    Binary2 p   -> testProperty name p
    Ternary2 p  -> testProperty name p
    Ternary2' p -> testProperty name p
    Failiary2 p -> testProperty name p
-- | Top-level grouping of all law test-trees, one per carrier type.
tests :: TestTree
tests =
    testGroup "Tower"
    [ testsInt
    , testsFloat
    , testsBool
    , testsVInt
    , testsVFloat
    , testsMInt
    , testsMFloat
    , testsNInt
    ]
-- | Exact laws for Int: a commutative ring with integral division and sign.
testsInt :: TestTree
testsInt = testGroup "Int"
    [ testGroup "Additive" $ testLawOf ([]::[Int]) <$>
      additiveLaws
    , testGroup "Additive Group" $ testLawOf ([]::[Int]) <$>
      additiveGroupLaws
    , testGroup "Multiplicative" $ testLawOf ([]::[Int]) <$>
      multiplicativeLaws
    , testGroup "Distribution" $ testLawOf ([]::[Int])
      <$> distributionLaws
    , testGroup "Integral" $ testLawOf ([]::[Int]) <$>
      integralLaws
    , testGroup "Signed" $ testLawOf ([]::[Int]) <$>
      signedLaws
    ]
-- | Laws for Float.  Associativity and distribution are *expected to
-- fail* under exact equality because of rounding, hence the *Fail law
-- sets; field-like laws are checked approximately.
testsFloat :: TestTree
testsFloat = testGroup "Float"
    [ testGroup "Additive - Associative Fail" $ testLawOf ([]::[Float]) <$>
      additiveLawsFail
    , testGroup "Additive Group" $ testLawOf ([]::[Float]) <$>
      additiveGroupLaws
    , testGroup "Multiplicative - Associative Fail" $
      testLawOf ([]::[Float]) <$>
      multiplicativeLawsFail
    , testGroup "MultiplicativeGroup" $ testLawOf ([]::[Float]) <$>
      multiplicativeGroupLaws
    , testGroup "Distribution - Fail" $ testLawOf ([]::[Float]) <$>
      distributionLawsFail
    , testGroup "Signed" $ testLawOf ([]::[Float]) <$>
      signedLaws
    , testGroup "Bounded Field" $ testLawOf ([]::[Float]) <$>
      boundedFieldLaws
    , testGroup "Metric" $ testLawOf ([]::[Float]) <$> metricFloatLaws
    , testGroup "Quotient Field" $ testLawOf ([]::[Float]) <$>
      quotientFieldLaws
    , testGroup "Exponential Ring" $ testLawOf ([]::[Float]) <$> expRingLaws
    , testGroup "Exponential Field" $ testLawOf ([]::[Float]) <$> expFieldLaws
    ]
-- | Laws for Bool, which forms an idempotent semiring (||, &&).
testsBool :: TestTree
testsBool = testGroup "Bool"
    [ testGroup "Idempotent" $ testLawOf ([]::[Bool]) <$>
      idempotentLaws
    , testGroup "Additive" $ testLawOf ([]::[Bool]) <$>
      additiveLaws
    , testGroup "Multiplicative" $ testLawOf ([]::[Bool]) <$>
      multiplicativeLaws
    , testGroup "Distribution" $ testLawOf ([]::[Bool])
      <$> distributionLaws
    ]
-- | Exact ring/module/basis laws for a fixed-length Int vector.
testsVInt :: TestTree
testsVInt = testGroup "V 6 Int"
    [ testGroup "Additive" $ testLawOf ([]::[V 6 Int]) <$>
      additiveLaws
    , testGroup "Additive Group" $ testLawOf ([]::[V 6 Int]) <$>
      additiveGroupLaws
    , testGroup "Multiplicative" $ testLawOf ([]::[V 6 Int]) <$>
      multiplicativeLaws
    , testGroup "Distribution" $ testLawOf ([]::[V 6 Int])
      <$> distributionLaws
    , testGroup "Additive Module" $ testLawOf2 ([]::[(V 6 Int, Int)]) <$>
      additiveModuleLaws
    , testGroup "Additive Group Module" $ testLawOf2 ([]::[(V 6 Int, Int)]) <$>
      additiveGroupModuleLaws
    , testGroup "Multiplicative Module" $ testLawOf2 ([]::[(V 6 Int, Int)]) <$>
      multiplicativeModuleLaws
    , testGroup "Additive Basis" $ testLawOf ([]::[V 6 Int]) <$>
      additiveBasisLaws
    , testGroup "Additive Group Basis" $ testLawOf ([]::[V 6 Int]) <$>
      additiveGroupBasisLaws
    , testGroup "Multiplicative Basis" $ testLawOf ([]::[V 6 Int]) <$>
      multiplicativeBasisLaws
    ]
-- | Exact ring/module/basis laws for a fixed-size Int matrix.
testsMInt :: TestTree
testsMInt = testGroup "M 4 3 Int"
    [ testGroup "Additive" $ testLawOf ([]::[M 4 3 Int]) <$>
      additiveLaws
    , testGroup "Additive Group" $ testLawOf ([]::[M 4 3 Int]) <$>
      additiveGroupLaws
    , testGroup "Multiplicative" $ testLawOf ([]::[M 4 3 Int]) <$>
      multiplicativeLaws
    , testGroup "Distribution" $ testLawOf ([]::[M 4 3 Int])
      <$> distributionLaws
    , testGroup "Additive Module" $ testLawOf2 ([]::[(M 4 3 Int, Int)]) <$>
      additiveModuleLaws
    , testGroup "Additive Group Module" $ testLawOf2 ([]::[(M 4 3 Int, Int)]) <$>
      additiveGroupModuleLaws
    , testGroup "Multiplicative Module" $ testLawOf2 ([]::[(M 4 3 Int, Int)]) <$>
      multiplicativeModuleLaws
    , testGroup "Additive Basis" $ testLawOf ([]::[M 4 3 Int]) <$>
      additiveBasisLaws
    , testGroup "Additive Group Basis" $ testLawOf ([]::[M 4 3 Int]) <$>
      additiveGroupBasisLaws
    , testGroup "Multiplicative Basis" $ testLawOf ([]::[M 4 3 Int]) <$>
      multiplicativeBasisLaws
    ]
-- | Exact ring/module/basis laws for a rank-3 Int tensor.
testsNInt :: TestTree
testsNInt = testGroup "N [2,3,2] Int"
    [ testGroup "Additive" $ testLawOf ([]::[N [2,3,2] Int]) <$>
      additiveLaws
    , testGroup "Additive Group" $ testLawOf ([]::[N [2,3,2] Int]) <$>
      additiveGroupLaws
    , testGroup "Multiplicative" $ testLawOf ([]::[N [2,3,2] Int]) <$>
      multiplicativeLaws
    , testGroup "Distribution" $ testLawOf ([]::[N [2,3,2] Int])
      <$> distributionLaws
    , testGroup "Additive Module" $ testLawOf2 ([]::[(N [2,3,2] Int, Int)]) <$>
      additiveModuleLaws
    , testGroup "Additive Group Module" $ testLawOf2 ([]::[(N [2,3,2] Int, Int)]) <$>
      additiveGroupModuleLaws
    , testGroup "Multiplicative Module" $ testLawOf2 ([]::[(N [2,3,2] Int, Int)]) <$>
      multiplicativeModuleLaws
    , testGroup "Additive Basis" $ testLawOf ([]::[N [2,3,2] Int]) <$>
      additiveBasisLaws
    , testGroup "Additive Group Basis" $ testLawOf ([]::[N [2,3,2] Int]) <$>
      additiveGroupBasisLaws
    , testGroup "Multiplicative Basis" $ testLawOf ([]::[N [2,3,2] Int]) <$>
      multiplicativeBasisLaws
    ]
-- | Laws for a Float vector.  Exact-equality laws that rounding breaks
-- use the *Fail variants, with extra QuickCheck iterations to make the
-- expected counterexample reliably appear.
testsVFloat :: TestTree
testsVFloat = testGroup "V 6 Float"
    [ testGroup "Additive - Associative" $
      localOption (QuickCheckTests 1000) . testLawOf ([]::[V 6 Float]) <$>
      additiveLawsFail
    , testGroup "Additive Group" $
      testLawOf ([]::[V 6 Float]) <$>
      additiveGroupLaws
    , testGroup "Multiplicative - Associative" $
      localOption (QuickCheckTests 1000) . testLawOf ([]::[V 6 Float]) <$>
      multiplicativeLawsFail
    , testGroup "MultiplicativeGroup" $ testLawOf ([]::[V 6 Float]) <$>
      multiplicativeGroupLaws
    , testGroup "Distribution" $
      localOption (QuickCheckTests 1000) . testLawOf ([]::[V 6 Float]) <$>
      distributionLawsFail
    , testGroup "Signed" $ testLawOf ([]::[V 6 Float]) <$>
      signedLaws
    , testGroup "Metric" $ testLawOf ([]::[V 6 Float]) <$> metricRepFloatLaws
    , testGroup "Exponential Ring" $ testLawOf ([]::[V 6 Float]) <$> expRingRepLaws
    , testGroup "Exponential Field" $ testLawOf ([]::[V 6 Float]) <$> expFieldRepLaws
    , testGroup "Additive Module" $ localOption (QuickCheckTests 1000) .
      testLawOf2 ([]::[(V 6 Float, Float)]) <$>
      additiveModuleLawsFail
    , testGroup "Additive Group Module" $ localOption (QuickCheckTests 1000) .
      testLawOf2 ([]::[(V 6 Float, Float)]) <$>
      additiveGroupModuleLawsFail
    , testGroup "Multiplicative Module" $ localOption (QuickCheckTests 1000) .
      testLawOf2 ([]::[(V 6 Float, Float)]) <$>
      multiplicativeModuleLawsFail
    , testGroup "Multiplicative Group Module" $
      testLawOf2 ([]::[(V 6 Float, Float)]) <$>
      multiplicativeGroupModuleLaws
    , testGroup "Additive Basis" $ testLawOf ([]::[V 6 Float]) <$>
      additiveBasisLaws
    , testGroup "Additive Group Basis" $ testLawOf ([]::[V 6 Float]) <$>
      additiveGroupBasisLaws
    , testGroup "Multiplicative Basis" $ localOption (QuickCheckTests 1000) .
      testLawOf ([]::[V 6 Float]) <$>
      multiplicativeBasisLawsFail
    , testGroup "Multiplicative Group Basis" $ testLawOf ([]::[V 6 Float]) <$>
      multiplicativeGroupBasisLaws
    , testGroup "Banach" $ testLawOf2 ([]::[(V 6 Float, Float)]) <$>
      banachLaws
    ]
-- | Laws for a Float matrix; mirrors 'testsVFloat' minus the Banach
-- group and with the exact-module laws kept where rounding permits.
testsMFloat :: TestTree
testsMFloat = testGroup "M 4 3 Float"
    [ testGroup "Additive - Associative - Failure" $
      localOption (QuickCheckTests 1000) . testLawOf ([]::[M 4 3 Float]) <$>
      additiveLawsFail
    , testGroup "Additive Group" $ testLawOf ([]::[M 4 3 Float]) <$>
      additiveGroupLaws
    , testGroup "Multiplicative - Associative Failure" $
      localOption (QuickCheckTests 1000) . testLawOf ([]::[M 4 3 Float]) <$>
      multiplicativeLawsFail
    , testGroup "MultiplicativeGroup" $ testLawOf ([]::[M 4 3 Float]) <$>
      multiplicativeGroupLaws
    , testGroup "Distribution - Fail" $
      localOption (QuickCheckTests 1000) . testLawOf ([]::[M 4 3 Float]) <$>
      distributionLawsFail
    , testGroup "Signed" $ testLawOf ([]::[M 4 3 Float]) <$>
      signedLaws
    , testGroup "Metric" $ testLawOf ([]::[M 4 3 Float]) <$> metricRepFloatLaws
    , testGroup "Exponential Ring" $ testLawOf ([]::[M 4 3 Float]) <$> expRingRepLaws
    , testGroup "Exponential Field" $ testLawOf ([]::[M 4 3 Float]) <$> expFieldRepLaws
    , testGroup "Additive Module" $ testLawOf2 ([]::[(M 4 3 Float, Float)]) <$>
      additiveModuleLaws
    , testGroup "Additive Group Module" $ testLawOf2 ([]::[(M 4 3 Float, Float)]) <$>
      additiveGroupModuleLaws
    , testGroup "Multiplicative Module" $
      localOption (QuickCheckTests 1000) .
      testLawOf2 ([]::[(M 4 3 Float, Float)]) <$>
      multiplicativeModuleLawsFail
    , testGroup "Multiplicative Group Module" $ testLawOf2 ([]::[(M 4 3 Float, Float)]) <$>
      multiplicativeGroupModuleLaws
    , testGroup "Additive Basis" $ testLawOf ([]::[M 4 3 Float]) <$>
      additiveBasisLaws
    , testGroup "Additive Group Basis" $ testLawOf ([]::[M 4 3 Float]) <$>
      additiveGroupBasisLaws
    , testGroup "Multiplicative Basis" $ localOption (QuickCheckTests 1000) .
      testLawOf ([]::[M 4 3 Float]) <$>
      multiplicativeBasisLawsFail
    , testGroup "Multiplicative Group Basis" $ testLawOf ([]::[M 4 3 Float]) <$>
      multiplicativeGroupBasisLaws
    ]
-- | Laws that hold when (+) and (*) are idempotent (e.g. Bool).
idempotentLaws ::
    ( Eq a
    , Additive a
    , Multiplicative a
    ) => [Law a]
idempotentLaws =
    [ ( "idempotent: a + a == a"
      , Unary (\a -> a + a == a))
    , ( "idempotent: a * a == a"
      , Unary (\a -> a * a == a))
    ]
-- | Commutative-monoid laws for (+) under exact equality.
additiveLaws ::
    ( Eq a
    , Additive a
    ) => [Law a]
additiveLaws =
    [ ( "associative: (a + b) + c = a + (b + c)"
      , Ternary (\a b c -> (a + b) + c == a + (b + c)))
    , ("left id: zero + a = a", Unary (\a -> zero + a == a))
    , ("right id: a + zero = a", Unary (\a -> a + zero == a))
    , ("commutative: a + b == b + a", Binary (\a b -> a + b == b + a))
    ]
-- | Commutative-monoid laws for (+) with associativity only up to
-- the Epsilon tolerance (for floating-point carriers).
additiveLawsApprox ::
    ( Eq a
    , Additive a
    , Epsilon a
    ) => [Law a]
additiveLawsApprox =
    [ ( "associative: (a + b) + c ≈ a + (b + c)"
      , Ternary (\a b c -> (a + b) + c ≈ a + (b + c)))
    , ("left id: zero + a = a", Unary (\a -> zero + a == a))
    , ("right id: a + zero = a", Unary (\a -> a + zero == a))
    , ("commutative: a + b == b + a", Binary (\a b -> a + b == b + a))
    ]
-- | Like 'additiveLaws' but with exact associativity wrapped in
-- 'expectFailure': rounding is expected to break it.
additiveLawsFail ::
    ( Eq a
    , Additive a
    , Show a
    , Arbitrary a
    ) => [Law a]
additiveLawsFail =
    [ ( "associative: (a + b) + c = a + (b + c)"
      , Failiary $ expectFailure . (\a b c -> (a + b) + c == a + (b + c)))
    , ("left id: zero + a = a", Unary (\a -> zero + a == a))
    , ("right id: a + zero = a", Unary (\a -> a + zero == a))
    , ("commutative: a + b == b + a", Binary (\a b -> a + b == b + a))
    ]
-- | Inverse laws linking (-), negate and zero.
additiveGroupLaws ::
    ( Eq a
    , AdditiveGroup a
    ) => [Law a]
additiveGroupLaws =
    [ ("minus: a - a = zero", Unary (\a -> (a - a) == zero))
    , ("negate minus: negate a == zero - a", Unary (\a -> negate a == zero - a))
    , ("negate cancel: negate a + a == zero", Unary (\a -> negate a + a == zero))
    ]
-- | Commutative-monoid laws for (*) under exact equality.
multiplicativeLaws ::
    ( Eq a
    , Multiplicative a
    ) => [Law a]
multiplicativeLaws =
    [ ( "associative: (a * b) * c = a * (b * c)"
      , Ternary (\a b c -> (a * b) * c == a * (b * c)))
    , ("left id: one * a = a", Unary (\a -> one * a == a))
    , ("right id: a * one = a", Unary (\a -> a * one == a))
    , ("commutative: a * b == b * a", Binary (\a b -> a * b == b * a))
    ]
-- | Commutative-monoid laws for (*), associativity only up to Epsilon.
multiplicativeLawsApprox ::
    ( Eq a
    , Epsilon a
    , Multiplicative a
    ) => [Law a]
multiplicativeLawsApprox =
    [ ("associative: (a * b) * c ≈ a * (b * c)"
      , Ternary (\a b c -> (a * b) * c ≈ a * (b * c)))
    , ("left id: one * a = a", Unary (\a -> one * a == a))
    , ("right id: a * one = a", Unary (\a -> a * one == a))
    , ("commutative: a * b == b * a", Binary (\a b -> a * b == b * a))
    ]
-- | Like 'multiplicativeLaws' but exact associativity is expected to
-- fail (floating-point rounding).
multiplicativeLawsFail ::
    ( Eq a
    , Show a
    , Arbitrary a
    , Multiplicative a
    ) => [Law a]
multiplicativeLawsFail =
    [ ("associative: (a * b) * c = a * (b * c)"
      , Failiary $ expectFailure . (\a b c -> (a * b) * c == a * (b * c)))
    , ("left id: one * a = a", Unary (\a -> one * a == a))
    , ("right id: a * one = a", Unary (\a -> a * one == a))
    , ("commutative: a * b == b * a", Binary (\a b -> a * b == b * a))
    ]
-- | Reciprocal/division laws, guarded against division by zero and
-- compared up to the Epsilon tolerance.
multiplicativeGroupLaws ::
    ( Epsilon a
    , Eq a
    , MultiplicativeGroup a
    ) => [Law a]
multiplicativeGroupLaws =
    [ ( "divide: a == zero || a / a ≈ one", Unary (\a -> a == zero || (a / a) ≈ one))
    , ( "recip divide: recip a == one / a", Unary (\a -> recip a == one / a))
    , ( "recip left: a == zero || recip a * a ≈ one"
      , Unary (\a -> a == zero || recip a * a ≈ one))
    , ( "recip right: a == zero || a * recip a ≈ one"
      , Unary (\a -> a == zero || a * recip a ≈ one))
    ]
-- | Exact annihilation and distributivity of `times` over (+).
distributionLaws ::
    ( Eq a
    , Distribution a
    ) => [Law a]
distributionLaws =
    [ ("annihilation: a * zero == zero", Unary (\a -> a `times` zero == zero))
    , ("left distributivity: a * (b + c) == a * b + a * c"
      , Ternary (\a b c -> a `times` (b + c) == a `times` b + a `times` c))
    , ("right distributivity: (a + b) * c == a * c + b * c"
      , Ternary (\a b c -> (a + b) `times` c == a `times` c + b `times` c))
    ]
-- | Distributivity up to the Epsilon tolerance; annihilation stays exact.
distributionLawsApprox ::
    ( Epsilon a
    , Eq a
    , Distribution a
    ) => [Law a]
distributionLawsApprox =
    [ ("annihilation: a * zero == zero", Unary (\a -> a `times` zero == zero))
    , ("left distributivity: a * (b + c) ≈ a * b + a * c"
      , Ternary (\a b c -> a `times` (b + c) ≈ a `times` b + a `times` c))
    , ("right distributivity: (a + b) * c ≈ a * c + b * c"
      , Ternary (\a b c -> (a + b) `times` c ≈ a `times` c + b `times` c))
    ]
-- | Exact distributivity wrapped in 'expectFailure' (rounding breaks it);
-- annihilation still holds exactly.
distributionLawsFail ::
    ( Show a
    , Arbitrary a
    , Epsilon a
    , Eq a
    , Distribution a
    ) => [Law a]
distributionLawsFail =
    [ ("annihilation: a * zero == zero", Unary (\a -> a `times` zero == zero))
    , ("left distributivity: a * (b + c) = a * b + a * c"
      , Failiary $ expectFailure .
        (\a b c -> a `times` (b + c) == a `times` b + a `times` c))
    , ("right distributivity: (a + b) * c = a * c + b * c"
      , Failiary $ expectFailure . (\a b c -> (a + b) `times` c == a `times` c + b `times` c))
    ]
-- | Sign/absolute-value decomposition law.
signedLaws ::
    ( Eq a
    , Signed a
    ) => [Law a]
signedLaws =
    [ ("sign a * abs a == a", Unary (\a -> sign a `times` abs a == a))
    ]
-- | div/mod round-trip and Integer conversion round-trip.
integralLaws ::
    ( Eq a
    , Integral a
    , FromInteger a
    , ToInteger a
    ) => [Law a]
integralLaws =
    [ ( "integral divmod: b == zero || b * (a `div` b) + (a `mod` b) == a"
      , Binary (\a b -> b == zero || b `times` (a `div` b) + (a `mod` b) == a))
    , ( "fromIntegral a = a"
      , Unary (\a -> fromIntegral a == a))
    ]
-- | Infinity/NaN behaviour for a bounded field.
-- NOTE(review): several sub-expressions are annotated @:: Float@, so
-- despite the polymorphic signature these checks partly pin Float
-- regardless of @a@ — confirm that is intended.
boundedFieldLaws ::
    ( Ord a
    , BoundedField a
    ) => [Law a]
boundedFieldLaws =
    [ ("infinity laws"
      , Unary (\a ->
          ((one :: Float)/zero + infinity == infinity) &&
          (infinity + a == infinity) &&
          isNaN ((infinity :: Float) - infinity) &&
          isNaN ((infinity :: Float) / infinity) &&
          isNaN (nan + a) &&
          -- NaN is never equal to anything, including itself
          (zero :: Float)/zero /= nan))
    ]
-- | Strictly positive and comfortably away from zero (not within Epsilon).
prettyPositive :: (Epsilon a, Ord a) => a -> Bool
prettyPositive a = a > zero && not (nearZero a)
-- | Positive, or at least within Epsilon of zero.
kindaPositive :: (Epsilon a, Ord a) => a -> Bool
kindaPositive a = a > zero || nearZero a
-- | Metric-space laws for Float-valued distances over a representable
-- container; the triangle rule is checked with 'kindaPositive' to
-- absorb rounding error.
metricRepFloatLaws ::
    ( Representable r
    , Foldable r
    ) => [Law (r Float)]
metricRepFloatLaws =
    [ ( "positive"
      , Binary (\a b -> distance a b >= (zero::Float)))
    , ( "zero if equal"
      , Unary (\a -> distance a a == (zero::Float)))
    , ( "associative"
        -- i.e. symmetry of distance, up to Epsilon
      , Binary (\a b -> distance a b ≈ (distance b a :: Float)))
    , ( "triangle rule - sum of distances > distance"
      , Ternary
        (\a b c ->
           kindaPositive
             (distance a c + distance b c - (distance a b :: Float)) &&
           kindaPositive
             (distance a b + distance b c - (distance a c :: Float)) &&
           kindaPositive
             (distance a b + distance a c - (distance b c :: Float))))
    ]
-- | Metric-space laws for Float itself.  The triangle rule is only
-- checked for magnitudes <= 10 to dodge rounding noise on large values.
metricFloatLaws ::
    (
    ) => [Law Float]
metricFloatLaws =
    [ ( "positive"
      , Binary (\a b -> (distance a b :: Float) >= zero))
    , ("zero if equal"
      , Unary (\a -> (distance a a :: Float) == zero))
    , ( "associative"
        -- i.e. symmetry of distance, up to Epsilon
      , Binary (\a b -> (distance a b :: Float) ≈ (distance b a :: Float)))
    , ( "triangle rule - sum of distances > distance"
      , Ternary (\a b c ->
          (abs a > 10.0) ||
          (abs b > 10.0) ||
          (abs c > 10.0) ||
          kindaPositive (distance a c + distance b c - (distance a b :: Float)) &&
          kindaPositive (distance a b + distance b c - (distance a c :: Float)) &&
          kindaPositive (distance a b + distance a c - (distance b c :: Float))))
    ]
-- | Sandwich and rounding laws relating floor, ceiling and round.
quotientFieldLaws ::
    ( Ord a
    , Field a
    , QuotientField a
    , FromInteger a
    ) => [Law a]
quotientFieldLaws =
    [ ("x-1 < floor <= x <= ceiling < x+1"
      , Unary (\a ->
          ((a - one) < fromIntegral (floor a)) &&
          (fromIntegral (floor a) <= a) &&
          (a <= fromIntegral (ceiling a)) &&
          (fromIntegral (ceiling a) < a + one)))
    , ("round == floor (x + 1/2)"
      , Unary (\a -> round a == floor (a + one/(one+one))
        ))
    ]
-- | Exponentiation/logarithm inversion law for an exponential ring.
-- NOTE(review): the disjunct @not (nearZero (a - zero))@ makes the
-- property vacuously true whenever @a@ is NOT near zero, so the final
-- equation is rarely exercised; the label suggests the guard was meant
-- to exclude a ≈ 0 instead — confirm intent.
expRingLaws ::
    ( ExpRing a
    , Epsilon a
    , Ord a
    ) => [Law a]
expRingLaws =
    [ ("for +ive b, a != 0,1: a ** logBase a b ≈ b"
      , Binary (\a b ->
          ( not (prettyPositive b) ||
            not (nearZero (a - zero)) ||
            (a == one) ||
            (a == zero && nearZero (logBase a b)) ||
            (a ** logBase a b ≈ b))))
    ]
-- | Pointwise version of 'expRingLaws' for a representable container.
-- NOTE(review): as in 'expRingLaws', the @not (all nearZero a)@ disjunct
-- satisfies the property for almost every @a@ — confirm the guard's
-- polarity is intended.
expRingRepLaws ::
    ( Representable r
    , Foldable r
    , ExpRing a
    , Epsilon a
    , Ord a
    ) => [Law (r a)]
expRingRepLaws =
    [ ("for +ive b, a != 0,1: a ** logBase a b ≈ b"
      , Binary (\a b ->
          ( not (all prettyPositive b) ||
            not (all nearZero a) ||
            all (==one) a ||
            (all (==zero) a && all nearZero (logBase a b)) ||
            (a ** logBase a b ≈ b))))
    ]
-- | Round-trip laws for sqrt/(**2) and log/exp, restricted to small
-- comfortably-positive arguments to keep rounding error within Epsilon.
expFieldLaws ::
    ( ExpField a
    , Epsilon a
    , Fractional a
    , Ord a
    ) => [Law a]
expFieldLaws =
    [ ("sqrt . (**2) ≈ id"
      , Unary (\a -> not (prettyPositive a) || (a > 10.0) ||
                     (sqrt . (**(one+one)) $ a) ≈ a &&
                     ((**(one+one)) . sqrt $ a) ≈ a))
    , ("log . exp ≈ id"
      , Unary (\a -> not (prettyPositive a) || (a > 10.0) ||
                     (log . exp $ a) ≈ a &&
                     (exp . log $ a) ≈ a))
    ]
-- | Pointwise version of 'expFieldLaws' for a representable container.
expFieldRepLaws ::
    ( Representable r
    , Foldable r
    , ExpField a
    , Epsilon a
    , Fractional a
    , Ord a
    ) => [Law (r a)]
expFieldRepLaws =
    [ ("sqrt . (**2) ≈ id"
      , Unary (\a -> not (all prettyPositive a) || any (>10.0) a ||
                     (sqrt . (**(one+one)) $ a) ≈ a &&
                     ((**(one+one)) . sqrt $ a) ≈ a))
    , ("log . exp ≈ id"
      , Unary (\a -> not (all prettyPositive a) || any (>10.0) a ||
                     (log . exp $ a) ≈ a &&
                     (exp . log $ a) ≈ a))
    ]
-- | Laws for the scalar-addition action '.+'/'+.' on a module, up to
-- the Epsilon tolerance.
additiveModuleLaws ::
    ( Eq (r a)
    , Epsilon a
    , Foldable r
    , AdditiveModule r a
    ) => [Law2 (r a) a]
additiveModuleLaws =
    [
      ("additive module associative: (a + b) .+ c ≈ a + (b .+ c)"
      , Ternary2 (\a b c -> (a + b) .+ c ≈ a + (b .+ c)))
    , ("additive module commutative: (a + b) .+ c ≈ (a .+ c) + b"
      , Ternary2 (\a b c -> (a + b) .+ c ≈ (a .+ c) + b))
    , ("additive module unital: a .+ zero == a"
      , Unary2 (\a -> a .+ zero == a))
    , ("module additive equivalence: a .+ b ≈ b +. a"
      , Binary2 (\a b -> a .+ b ≈ b +. a))
    ]
-- | Exact-equality versions of the additive-module laws, wrapped in
-- 'expectFailure' where rounding is expected to break them.
additiveModuleLawsFail ::
    ( Eq (r a)
    , Show a
    , Arbitrary a
    , Show (r a)
    , Arbitrary (r a)
    , Epsilon a
    , AdditiveModule r a
    ) => [Law2 (r a) a]
additiveModuleLawsFail =
    [
      ("additive module associative: (a + b) .+ c == a + (b .+ c)"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .+ c == a + (b .+ c)))
    , ("additive module commutative: (a + b) .+ c == (a .+ c) + b"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .+ c == (a .+ c) + b))
    , ("additive module unital: a .+ zero == a"
      , Unary2 (\a -> a .+ zero == a))
    , ("module additive equivalence: a .+ b == b +. a"
      , Binary2 (\a b -> a .+ b == b +. a))
    ]
-- | Laws for the scalar-subtraction actions '.-'/'-.' on a module.
additiveGroupModuleLaws ::
    ( Eq (r a)
    , Epsilon a
    , Foldable r
    , AdditiveGroupModule r a
    ) => [Law2 (r a) a]
additiveGroupModuleLaws =
    [
      ("additive group module associative: (a + b) .- c ≈ a + (b .- c)"
      , Ternary2 (\a b c -> (a + b) .- c ≈ a + (b .- c)))
    , ("additive group module commutative: (a + b) .- c ≈ (a .- c) + b"
      , Ternary2 (\a b c -> (a + b) .- c ≈ (a .- c) + b))
    , ("additive group module unital: a .- zero == a"
      , Unary2 (\a -> a .- zero == a))
    , ("additive group module basis unital: a .- zero ≈ pureRep a"
      -- NOTE(review): the label says "a .- zero" but the property tests
      -- @b -. (a-a) ≈ pureRep b@ — confirm the label/property pairing.
      , Binary2 (\a b -> b -. (a-a) ≈ pureRep b))
    , ("module additive group equivalence: a .- b ≈ negate b +. a"
      , Binary2 (\a b -> a .- b ≈ negate b +. a))
    ]
-- | Exact-equality versions of the additive-group-module laws, with
-- the rounding-sensitive ones wrapped in 'expectFailure'.
additiveGroupModuleLawsFail ::
    ( Eq (r a)
    , Show a
    , Arbitrary a
    , Show (r a)
    , Arbitrary (r a)
    , Epsilon a
    , Foldable r
    , AdditiveGroupModule r a
    ) => [Law2 (r a) a]
additiveGroupModuleLawsFail =
    [
      ("additive group module associative: (a + b) .- c == a + (b .- c)"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .- c == a + (b .- c)))
    , ("additive group module commutative: (a + b) .- c == (a .- c) + b"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .- c == (a .- c) + b))
    , ("additive group module unital: a .- zero == a"
      , Unary2 (\a -> a .- zero == a))
    , ("additive group module basis unital: a .- zero == pureRep a"
      , Binary2 (\a b -> b -. (a-a) == pureRep b))
    , ("module additive group equivalence: a .- b ≈ negate b +. a"
      , Binary2 (\a b -> a .- b ≈ negate b +. a))
    ]
-- | Laws for the scalar-multiplication actions '.*'/'*.' on a module,
-- up to the Epsilon tolerance.
multiplicativeModuleLaws ::
    ( Eq (r a)
    , Epsilon a
    , Foldable r
    , AdditiveModule r a
    , MultiplicativeModule r a
    ) => [Law2 (r a) a]
multiplicativeModuleLaws =
    [ ("multiplicative module associative: (a * b) .* c ≈ a * (b .* c)"
      , Ternary2 (\a b c -> (a * b) .* c ≈ a * (b .* c)))
    , ("multiplicative module commutative: (a * b) .* c ≈ (a .* c) * b"
      -- The property previously duplicated the associative law above;
      -- it now tests the commutation stated in its label.
      , Ternary2 (\a b c -> (a * b) .* c ≈ (a .* c) * b))
    , ("multiplicative module unital: a .* one == a"
      , Unary2 (\a -> a .* one == a))
    , ("module right distribution: (a + b) .* c ≈ (a .* c) + (b .* c)"
      , Ternary2 (\a b c -> (a + b) .* c ≈ (a .* c) + (b .* c)))
    , ("module left distribution: c *. (a + b) ≈ (c *. a) + (c *. b)"
      , Ternary2 (\a b c -> c *. (a + b) ≈ (c *. a) + (c *. b)))
    , ("annihilation: a .* zero == zero", Unary2 (\a -> a .* zero == zero))
    , ("module multiplicative equivalence: a .* b ≈ b *. a"
      , Binary2 (\a b -> a .* b ≈ b *. a))
    ]
-- | Exact-equality versions of the multiplicative-module laws, with
-- the rounding-sensitive ones wrapped in 'expectFailure'.
multiplicativeModuleLawsFail ::
    ( Eq (r a)
    , Epsilon a
    , Show a
    , Arbitrary a
    , Show (r a)
    , Arbitrary (r a)
    , Foldable r
    , AdditiveModule r a
    , MultiplicativeModule r a
    ) => [Law2 (r a) a]
multiplicativeModuleLawsFail =
    [ ("multiplicative module associative: (a * b) .* c == a * (b .* c)"
      , Failiary2 $ expectFailure . (\a b c -> (a * b) .* c == a * (b .* c)))
    , ("multiplicative module commutative: (a * b) .* c == (a .* c) * b"
      -- The property previously duplicated the associative law above;
      -- it now tests the commutation stated in its label.
      , Failiary2 $ expectFailure . (\a b c -> (a * b) .* c == (a .* c) * b))
    , ("multiplicative module unital: a .* one == a"
      , Unary2 (\a -> a .* one == a))
    , ("module right distribution: (a + b) .* c == (a .* c) + (b .* c)"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .* c == (a .* c) + (b .* c)))
    , ("module left distribution: c *. (a + b) == (c *. a) + (c *. b)"
      , Failiary2 $ expectFailure . (\a b c -> c *. (a + b) == (c *. a) + (c *. b)))
    , ("annihilation: a .* zero == zero", Unary2 (\a -> a .* zero == zero))
    , ("module multiplicative equivalence: a .* b ≈ b *. a"
      , Binary2 (\a b -> a .* b ≈ b *. a))
    ]
-- | Laws for the scalar-division actions './'/'/.' on a module, each
-- guarded against division by (near-)zero.
multiplicativeGroupModuleLaws ::
    ( Eq (r a)
    , Eq a
    , Epsilon a
    , Foldable r
    , MultiplicativeGroupModule r a
    ) => [Law2 (r a) a]
multiplicativeGroupModuleLaws =
    [
      ("multiplicative group module associative: (a * b) ./ c ≈ a * (b ./ c)"
      , Ternary2 (\a b c -> c==zero || (a * b) ./ c ≈ a * (b ./ c)))
    , ("multiplicative group module commutative: (a * b) ./ c ≈ (a ./ c) * b"
      , Ternary2 (\a b c -> c==zero || (a * b) ./ c ≈ (a ./ c) * b))
    , ("multiplicative group module unital: a ./ one == a"
      , Unary2 (\a -> nearZero a || a ./ one == a))
    , ("multiplicative group module basis unital: a /. one ≈ pureRep a"
      -- NOTE(review): the label says "a /. one" but the property tests
      -- @b /. (a/a) ≈ pureRep b@ — confirm the label/property pairing.
      , Binary2 (\a b -> a==zero || b /. (a/a) ≈ pureRep b))
    , ("module multiplicative group equivalence: a ./ b ≈ recip b *. a"
      , Binary2 (\a b -> b==zero || a ./ b ≈ recip b *. a))
    ]
-- | Multiplicative group module laws for representations where the
-- associative law is known to fail.
--
-- NOTE(review): despite the @Fail@ suffix, only the associative law is
-- wrapped in 'expectFailure'; the remaining laws are expected to hold.
-- Confirm that this asymmetry is intentional.
multiplicativeGroupModuleLawsFail ::
  ( Eq a
  , Show a
  , Arbitrary a
  , Eq (r a)
  , Show (r a)
  , Arbitrary (r a)
  , Epsilon a
  , Foldable r
  , MultiplicativeGroupModule r a
  ) => [Law2 (r a) a]
multiplicativeGroupModuleLawsFail =
  [
    ("multiplicative group module associative: (a * b) ./ c == a * (b ./ c)"
    , Failiary2 $ expectFailure .
      (\a b c -> c==zero || (a * b) ./ c == a * (b ./ c)))
  , ("multiplicative group module commutative: (a * b) ./ c ≈ (a ./ c) * b"
    , Ternary2 (\a b c -> c==zero || (a * b) ./ c ≈ (a ./ c) * b))
  , ("multiplicative group module unital: a ./ one == a"
    , Unary2 (\a -> nearZero a || a ./ one == a))
  -- NOTE(review): as in 'multiplicativeGroupModuleLaws', the predicate
  -- uses @b /. (a/a)@ rather than the literal @a /. one@ of the label.
  , ("multiplicative group module basis unital: a /. one ≈ pureRep a"
    , Binary2 (\a b -> a==zero || b /. (a/a) ≈ pureRep b))
  , ("module multiplicative group equivalence: a ./ b ≈ recip b *. a"
    , Binary2 (\a b -> b==zero || a ./ b ≈ recip b *. a))
  ]
-- | Laws for element-wise ('.+.') addition: associativity up to
-- epsilon, exact identities and commutativity.
additiveBasisLaws ::
  ( Eq (r a)
  , Foldable r
  , Epsilon a
  , AdditiveBasis r a
  ) => [Law (r a)]
additiveBasisLaws =
  [ ( "associative: (a .+. b) .+. c ≈ a .+. (b .+. c)"
    , Ternary (\x y z -> (x .+. y) .+. z ≈ x .+. (y .+. z)))
  , ("left id: zero .+. a = a", Unary (\x -> zero .+. x == x))
  , ("right id: a .+. zero = a", Unary (\x -> x .+. zero == x))
  , ("commutative: a .+. b == b .+. a", Binary (\x y -> x .+. y == y .+. x))
  ]
-- | Element-wise subtraction law: subtracting a value from itself
-- yields the pointwise zero.
additiveGroupBasisLaws ::
  ( Eq (r a)
  , AdditiveGroupBasis r a
  ) => [Law (r a)]
additiveGroupBasisLaws =
  [ ("minus: a .-. a = pureRep zero", Unary (\v -> (v .-. v) == pureRep zero))
  ]
-- | Laws for element-wise ('.*.') multiplication.
multiplicativeBasisLaws ::
  ( Eq (r a)
  , MultiplicativeBasis r a
  ) => [Law (r a)]
multiplicativeBasisLaws =
  [ ("associative: (a .*. b) .*. c == a .*. (b .*. c)"
    , Ternary (\a b c -> (a .*. b) .*. c == a .*. (b .*. c)))
  , ("left id: one .*. a = a", Unary (\a -> one .*. a == a))
  , ("right id: a .*. one = a", Unary (\a -> a .*. one == a))
  -- fixed: the predicate previously compared against @b * a@ (the
  -- whole-object '*'), not the element-wise @b .*. a@ stated in the
  -- label and implied by the 'MultiplicativeBasis' constraint.
  , ("commutative: a .*. b == b .*. a", Binary (\a b -> a .*. b == b .*. a))
  ]
-- | Element-wise multiplication laws for representations where
-- associativity is known to fail ('expectFailure'-wrapped).
multiplicativeBasisLawsFail ::
  ( Eq (r a)
  , Show (r a)
  , Arbitrary (r a)
  , MultiplicativeBasis r a
  ) => [Law (r a)]
multiplicativeBasisLawsFail =
  [ ("associative: (a .*. b) .*. c == a .*. (b .*. c)"
    , Failiary $ expectFailure . (\a b c -> (a .*. b) .*. c == a .*. (b .*. c)))
  , ("left id: one .*. a = a", Unary (\a -> one .*. a == a))
  , ("right id: a .*. one = a", Unary (\a -> a .*. one == a))
  -- fixed: predicate previously used the whole-object @b * a@ instead
  -- of the element-wise @b .*. a@ stated in the label.
  , ("commutative: a .*. b == b .*. a", Binary (\a b -> a .*. b == b .*. a))
  ]
-- | Element-wise division law: a value divided by itself is pointwise
-- 'one', guarded against the pointwise-zero input.
multiplicativeGroupBasisLaws ::
  ( Eq (r a)
  , Epsilon a
  , Foldable r
  , MultiplicativeGroupBasis r a
  ) => [Law (r a)]
multiplicativeGroupBasisLaws =
  [ ( "minus: a ./. a ≈ pureRep one"
    , Unary (\v -> v == pureRep zero || (v ./. v) ≈ pureRep one))
  ]
-- | Banach-space law: a normalized (non-zero) value has size one.
--
-- NOTE(review): the predicate compares against @b/b@ (guarded only by
-- the @a@ test) rather than a literal 'one' at type @b@ — presumably to
-- fix the result type; confirm @b@ can never be zero here.
banachLaws ::
  ( Eq (r a)
  , Epsilon b
  , MultiplicativeGroup b
  , Banach r a
  , Normed (r a) b
  ) => [Law2 (r a) b]
banachLaws =
  [ -- Banach
    ( "size (normalize a) ≈ one"
    , Binary2 (\a b -> a==pureRep zero || size (normalize a) ≈ (b/b)))
  ]
| tonyday567/tower | test/test.hs | bsd-3-clause | 30,551 | 0 | 21 | 9,075 | 11,442 | 6,208 | 5,234 | 750 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Aws.DynamoDb.Core
-- Copyright : Soostone Inc, Chris Allen
-- License : BSD3
--
-- Maintainer : Ozgun Ataman <[email protected]>
-- Stability : experimental
--
-- Shared types and utilities for DyanmoDb functionality.
----------------------------------------------------------------------------
module Aws.DynamoDb.Core
(
-- * Configuration and Regions
Region (..)
, ddbLocal
, ddbUsEast1
, ddbUsWest1
, ddbUsWest2
, ddbEuWest1
, ddbEuCentral1
, ddbApNe1
, ddbApSe1
, ddbApSe2
, ddbSaEast1
, DdbConfiguration (..)
-- * DynamoDB values
, DValue (..)
-- * Converting to/from 'DValue'
, DynVal(..)
, toValue, fromValue
, Bin (..)
-- * Defining new 'DynVal' instances
, DynData(..)
, DynBinary(..), DynNumber(..), DynString(..)
-- * Working with key/value pairs
, Attribute (..)
, parseAttributeJson
, attributeJson
, attributesJson
, attrTuple
, attr
, attrAs
, text, int, double
, PrimaryKey (..)
, hk
, hrk
-- * Working with objects (attribute collections)
, Item
, item
, attributes
, ToDynItem (..)
, FromDynItem (..)
, fromItem
, Parser (..)
, getAttr
, getAttr'
-- * Common types used by operations
, Conditions (..)
, conditionsJson
, expectsJson
, Condition (..)
, conditionJson
, CondOp (..)
, CondMerge (..)
, ConsumedCapacity (..)
, ReturnConsumption (..)
, ItemCollectionMetrics (..)
, ReturnItemCollectionMetrics (..)
, UpdateReturn (..)
, QuerySelect (..)
, querySelectJson
-- * Size estimation
, DynSize (..)
, nullAttr
-- * Responses & Errors
, DdbResponse (..)
, DdbErrCode (..)
, shouldRetry
, DdbError (..)
-- * Internal Helpers
, ddbSignQuery
, AmazonError (..)
, ddbResponseConsumer
, ddbHttp
, ddbHttps
) where
-------------------------------------------------------------------------------
import Control.Applicative
import qualified Control.Exception as C
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.Resource (throwM)
import Crypto.Hash
import Data.Aeson
import qualified Data.Aeson as A
import Data.Aeson.Types (Pair, parseEither)
import qualified Data.Aeson.Types as A
import qualified Data.Attoparsec.ByteString as AttoB (endOfInput)
import qualified Data.Attoparsec.Text as Atto
import Data.Byteable
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Char8 as B
import qualified Data.CaseInsensitive as CI
import Data.Conduit
import Data.Conduit.Attoparsec (sinkParser)
import Data.Default
import Data.Function (on)
import qualified Data.HashMap.Strict as HM
import Data.Int
import Data.IORef
import Data.List
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import Data.Proxy
import Data.Scientific
import qualified Data.Serialize as Ser
import qualified Data.Set as S
import Data.String
import Data.Tagged
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time
import Data.Typeable
import Data.Word
import qualified Network.HTTP.Conduit as HTTP
import qualified Network.HTTP.Types as HTTP
import Safe
-------------------------------------------------------------------------------
import Aws.Core
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- | Numeric values stored in DynamoDb. Only used in defining new
-- 'DynVal' instances.
--
-- Zero-cost wrapper over 'Scientific' (DynamoDb's @N@ type).
newtype DynNumber = DynNumber { unDynNumber :: Scientific }
    deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | String values stored in DynamoDb. Only used in defining new
-- 'DynVal' instances.
--
-- Zero-cost wrapper over strict 'T.Text' (DynamoDb's @S@ type).
newtype DynString = DynString { unDynString :: T.Text }
    deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | Binary values stored in DynamoDb. Only used in defining new
-- 'DynVal' instances.
--
-- Zero-cost wrapper over a strict 'B.ByteString' (DynamoDb's @B@ type).
newtype DynBinary = DynBinary { unDynBinary :: B.ByteString }
    deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | An internally used closed typeclass for values that have direct
-- DynamoDb representations. Based on AWS API, this is basically
-- numbers, strings and binary blobs.
--
-- This is here so that any 'DynVal' haskell value can automatically
-- be lifted to a list or a 'Set' without any instance code
-- duplication.
--
-- Do not try to create your own instances.
class Ord a => DynData a where
    -- | Embed the representation into the 'DValue' wire type.
    fromData :: a -> DValue
    -- | Project a 'DValue' back out; 'Nothing' on a constructor mismatch.
    toData :: DValue -> Maybe a
-- | @N@: scalar numbers.
instance DynData DynNumber where
    fromData (DynNumber n) = DNum n
    toData (DNum n) = Just (DynNumber n)
    toData _        = Nothing

-- | @NS@: sets of numbers.
instance DynData (S.Set DynNumber) where
    fromData = DNumSet . S.map unDynNumber
    toData (DNumSet xs) = Just (S.map DynNumber xs)
    toData _            = Nothing

-- | @S@: strings.
instance DynData DynString where
    fromData (DynString t) = DString t
    toData (DString t) = Just (DynString t)
    toData _           = Nothing

-- | @SS@: sets of strings.
instance DynData (S.Set DynString) where
    fromData = DStringSet . S.map unDynString
    toData (DStringSet xs) = Just (S.map DynString xs)
    toData _               = Nothing

-- | @B@: binary blobs.
instance DynData DynBinary where
    fromData (DynBinary b) = DBinary b
    toData (DBinary b) = Just (DynBinary b)
    toData _           = Nothing

-- | @BS@: sets of binary blobs.
instance DynData (S.Set DynBinary) where
    fromData = DBinSet . S.map unDynBinary
    toData (DBinSet xs) = Just (S.map DynBinary xs)
    toData _            = Nothing

-- | Identity embedding: a 'DValue' already is its own representation.
instance DynData DValue where
    fromData = id
    toData = Just
-------------------------------------------------------------------------------
-- | Class of Haskell types that can be represented as DynamoDb values.
--
-- This is the conversion layer; instantiate this class for your own
-- types and then use the 'toValue' and 'fromValue' combinators to
-- convert in application code.
--
-- Each Haskell type instantiated with this class will map to a
-- DynamoDb-supported type that most naturally represents it.
--
-- Expected law (roundtrip): @fromRep (toRep a) == Just a@ — TODO
-- confirm all instances below obey this.
class DynData (DynRep a) => DynVal a where
    -- | Which of the 'DynData' instances does this data type directly
    -- map to?
    type DynRep a
    -- | Convert to representation
    toRep :: a -> DynRep a
    -- | Convert from representation
    fromRep :: DynRep a -> Maybe a
-------------------------------------------------------------------------------
-- | Any singular 'DynVal' can be upgraded to a list (stored as a set;
-- decoding fails if any element fails to decode).
instance (DynData (DynRep [a]), DynVal a) => DynVal [a] where
    type DynRep [a] = S.Set (DynRep a)
    fromRep = mapM fromRep . S.toList
    toRep   = S.fromList . map toRep

-------------------------------------------------------------------------------
-- | Any singular 'DynVal' can be upgraded to a 'Set'.
instance (DynData (DynRep (S.Set a)), DynVal a, Ord a) => DynVal (S.Set a) where
    type DynRep (S.Set a) = S.Set (DynRep a)
    fromRep = fmap S.fromList . mapM fromRep . S.toList
    toRep   = S.map toRep

-- | Trivial instance: a 'DValue' converts to itself.
instance DynVal DValue where
    type DynRep DValue = DValue
    fromRep = Just
    toRep = id
-- Integral types all round-trip through 'DynNumber'. Decoding uses
-- 'toIntegral', which floors any fractional stored value.

instance DynVal Int where
    type DynRep Int = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Int8 where
    type DynRep Int8 = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Int16 where
    type DynRep Int16 = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Int32 where
    type DynRep Int32 = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Int64 where
    type DynRep Int64 = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Word8 where
    type DynRep Word8 = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Word16 where
    type DynRep Word16 = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Word32 where
    type DynRep Word32 = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Word64 where
    type DynRep Word64 = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral

instance DynVal Integer where
    type DynRep Integer = DynNumber
    fromRep (DynNumber x) = toIntegral x
    toRep = DynNumber . fromIntegral
-- | Strict 'T.Text' maps to DynamoDb strings.
instance DynVal T.Text where
    type DynRep T.Text = DynString
    fromRep (DynString t) = Just t
    toRep = DynString

-- | Strict 'B.ByteString' maps to DynamoDb binary blobs.
instance DynVal B.ByteString where
    type DynRep B.ByteString = DynBinary
    fromRep (DynBinary b) = Just b
    toRep = DynBinary

-- | 'Double' round-trips through 'Scientific'.
instance DynVal Double where
    type DynRep Double = DynNumber
    fromRep (DynNumber x) = Just (toRealFloat x)
    toRep = DynNumber . fromFloatDigits
-------------------------------------------------------------------------------
-- | Encoded as number of days (the Modified Julian Day number).
instance DynVal Day where
    type DynRep Day = DynNumber
    fromRep (DynNumber n) = ModifiedJulianDay <$> toIntegral n
    toRep = DynNumber . fromIntegral . toModifiedJulianDay
-------------------------------------------------------------------------------
-- | Losslessly encoded via 'Integer' picoseconds (see 'toTS' /
-- 'fromTS').
instance DynVal UTCTime where
    type DynRep UTCTime = DynNumber
    fromRep = fmap fromTS . fromRep
    toRep = toRep . toTS
-------------------------------------------------------------------------------
-- | One second in picoseconds, as an exact 'Rational'.
pico :: Rational
pico = 10 ^ (12 :: Integer)

-------------------------------------------------------------------------------
-- | One day in picoseconds.
dayPico :: Integer
dayPico = 86400 * round pico

-------------------------------------------------------------------------------
-- | Convert a 'UTCTime' to an absolute picosecond count.
--
-- TODO: Optimize performance?
toTS :: UTCTime -> Integer
toTS (UTCTime (ModifiedJulianDay day) tod) =
    day * dayPico + floor (toRational tod * pico)

-------------------------------------------------------------------------------
-- | Inverse of 'toTS': picoseconds back to a 'UTCTime'.
--
-- TODO: Optimize performance?
fromTS :: Integer -> UTCTime
fromTS ts = UTCTime (ModifiedJulianDay d) (fromRational (toRational r / pico))
  where
    (d, r) = ts `divMod` dayPico
-- | Encoded as 0 and 1; any other stored number decodes to 'Nothing'.
instance DynVal Bool where
    type DynRep Bool = DynNumber
    fromRep (DynNumber sc) = toIntegral sc >>= decode
      where
        decode :: Int -> Maybe Bool
        decode 0 = Just False
        decode 1 = Just True
        decode _ = Nothing
    toRep True  = DynNumber 1
    toRep False = DynNumber 0
-- | Type wrapper for binary data to be written to DynamoDB. Wrap any
-- 'Serialize' instance in there and 'DynVal' will know how to
-- automatically handle conversions in binary form.
newtype Bin a = Bin { getBin :: a }
    deriving (Eq,Show,Read,Ord,Typeable,Enum)
-- | Serializes with "Data.Serialize"; a decode failure maps to
-- 'Nothing' rather than an error.
instance (Ser.Serialize a) => DynVal (Bin a) where
    type DynRep (Bin a) = DynBinary
    toRep (Bin i) = DynBinary (Ser.encode i)
    fromRep (DynBinary i) = either (const Nothing) (Just . Bin) $
                            Ser.decode i
-------------------------------------------------------------------------------
-- | Encode a Haskell value as a DynamoDb 'DValue'.
toValue :: DynVal a => a -> DValue
toValue = fromData . toRep

-------------------------------------------------------------------------------
-- | Decode a Haskell value from a 'DValue'; 'Nothing' when the stored
-- wire type or contents do not match.
fromValue :: DynVal a => DValue -> Maybe a
fromValue v = fromRep =<< toData v
-- | Collapse a 'RealFrac' value into an 'Integral'.
--
-- NOTE(review): this never returns 'Nothing' and silently 'floor's
-- fractional input (e.g. a stored @1.5@ read as 'Int' yields @1@).
-- Presumably acceptable for the integral round-trips above, but
-- confirm that lossy truncation is intended at every call site.
toIntegral :: (Integral a, RealFrac a1) => a1 -> Maybe a
toIntegral sc = Just $ floor sc
-- | Value types natively recognized by DynamoDb. We pretty much
-- exactly reflect the AWS API onto Haskell types.
--
-- Constructors map 1:1 onto the wire tags @N@, @S@, @B@, @NS@, @SS@
-- and @BS@ (see the 'ToJSON' / 'FromJSON' instances below).
data DValue
    = DNum Scientific
    | DString T.Text
    | DBinary B.ByteString
    -- ^ Binary data will automatically be base64 marshalled.
    | DNumSet (S.Set Scientific)
    | DStringSet (S.Set T.Text)
    | DBinSet (S.Set B.ByteString)
    -- ^ Binary data will automatically be base64 marshalled.
    deriving (Eq,Show,Read,Ord,Typeable)
-- | With @OverloadedStrings@, string literals denote 'DString' values.
instance IsString DValue where
    fromString = DString . T.pack
-------------------------------------------------------------------------------
-- | Primary keys consist of either just a Hash key (mandatory) or a
-- hash key and a range key (optional).
--
-- Build with 'hk' (hash-only) or 'hrk' (hash + range).
data PrimaryKey = PrimaryKey {
      pkHash :: Attribute
    , pkRange :: Maybe Attribute
    } deriving (Read,Show,Ord,Eq,Typeable)
-------------------------------------------------------------------------------
-- | Construct a hash-only primary key.
--
-- >>> hk "user-id" "ABCD"
--
-- >>> hk "user-id" (mkVal 23)
hk :: T.Text -> DValue -> PrimaryKey
hk name val = PrimaryKey (attr name val) Nothing
-------------------------------------------------------------------------------
-- | Construct a hash-and-range primary key.
hrk :: T.Text  -- ^ Hash key name
    -> DValue  -- ^ Hash key value
    -> T.Text  -- ^ Range key name
    -> DValue  -- ^ Range key value
    -> PrimaryKey
hrk hashName hashVal rangeName rangeVal =
    PrimaryKey (attr hashName hashVal) (Just (attr rangeName rangeVal))
-- | A hash-only key serializes to its single attribute pair; a
-- hash+range key merges both pairs into one JSON object.
--
-- NOTE(review): the irrefutable @Object@ let-patterns rely on
-- 'ToJSON Attribute' always producing an 'Object' (it does, via
-- 'object' below); a change there would crash here at runtime.
instance ToJSON PrimaryKey where
    toJSON (PrimaryKey h Nothing) = toJSON h
    toJSON (PrimaryKey h (Just r)) =
      let Object p1 = toJSON h
          Object p2 = toJSON r
      in Object (p1 `HM.union` p2)
-- | A key-value pair
data Attribute = Attribute {
      attrName :: T.Text
      -- ^ Attribute (column) name
    , attrVal :: DValue
      -- ^ Attribute value
    } deriving (Read,Show,Ord,Eq,Typeable)
-- | View an 'Attribute' as a name/value tuple.
attrTuple :: Attribute -> (T.Text, DValue)
attrTuple (Attribute n v) = (n, v)
-- | Convenience constructor for key-value pairs: encodes the value via
-- 'toValue'.
attr :: DynVal a => T.Text -> a -> Attribute
attr name = Attribute name . toValue
-- | 'attr' with a type witness, to pin down the value type at call
-- sites that would otherwise be ambiguous.
--
-- >> item [ attrAs text "name" "john" ]
attrAs :: DynVal a => Proxy a -> T.Text -> a -> Attribute
attrAs _ = attr
-- | Type witness for 'Text'. See 'attrAs'.
text :: Proxy T.Text
text = Proxy
-- | Type witness for 'Integer'. See 'attrAs'.
int :: Proxy Integer
int = Proxy
-- | Type witness for 'Double'. See 'attrAs'.
double :: Proxy Double
double = Proxy
-- | A DynamoDb object is simply a key-value dictionary.
type Item = M.Map T.Text DValue

-------------------------------------------------------------------------------
-- | Pack a list of attributes into an 'Item'. Later duplicates of a
-- name win, per 'M.fromList'.
item :: [Attribute] -> Item
item attrs = M.fromList [ (n, v) | Attribute n v <- attrs ]
-------------------------------------------------------------------------------
-- | Unpack an 'Item' into a list of attributes (inverse of 'item').
attributes :: M.Map T.Text DValue -> [Attribute]
attributes = map (uncurry Attribute) . M.toList
-- | Render any 'Show'able value as strict 'T.Text'.
showT :: Show a => a -> T.Text
showT x = T.pack (show x)
-- | Serialize to the AWS wire format: a one-pair object whose key is
-- the type tag (@N@, @S@, @B@, @NS@, @SS@, @BS@). Numbers go over the
-- wire as strings; binary data is base64 encoded.
instance ToJSON DValue where
    toJSON (DNum i) = object ["N" .= showT i]
    toJSON (DString i) = object ["S" .= i]
    toJSON (DBinary i) = object ["B" .= (T.decodeUtf8 $ Base64.encode i)]
    toJSON (DNumSet i) = object ["NS" .= map showT (S.toList i)]
    toJSON (DStringSet i) = object ["SS" .= S.toList i]
    toJSON (DBinSet i) = object ["BS" .= map (T.decodeUtf8 . Base64.encode) (S.toList i)]
    -- Defensive catch-all: unreachable today (all constructors are
    -- matched above) but gives a clear message if a constructor is
    -- added without extending this instance.
    toJSON x = error $ "aws: bug: DynamoDB can't handle " ++ show x
-- | Parse the AWS wire format back into a 'DValue': the incoming value
-- must be a one-pair object keyed by the type tag. Any other shape, or
-- a base64/number parse failure, fails in the parser monad.
instance FromJSON DValue where
    parseJSON o = do
      (obj :: [(T.Text, Value)]) <- M.toList `liftM` parseJSON o
      case obj of
        [("N", numStr)] -> DNum <$> parseScientific numStr
        [("S", str)] -> DString <$> parseJSON str
        [("B", bin)] -> do
            res <- (Base64.decode . T.encodeUtf8) <$> parseJSON bin
            either fail (return . DBinary) res
        [("NS", s)] -> do xs <- mapM parseScientific =<< parseJSON s
                          return $ DNumSet $ S.fromList xs
        [("SS", s)] -> DStringSet <$> parseJSON s
        [("BS", s)] -> do
            xs <- mapM (either fail return . Base64.decode . T.encodeUtf8)
                  =<< parseJSON s
            return $ DBinSet $ S.fromList xs
        x -> fail $ "aws: unknown dynamodb value: " ++ show x
      where
        -- Numbers arrive as JSON strings; accept a raw JSON number too.
        parseScientific (String str) =
            case Atto.parseOnly Atto.scientific str of
              Left e -> fail ("parseScientific failed: " ++ e)
              Right a -> return a
        parseScientific (Number n) = return n
        parseScientific _ = fail "Unexpected JSON type in parseScientific"
-- | An 'Attribute' serializes to a one-pair JSON object.
instance ToJSON Attribute where
    toJSON a = object [attributeJson a]
-------------------------------------------------------------------------------
-- | Parse a JSON object into the attribute list it encodes.
--
-- Fails in the 'A.Parser' monad (rather than crashing via 'error')
-- when the value is not a JSON object, so callers using 'parseEither'
-- and friends receive a 'Left' instead of an imprecise exception.
parseAttributeJson :: Value -> A.Parser [Attribute]
parseAttributeJson (Object v) = mapM conv $ HM.toList v
    where
      conv (k, o) = Attribute k <$> parseJSON o
parseAttributeJson _ = fail "Attribute JSON must be an Object"
-- | Convert a list of attributes into a JSON object for AWS.
attributesJson :: [Attribute] -> Value
attributesJson = object . map attributeJson

-- | Convert a single attribute into a JSON pair.
attributeJson :: Attribute -> Pair
attributeJson (Attribute nm v) = nm .= v
-------------------------------------------------------------------------------
-- | Errors defined by AWS.
--
-- Constructor names mirror the error-code strings AWS returns; they
-- are decoded via 'Read' in 'ddbResponseConsumer', so renaming a
-- constructor would break error decoding.
data DdbErrCode
    = AccessDeniedException
    | ConditionalCheckFailedException
    | IncompleteSignatureException
    | InvalidSignatureException
    | LimitExceededException
    | MissingAuthenticationTokenException
    | ProvisionedThroughputExceededException
    | ResourceInUseException
    | ResourceNotFoundException
    | ThrottlingException
    | ValidationException
    | RequestTooLarge
    | InternalFailure
    | InternalServerError
    | ServiceUnavailableException
    | SerializationException
    -- ^ Raised by AWS when the request JSON is missing fields or is
    -- somehow malformed.
    deriving (Read,Show,Eq,Typeable)
-------------------------------------------------------------------------------
-- | Whether the action should be retried based on the received error:
-- transient throttling/capacity/server-side errors are retryable,
-- everything else is not.
shouldRetry :: DdbErrCode -> Bool
shouldRetry e = e `elem` retryable
  where
    retryable =
      [ LimitExceededException
      , ProvisionedThroughputExceededException
      , ResourceInUseException
      , ThrottlingException
      , InternalFailure
      , InternalServerError
      , ServiceUnavailableException
      ]
-------------------------------------------------------------------------------
-- | Errors related to this library.
data DdbLibraryError
    = UnknownDynamoErrCode T.Text
    -- ^ A DynamoDB error code we do not know about.
    | JsonProtocolError Value T.Text
    -- ^ A JSON response we could not parse.
    deriving (Show,Eq,Typeable)
-- | Potential errors raised by DynamoDB
data DdbError = DdbError {
      ddbStatusCode :: Int
    -- ^ 200 if successful, 400 for client errors and 500 for
    -- server-side errors.
    , ddbErrCode :: DdbErrCode
    , ddbErrMsg :: T.Text
    } deriving (Show,Eq,Typeable)
-- Both error types can be thrown/caught via 'Control.Exception'.
instance C.Exception DdbError
instance C.Exception DdbLibraryError
-- | Response metadata that is present in every DynamoDB response.
data DdbResponse = DdbResponse {
      ddbrCrc :: Maybe T.Text
    -- ^ The x-amz-crc32 response header, when present.
    , ddbrMsgId :: Maybe T.Text
    -- ^ The x-amzn-RequestId response header, when present.
    }
-- | Render response metadata for logging: the request id and, when
-- present, the crc32 checksum header.
--
-- Fixed: the first 'DdbResponse' field is 'ddbrCrc' (populated from
-- the @x-amz-crc32@ header in 'ddbResponseConsumer'), but it was bound
-- as @id2@ and logged under the misleading label @x-amz-id-2@.
instance Loggable DdbResponse where
    toLogText (DdbResponse crc rid) =
        "DynamoDB: request ID=" `mappend`
        fromMaybe "<none>" rid `mappend`
        ", x-amz-crc32=" `mappend`
        fromMaybe "<none>" crc
-- | Metadata accumulates left-biased: the first 'Just' for each field
-- wins ('mplus' on 'Maybe').
--
-- NOTE(review): a 'Monoid' instance without a 'Semigroup' instance
-- only compiles on older GHC/base; confirm the supported compiler
-- range before modernizing.
instance Monoid DdbResponse where
    mempty = DdbResponse Nothing Nothing
    mappend a b = DdbResponse (ddbrCrc a `mplus` ddbrCrc b) (ddbrMsgId a `mplus` ddbrMsgId b)
-- | A DynamoDb regional endpoint: host to connect to plus the region
-- name used when signing requests.
data Region = Region {
      rUri :: B.ByteString
    , rName :: B.ByteString
    } deriving (Eq,Show,Read,Typeable)
-- | Service configuration for DynamoDb requests.
data DdbConfiguration qt = DdbConfiguration {
      ddbcRegion :: Region
    -- ^ The regional endpoint. Ex: 'ddbUsEast'
    , ddbcProtocol :: Protocol
    -- ^ 'HTTP' or 'HTTPS'
    , ddbcPort :: Maybe Int
    -- ^ Port override (mostly for local dev connection)
    } deriving (Show,Typeable)
-- | Defaults to us-east-1 over HTTPS.
instance Default (DdbConfiguration NormalQuery) where
    def = DdbConfiguration ddbUsEast1 HTTPS Nothing
instance DefaultServiceConfiguration (DdbConfiguration NormalQuery) where
  defServiceConfig = ddbHttps ddbUsEast1
  debugServiceConfig = ddbHttp ddbUsEast1
-------------------------------------------------------------------------------
-- | DynamoDb local connection (for development)
ddbLocal :: Region
ddbLocal = Region "127.0.0.1" "local"
-- Each constant below pairs a regional endpoint host with the region
-- name used for request signing.
ddbUsEast1 :: Region
ddbUsEast1 = Region "dynamodb.us-east-1.amazonaws.com" "us-east-1"
ddbUsWest1 :: Region
ddbUsWest1 = Region "dynamodb.us-west-1.amazonaws.com" "us-west-1"
ddbUsWest2 :: Region
ddbUsWest2 = Region "dynamodb.us-west-2.amazonaws.com" "us-west-2"
ddbEuWest1 :: Region
ddbEuWest1 = Region "dynamodb.eu-west-1.amazonaws.com" "eu-west-1"
ddbEuCentral1 :: Region
ddbEuCentral1 = Region "dynamodb.eu-central-1.amazonaws.com" "eu-central-1"
ddbApNe1 :: Region
ddbApNe1 = Region "dynamodb.ap-northeast-1.amazonaws.com" "ap-northeast-1"
ddbApSe1 :: Region
ddbApSe1 = Region "dynamodb.ap-southeast-1.amazonaws.com" "ap-southeast-1"
ddbApSe2 :: Region
ddbApSe2 = Region "dynamodb.ap-southeast-2.amazonaws.com" "ap-southeast-2"
ddbSaEast1 :: Region
ddbSaEast1 = Region "dynamodb.sa-east-1.amazonaws.com" "sa-east-1"
-- | Configuration for plain-HTTP access to the given region.
ddbHttp :: Region -> DdbConfiguration NormalQuery
ddbHttp endpoint = DdbConfiguration endpoint HTTP Nothing
-- | Configuration for HTTPS access to the given region.
ddbHttps :: Region -> DdbConfiguration NormalQuery
ddbHttps endpoint = DdbConfiguration endpoint HTTPS Nothing
-- | Build a signed POST request for a DynamoDb operation.
--
-- Encodes the operation payload as JSON, constructs the canonical
-- request string and signs it (HMAC-SHA256, AWS signature v4 via
-- 'authorizationV4'). The @target@ is the operation name appended to
-- 'dyApiVersion' in the @x-amz-target@ header.
ddbSignQuery
    :: A.ToJSON a
    => B.ByteString
    -> a
    -> DdbConfiguration qt
    -> SignatureData
    -> SignedQuery
ddbSignQuery target body di sd
    = SignedQuery {
        sqMethod = Post
      , sqProtocol = ddbcProtocol di
      , sqHost = host
      , sqPort = fromMaybe (defaultPort (ddbcProtocol di)) (ddbcPort di)
      , sqPath = "/"
      , sqQuery = []
      , sqDate = Just $ signatureTime sd
      , sqAuthorization = Just auth
      , sqContentType = Just "application/x-amz-json-1.0"
      , sqContentMd5 = Nothing
      , sqAmzHeaders = amzHeaders ++ maybe [] (\tok -> [("x-amz-security-token",tok)]) (iamToken credentials)
      , sqOtherHeaders = []
      , sqBody = Just $ HTTP.RequestBodyLBS bodyLBS
      , sqStringToSign = canonicalRequest
      }
    where
      credentials = signatureCredentials sd
      Region{..} = ddbcRegion di
      host = rUri
      sigTime = fmtTime "%Y%m%dT%H%M%SZ" $ signatureTime sd
      bodyLBS = A.encode body
      -- hex-encoded SHA256 of the request body, required by SigV4
      bodyHash = Base16.encode $ toBytes (hashlazy bodyLBS :: Digest SHA256)
      -- for some reason AWS doesn't want the x-amz-security-token in the canonical request
      amzHeaders = [ ("x-amz-date", sigTime)
                   , ("x-amz-target", dyApiVersion <> target)
                   ]
      -- signed headers must appear sorted by (lowercased) name
      canonicalHeaders = sortBy (compare `on` fst) $ amzHeaders ++
                         [("host", host),
                          ("content-type", "application/x-amz-json-1.0")]
      canonicalRequest = B.concat $ intercalate ["\n"] (
                            [ ["POST"]
                            , ["/"]
                            , [] -- query string
                            ] ++
                            map (\(a,b) -> [CI.foldedCase a,":",b]) canonicalHeaders ++
                            [ [] -- end headers
                            , intersperse ";" (map (CI.foldedCase . fst) canonicalHeaders)
                            , [bodyHash]
                            ])
      -- NOTE(review): the signed-header list here is a fixed string; it
      -- must stay in sync with 'canonicalHeaders' above.
      auth = authorizationV4 sd HmacSHA256 rName "dynamodb"
                             "content-type;host;x-amz-date;x-amz-target"
                             canonicalRequest
-- | The error object DynamoDb returns in a non-200 response body:
-- a namespaced @__type@ plus an optional human-readable message.
data AmazonError = AmazonError {
      aeType    :: T.Text
    , aeMessage :: Maybe T.Text
    }

instance FromJSON AmazonError where
    parseJSON (Object v) = AmazonError
        <$> v .: "__type"
        <*> (Just <$> (v .: "message" <|> v .: "Message") <|> pure Nothing)
    -- Fixed: fail inside the parser instead of calling 'error' —
    -- callers use 'parseEither' and expect a 'Left' for a malformed
    -- body, not an imprecise exception.
    parseJSON _ = fail "aws: unexpected AmazonError message"
-------------------------------------------------------------------------------
-- | Consume a DynamoDb HTTP response: parse the body as JSON, record
-- request-id/crc metadata in @ref@, and either decode the payload
-- (HTTP 200) or decode and throw the AWS error object.
ddbResponseConsumer :: A.FromJSON a => IORef DdbResponse -> HTTPResponseConsumer a
ddbResponseConsumer ref resp = do
    val <- HTTP.responseBody resp $$+- sinkParser (A.json' <* AttoB.endOfInput)
    case statusCode of
      200 -> rSuccess val
      _   -> rError val
  where
    header = fmap T.decodeUtf8 . flip lookup (HTTP.responseHeaders resp)
    amzId = header "x-amzn-RequestId"
    amzCrc = header "x-amz-crc32"
    meta = DdbResponse amzCrc amzId
    tellMeta = liftIO $ tellMetadataRef ref meta
    rSuccess val =
      case A.fromJSON val of
        A.Success a -> return a
        A.Error err -> do
            tellMeta
            throwM $ JsonProtocolError val (T.pack err)
    rError val = do
      tellMeta
      case parseEither parseJSON val of
        Left e ->
          throwM $ JsonProtocolError val (T.pack e)
        Right err'' -> do
          -- AWS error types look like "namespace#ErrorCode"; keep the
          -- part after the '#'.
          let e = T.drop 1 . snd . T.breakOn "#" $ aeType err''
          errCode <- readErrCode e
          throwM $ DdbError statusCode errCode (fromMaybe "" $ aeMessage err'')
    -- Decode the error code via 'Read' on 'DdbErrCode'; unknown codes
    -- become 'UnknownDynamoErrCode'.
    readErrCode txt =
        let txt' = T.unpack txt
        in case readMay txt' of
             Just e -> return $ e
             Nothing -> throwM (UnknownDynamoErrCode txt)
    HTTP.Status{..} = HTTP.responseStatus resp
-- | Conditions used by mutation operations ('PutItem', 'UpdateItem',
-- etc.). The default 'def' instance is empty (no condition).
--
-- The 'CondMerge' operator only matters when two or more conditions
-- are present (see 'conditionsJson').
data Conditions = Conditions CondMerge [Condition]
    deriving (Eq,Show,Read,Ord,Typeable)
instance Default Conditions where
    def = Conditions CondAnd []
-- | Render conditions under the @Expected@ request key.
expectsJson :: Conditions -> [A.Pair]
expectsJson conds = conditionsJson "Expected" conds
-- | JSON encoding of conditions parameter in various contexts.
--
-- Emits a @ConditionalOperator@ pair only when two or more conditions
-- are present, followed by the conditions object under @key@ (omitted
-- when there are no conditions).
conditionsJson :: T.Text -> Conditions -> [A.Pair]
conditionsJson key (Conditions op es) = opPair ++ condPair
  where
    condPair
      | null es   = []
      | otherwise = [key .= object (map conditionJson es)]
    opPair = case es of
      (_ : _ : _) -> ["ConditionalOperator" .= String (rendCondOp op)]
      _           -> []
-------------------------------------------------------------------------------
-- | Render a 'CondMerge' as the string AWS expects.
rendCondOp :: CondMerge -> T.Text
rendCondOp op = case op of
    CondAnd -> "AND"
    CondOr  -> "OR"
-------------------------------------------------------------------------------
-- | How to merge multiple conditions.
data CondMerge = CondAnd | CondOr
    deriving (Eq,Show,Read,Ord,Typeable)
-- | A condition used by mutation operations ('PutItem', 'UpdateItem', etc.).
data Condition = Condition {
      condAttr :: T.Text
    -- ^ Attribute to use as the basis for this conditional
    , condOp :: CondOp
    -- ^ Operation on the selected attribute
    } deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | Conditional operation to perform on a field.
--
-- Each constructor maps onto an AWS comparison operator string (see
-- 'renderCondOp') with its operand values (see 'getCondValues').
data CondOp
    = DEq DValue
    | NotEq DValue
    | DLE DValue
    | DLT DValue
    | DGE DValue
    | DGT DValue
    | NotNull
    | IsNull
    | Contains DValue
    | NotContains DValue
    | Begins DValue
    | In [DValue]
    | Between DValue DValue
    deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | The operand values carried by a conditional operator, in the order
-- AWS expects them in @AttributeValueList@.
getCondValues :: CondOp -> [DValue]
getCondValues (DEq v)         = [v]
getCondValues (NotEq v)       = [v]
getCondValues (DLE v)         = [v]
getCondValues (DLT v)         = [v]
getCondValues (DGE v)         = [v]
getCondValues (DGT v)         = [v]
getCondValues NotNull         = []
getCondValues IsNull          = []
getCondValues (Contains v)    = [v]
getCondValues (NotContains v) = [v]
getCondValues (Begins v)      = [v]
getCondValues (In vs)         = vs
getCondValues (Between lo hi) = [lo, hi]
-------------------------------------------------------------------------------
-- | The AWS @ComparisonOperator@ string for a conditional operator.
renderCondOp :: CondOp -> T.Text
renderCondOp DEq{}         = "EQ"
renderCondOp NotEq{}       = "NE"
renderCondOp DLE{}         = "LE"
renderCondOp DLT{}         = "LT"
renderCondOp DGE{}         = "GE"
renderCondOp DGT{}         = "GT"
renderCondOp NotNull       = "NOT_NULL"
renderCondOp IsNull        = "NULL"
renderCondOp Contains{}    = "CONTAINS"
renderCondOp NotContains{} = "NOT_CONTAINS"
renderCondOp Begins{}      = "BEGINS_WITH"
renderCondOp In{}          = "IN"
renderCondOp Between{}     = "BETWEEN"
-- | JSON pair for a single 'Condition': attribute name keyed to the
-- serialized operator.
conditionJson :: Condition -> Pair
conditionJson c = condAttr c .= condOp c
-- | Serialize an operator as its @ComparisonOperator@ name plus, when
-- it carries operands, an @AttributeValueList@.
instance ToJSON CondOp where
    toJSON c = object (opPair : valuePairs)
      where
        opPair = "ComparisonOperator" .= String (renderCondOp c)
        valuePairs = case getCondValues c of
          [] -> []
          vs -> ["AttributeValueList" .= vs]
-------------------------------------------------------------------------------
-- | API-version prefix prepended to the operation name in the
-- @x-amz-target@ header (see 'ddbSignQuery').
dyApiVersion :: B.ByteString
dyApiVersion = "DynamoDB_20120810."
-------------------------------------------------------------------------------
-- | The standard response metrics on capacity consumption.
data ConsumedCapacity = ConsumedCapacity {
      capacityUnits :: Int64
    -- ^ Total capacity units consumed by the operation.
    , capacityGlobalIndex :: [(T.Text, Int64)]
    -- ^ Per-global-secondary-index consumption.
    , capacityLocalIndex :: [(T.Text, Int64)]
    -- ^ Per-local-secondary-index consumption.
    , capacityTableUnits :: Maybe Int64
    -- ^ Units consumed by the table itself, when reported.
    , capacityTable :: T.Text
    -- ^ Name of the table.
    } deriving (Eq,Show,Read,Ord,Typeable)
instance FromJSON ConsumedCapacity where
    parseJSON (Object v) = ConsumedCapacity
      <$> v .: "CapacityUnits"
      <*> (HM.toList <$> v .:? "GlobalSecondaryIndexes" .!= mempty)
      <*> (HM.toList <$> v .:? "LocalSecondaryIndexes" .!= mempty)
      <*> (v .:? "Table" >>= maybe (return Nothing) (.: "CapacityUnits"))
      <*> v .: "TableName"
    parseJSON _ = fail "ConsumedCapacity must be an Object."
-- | Which consumption metrics the request asks AWS to return.
data ReturnConsumption = RCIndexes | RCTotal | RCNone
    deriving (Eq,Show,Read,Ord,Typeable)
instance ToJSON ReturnConsumption where
    toJSON RCIndexes = String "INDEXES"
    toJSON RCTotal = String "TOTAL"
    toJSON RCNone = String "NONE"
instance Default ReturnConsumption where
    def = RCNone
-- | Whether to ask AWS for item-collection size metrics.
data ReturnItemCollectionMetrics = RICMSize | RICMNone
    deriving (Eq,Show,Read,Ord,Typeable)
instance ToJSON ReturnItemCollectionMetrics where
    toJSON RICMSize = String "SIZE"
    toJSON RICMNone = String "NONE"
instance Default ReturnItemCollectionMetrics where
    def = RICMNone
-- | Item-collection statistics reported back by AWS.
data ItemCollectionMetrics = ItemCollectionMetrics {
      icmKey :: (T.Text, DValue)
    -- ^ The collection's partition key (name, value).
    , icmEstimate :: [Double]
    -- ^ Size estimate range, in gigabytes.
    } deriving (Eq,Show,Read,Ord,Typeable)
-- | Decode item-collection metrics from a response.
--
-- Fixed: previously used the partial 'head' on @HM.toList@, which
-- would throw an imprecise exception if AWS ever returned an empty
-- @ItemCollectionKey@ object; an empty object is now a parse failure.
instance FromJSON ItemCollectionMetrics where
    parseJSON (Object v) = ItemCollectionMetrics
      <$> (do m <- v .: "ItemCollectionKey"
              case HM.toList m of
                (kv : _) -> return kv
                []       -> fail "ItemCollectionKey must be non-empty")
      <*> v .: "SizeEstimateRangeGB"
    parseJSON _ = fail "ItemCollectionMetrics must be an Object."
-------------------------------------------------------------------------------
-- | What to return from the current update operation
data UpdateReturn
    = URNone       -- ^ Return nothing
    | URAllOld     -- ^ Return old values
    | URUpdatedOld -- ^ Return old values with a newer replacement
    | URAllNew     -- ^ Return new values
    | URUpdatedNew -- ^ Return new values that were replacements
    deriving (Eq,Show,Read,Ord,Typeable)

-- | Serialized as the corresponding @ReturnValues@ string.
instance ToJSON UpdateReturn where
    toJSON r = String $ case r of
        URNone       -> "NONE"
        URAllOld     -> "ALL_OLD"
        URUpdatedOld -> "UPDATED_OLD"
        URAllNew     -> "ALL_NEW"
        URUpdatedNew -> "UPDATED_NEW"

instance Default UpdateReturn where
    def = URNone
-------------------------------------------------------------------------------
-- | What to return from a 'Query' or 'Scan' query.
data QuerySelect
    = SelectSpecific [T.Text]
    -- ^ Only return selected attributes
    | SelectCount
    -- ^ Return counts instead of attributes
    | SelectProjected
    -- ^ Return index-projected attributes
    | SelectAll
    -- ^ Default. Return everything.
    deriving (Eq,Show,Read,Ord,Typeable)
-- | Defaults to returning everything.
instance Default QuerySelect where def = SelectAll
-------------------------------------------------------------------------------
-- | Translate a 'QuerySelect' into the request attributes DynamoDb
-- understands. (No type signature in the original; left inferred.)
querySelectJson q = case q of
    SelectSpecific as ->
        [ "Select" .= String "SPECIFIC_ATTRIBUTES"
        , "AttributesToGet" .= as ]
    SelectCount     -> ["Select" .= String "COUNT"]
    SelectProjected -> ["Select" .= String "ALL_PROJECTED_ATTRIBUTES"]
    SelectAll       -> ["Select" .= String "ALL_ATTRIBUTES"]
-------------------------------------------------------------------------------
-- | A class to help predict DynamoDb size of values, attributes and
-- entire items. The result is given in number of bytes.
class DynSize a where
    dynSize :: a -> Int
instance DynSize DValue where
    -- Numbers are counted as a fixed 8 bytes.
    dynSize (DNum _) = 8
    -- NOTE(review): 'T.length' counts characters, not UTF-8 bytes, so
    -- non-ASCII text may be under-counted — confirm against DynamoDb's
    -- size-accounting rules.
    dynSize (DString a) = T.length a
    -- Binary values are counted at their Base64-encoded length.
    dynSize (DBinary bs) = T.length . T.decodeUtf8 $ Base64.encode bs
    dynSize (DNumSet s) = 8 * S.size s
    dynSize (DStringSet s) = sum $ map (dynSize . DString) $ S.toList s
    dynSize (DBinSet s) = sum $ map (dynSize . DBinary) $ S.toList s
-- An attribute costs its name length plus its value's size.
instance DynSize Attribute where
    dynSize (Attribute k v) = T.length k + dynSize v
-- An item is the sum of its attributes.
instance DynSize Item where
    dynSize m = sum $ map dynSize $ attributes m
instance DynSize a => DynSize [a] where
    dynSize as = sum $ map dynSize as
instance DynSize a => DynSize (Maybe a) where
    dynSize = maybe 0 dynSize
instance (DynSize a, DynSize b) => DynSize (Either a b) where
    dynSize = either dynSize dynSize
-------------------------------------------------------------------------------
-- | Will an attribute be considered empty by DynamoDb?
--
-- A 'PutItem' (or similar) with empty attributes will be rejected
-- with a 'ValidationException'.
nullAttr :: Attribute -> Bool
nullAttr (Attribute _ v) = isEmptyVal v
  where
    -- Empty text/binary payloads and empty sets count as empty;
    -- everything else (e.g. numbers) never does.
    isEmptyVal (DString t)    = t == ""
    isEmptyVal (DBinary b)    = b == ""
    isEmptyVal (DNumSet s)    = S.null s
    isEmptyVal (DStringSet s) = S.null s
    isEmptyVal (DBinSet s)    = S.null s
    isEmptyVal _              = False
-------------------------------------------------------------------------------
--
-- | Item Parsing
--
-------------------------------------------------------------------------------
-- | Failure continuation.
type Failure f r   = String -> f r
-- | Success continuation.
type Success a f r = a -> f r
-- | A continuation-based parser type.
--
-- A computation is run by supplying both continuations; this gives
-- cheap early exit on failure without an intermediate result type.
newtype Parser a = Parser {
      runParser :: forall f r.
                   Failure f r
                -> Success a f r
                -> f r
    }
-- CPS bind: give 'm' a success continuation that runs the next parser.
-- NOTE(review): 'return'/'fail' defined inside 'Monad' predates the
-- AMP/MonadFail changes in newer GHCs — confirm the target compiler.
instance Monad Parser where
    m >>= g = Parser $ \kf ks -> let ks' a = runParser (g a) kf ks
                                 in runParser m kf ks'
    {-# INLINE (>>=) #-}
    return a = Parser $ \_kf ks -> ks a
    {-# INLINE return #-}
    fail msg = Parser $ \kf _ks -> kf msg
    {-# INLINE fail #-}
-- Map over the result by pre-composing the success continuation.
instance Functor Parser where
    fmap f m = Parser $ \kf ks -> runParser m kf (ks . f)
    {-# INLINE fmap #-}
-- 'pure' and '<*>' delegate to the Monad instance (via 'apP').
instance Applicative Parser where
    pure = return
    {-# INLINE pure #-}
    (<*>) = apP
    {-# INLINE (<*>) #-}
-- 'empty' always fails; '<|>' tries the right parser when the left
-- one fails (via 'mplus').
instance Alternative Parser where
    empty = fail "empty"
    {-# INLINE empty #-}
    (<|>) = mplus
    {-# INLINE (<|>) #-}
-- Backtracking choice: if 'a' fails, its failure continuation drops
-- the error message and runs 'b' with the original continuations.
instance MonadPlus Parser where
    mzero = fail "mzero"
    {-# INLINE mzero #-}
    mplus a b = Parser $ \kf ks ->
        runParser a (\_ -> runParser b kf ks) ks
    {-# INLINE mplus #-}
-- Monoid under choice: 'mempty' always fails, 'mappend' backtracks.
-- NOTE(review): newer base versions also require a Semigroup instance
-- — confirm the target compiler.
instance Monoid (Parser a) where
    mempty  = fail "mempty"
    {-# INLINE mempty #-}
    mappend = mplus
    {-# INLINE mappend #-}
-- | Sequence two parsers, applying the function produced by the first
-- to the result of the second ('<*>' for 'Parser').
apP :: Parser (a -> b) -> Parser a -> Parser b
apP pf px = pf >>= \f -> fmap f px
{-# INLINE apP #-}
-------------------------------------------------------------------------------
-- | Types convertible to DynamoDb 'Item' collections.
--
-- Use 'attr' and 'attrAs' combinators to conveniently define instances.
class ToDynItem a where
    toItem :: a -> Item
-------------------------------------------------------------------------------
-- | Types parseable from DynamoDb 'Item' collections.
--
-- Use 'getAttr' family of functions to applicatively or monadically
-- parse into your custom types.
class FromDynItem a where
    parseItem :: Item -> Parser a
-- Trivial instances: an 'Item' converts to/from itself unchanged.
instance ToDynItem Item where toItem = id
instance FromDynItem Item where parseItem = return
-- Association lists convert field-by-field via 'attr'.
instance DynVal a => ToDynItem [(T.Text, a)] where
    toItem as = item $ map (uncurry attr) as
-- Parsing converts every attribute, failing on the first value that
-- does not convert to type @a@.
instance (Typeable a, DynVal a) => FromDynItem [(T.Text, a)] where
    parseItem i = mapM f $ M.toList i
        where
          f (k,v) = do
              v' <- maybe (fail (valErr (Tagged v :: Tagged a DValue))) return $
                    fromValue v
              return (k, v')
-- Maps piggyback on the association-list instances above.
instance DynVal a => ToDynItem (M.Map T.Text a) where
    toItem m = toItem $ M.toList m
instance (Typeable a, DynVal a) => FromDynItem (M.Map T.Text a) where
    parseItem i = M.fromList <$> parseItem i
-- | Human-readable message for a failed 'DValue' conversion, naming
-- the offending value and the intended target type.
valErr :: forall a. Typeable a => Tagged a DValue -> String
valErr (Tagged dv) = concat
    [ "Can't convert DynamoDb value ", show dv
    , " into type ", show (typeOf (undefined :: a)) ]
-- | Convenience combinator for parsing fields from an 'Item' returned
-- by DynamoDb. Fails when the key is absent or the stored 'DValue'
-- cannot be converted to the target type.
getAttr
    :: forall a. (Typeable a, DynVal a)
    => T.Text
    -- ^ Attribute name
    -> Item
    -- ^ Item from DynamoDb
    -> Parser a
getAttr k m =
    -- 'do' dropped: a single 'case' needs no do-block (hlint: redundant do).
    case M.lookup k m of
      Nothing -> fail ("Key " <> T.unpack k <> " not found")
      Just dv -> maybe (fail (valErr (Tagged dv :: Tagged a DValue))) return $ fromValue dv
-- | Parse an attribute only if it is present in the 'Item'.
--
-- NOTE(review): unlike 'getAttr', a present-but-unconvertible value
-- yields 'Nothing' here rather than a parse failure ('fromValue' is
-- returned as-is); the old haddock claimed otherwise, so the docs now
-- describe the actual behaviour.
getAttr'
    :: forall a. (Typeable a, DynVal a)
    => T.Text
    -- ^ Attribute name
    -> Item
    -- ^ Item from DynamoDb
    -> Parser (Maybe a)
getAttr' k m =
    -- 'do' dropped: a single 'case' needs no do-block (hlint: redundant do).
    case M.lookup k m of
      Nothing -> return Nothing
      Just dv -> return $ fromValue dv
-------------------------------------------------------------------------------
-- | Parse an 'Item' into target type using the 'FromDynItem'
-- instance. Yields 'Left' with the failure message, 'Right' on
-- success.
fromItem :: FromDynItem a => Item -> Either String a
fromItem i = runParser (parseItem i) Left Right
| frms-/aws | Aws/DynamoDb/Core.hs | bsd-3-clause | 39,556 | 0 | 21 | 9,583 | 9,530 | 5,087 | 4,443 | 794 | 13 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Snaplet.Authentication
( initAuthentication
, Authentication
, requireUser
, withUser
, makeSessionJSON
, module Snaplet.Authentication.Queries
, module Snaplet.Authentication.Schema
, AuthConfig(..)
) where
import Control.Lens
import Control.Monad.CatchIO hiding (Handler)
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Monad.Trans.Either
import Crypto.BCrypt
import Data.ByteString
import Data.Monoid
import Data.Text as T
import Data.Text.Encoding
import Data.Time
import Data.UUID
import Database.Esqueleto hiding (migrate)
import qualified Database.Persist
import qualified Kashmir.Github as Github
import Kashmir.Snap.Snaplet.Random
import Kashmir.Snap.Utils
import Kashmir.UUID
import Kashmir.Web
import Snap hiding (with)
import Snaplet.Authentication.Common
import Snaplet.Authentication.Exception
import Snaplet.Authentication.PasswordReset
import Snaplet.Authentication.Queries
import Snaplet.Authentication.Schema
import Snaplet.Authentication.Session
import Snaplet.Authentication.Types
------------------------------------------------------------
-- | Build the GitHub OAuth login URL for our client id, requesting
-- the scopes this snaplet needs.
githubLoginUrl :: Github.Config -> Text
githubLoginUrl config = T.pack (base ++ scopeAndClient ++ clientId)
  where
    base           = view Github.authUrl config
    scopeAndClient = "?scope=user:email,read:org,admin:repo_hook,&client_id="
    clientId       = view Github.clientId config
-- | Redirect the client to GitHub's OAuth login page.
githubLoginHandler :: Handler b (Authentication b) ()
githubLoginHandler = do
  githubConfig <- view (authConfig . github)
  redirect . encodeUtf8 $ githubLoginUrl githubConfig
-- | Exchange a GitHub OAuth @code@ for an access token, fetch the
-- user's details, and create-or-update the matching account row.
-- Returns the timestamp used for the upsert together with the
-- account key.
upsertAccountFromGithub
  :: Github.Config
  -> ByteString
  -> UUID
  -> ConnectionPool
  -> IO (UTCTime, Key Account)
upsertAccountFromGithub githubConfig code uuid connection = do
  accessToken <-
    view Github.accessToken <$> Github.requestAccess githubConfig code
  user <- runReaderT Github.getUserDetails accessToken
  now <- getCurrentTime
  accountKey <-
    runSqlPersistMPool
      (createOrUpdateGithubUser uuid now accessToken user)
      connection
  return (now, accountKey)
-- | Complete the GitHub OAuth callback: upsert the account for the
-- received @code@, write an auth token (expiring 'twoWeeks' from
-- now), and send the client on to the redirect target.
processGithubAccessToken :: Text
                         -> ByteString
                         -> Handler b (Authentication b) ()
processGithubAccessToken redirectTarget code = do
  githubConfig <- view (authConfig . github)
  connection <- getConnection
  randomNumberGenerator <- view randomNumberGeneratorLens
  -- Fresh UUID for the (possibly new) account row.
  uuid <- Snap.withTop randomNumberGenerator getRandom
  (now, accountKey) <-
    liftIO $ upsertAccountFromGithub githubConfig code uuid connection
  logError $
    "Upserted account key: " <> (toStrictByteString . unAccountKey) accountKey
  writeAuthToken (addUTCTime twoWeeks now) (unAccountKey accountKey)
  redirect $ encodeUtf8 redirectTarget
-- | GET endpoint GitHub redirects back to; requires a @code@ parameter.
githubCallbackHandler :: Text -> Handler b (Authentication b) ()
githubCallbackHandler redirectTarget =
  method GET $ requireParam "code" >>= processGithubAccessToken redirectTarget
------------------------------------------------------------
-- | POST endpoint creating an email/password account. Responds 409
-- when 'createPasswordAccount' refuses to create the account
-- (presumably a duplicate — confirm in 'createPasswordUser');
-- otherwise authorises the fresh account.
registrationHandler :: Handler b (Authentication b) ()
registrationHandler =
  method POST $ do
    payload <- requireBoundedJSON 1024
    connection <- getConnection
    randomNumberGenerator <- view randomNumberGeneratorLens
    uuid <- Snap.withTop randomNumberGenerator getRandom
    maybeAccount <- liftIO $ createPasswordAccount payload uuid connection
    case maybeAccount of
      Nothing -> handleErrorWithMessage 409 "Conflict"
      Just account -> do
        logError $ "Created account: " <> encodeUtf8 (T.pack (show account))
        authorizedAccountResponse account
-- | Create a password-backed account from a registration payload,
-- stamped with the current time; 'Nothing' when creation is refused.
createPasswordAccount :: Registration
                      -> UUID
                      -> ConnectionPool
                      -> IO (Maybe Account)
createPasswordAccount payload uuid connection =
  getCurrentTime >>= \now ->
    runSqlPersistMPool (createPasswordUser uuid now payload) connection
------------------------------------------------------------
-- | Check a login payload against the stored bcrypt hash and either
-- authorise the account or reject via 'unauthorized'.
processEmailPassword :: Login -> Handler b (Authentication b) ()
processEmailPassword payload = do
  matchingAccount <- handleSql (lookupByEmail (loginEmail payload))
  case matchingAccount of
    Nothing -> unauthorized
    -- Validate password.
    Just (account, accountUidpwd) ->
      if validatePassword
           (encodeUtf8 (accountUidpwdPassword accountUidpwd))
           (encodeUtf8 (loginPassword payload))
        then authorizedAccountResponse account
        else unauthorized
-- | POST endpoint for email/password login.
--
-- Delegates to 'processEmailPassword' instead of duplicating its
-- lookup-and-validate logic inline (the two bodies were identical
-- copy/paste).
emailPasswordLoginHandler :: Handler b (Authentication b) ()
emailPasswordLoginHandler =
  method POST $ requireBoundedJSON 1024 >>= processEmailPassword
-- | Require that an authenticated AuthUser is present in the current session.
-- This function has no DB cost - only checks to see if the client has passed a valid auth token.
requireUser
  :: SnapletLens b (Authentication b)
  -> Handler b v a
  -> (Key Account -> Handler b v a)
  -> Handler b v a
requireUser aLens onMissing onFound =
  Snap.withTop aLens readAuthToken >>= maybe onMissing (onFound . AccountKey)
-- | Run a handler with the current session's account key, if any;
-- unlike 'requireUser' it never rejects the request itself.
withUser
  :: SnapletLens b (Authentication b)
  -> (Maybe (Key Account) -> Handler b v a)
  -> Handler b v a
withUser aLens handler = do
  maybeToken <- Snap.withTop aLens readAuthToken
  handler (fmap AccountKey maybeToken)
------------------------------------------------------------
-- | Drop the client's auth token and bounce them to the site root.
logoutHandler
  :: MonadSnap m
  => m ()
logoutHandler = removeAuthToken >> redirect "/"
-- | GET endpoint reporting the logged-in account as JSON.
-- With no (valid) token the cookie is cleared and the request
-- 'pass'es; a token whose account row is missing raises
-- 'AccountNotFound'.
authStatusHandler :: Handler b (Authentication b) ()
authStatusHandler =
  method GET $ do
    logError "Looking up user details."
    authToken <- readAuthToken
    logError $ "Got auth token: " <> maybe "<none>" toStrictByteString authToken
    case authToken of
      Nothing -> removeAuthToken >> pass
      Just accountId -> do
        account <- handleSql (Database.Persist.get $ AccountKey accountId)
        case account of
          Nothing -> throw AccountNotFound
          Just a -> writeJSON a
-- | Run the account-table migrations against the pool, handing the
-- same pool back so initialisation can keep chaining it.
migrate :: ConnectionPool -> EitherT Text IO ConnectionPool
migrate pool =
  lift (runSqlPersistMPool (runMigration migrateAccounts) pool) >> return pool
-- | Build the authentication snaplet: registers all auth routes and
-- hooks the schema migration into post-init.
--
-- The underscore-prefixed arguments are captured into the
-- 'Authentication' record by @RecordWildCards@ ('Authentication {..}').
initAuthentication
  :: Text
  -> AuthConfig
  -> SnapletLens b ConnectionPool
  -> SnapletLens b RandomNumberGenerator
  -> SnapletInit b (Authentication b)
initAuthentication redirectTarget _authConfig _poolLens _randomNumberGeneratorLens =
  makeSnaplet "authentication" "Authentication Snaplet" Nothing $ do
    addRoutes
      [ ("/login/uidpwd", emailPasswordLoginHandler)
      , ("/registration/uidpwd", registrationHandler)
      , ("/reset/uidpwd", emailPasswordResetHandler)
      , ("/reset/uidpwd/complete", emailPasswordResetCompletionHandler)
      , ("/login/github", githubLoginHandler)
      , ("/callback/github", githubCallbackHandler redirectTarget)
      , ("/logout", logoutHandler)
      , ("/status", authStatusHandler)
      ]
    _ <- Snap.withTop _poolLens $ addPostInitHook migrate
    return Authentication {..}
| krisajenkins/snaplet-auth | src/Snaplet/Authentication.hs | bsd-3-clause | 7,436 | 0 | 18 | 1,390 | 1,754 | 887 | 867 | 188 | 3 |
----------------------------------------------------------------------------
-- |
-- Module : BWildcardExportListWithChildren
-- Copyright : (c) Sergey Vinokurov 2018
-- License : BSD3-style (see LICENSE)
-- Maintainer : [email protected]
----------------------------------------------------------------------------
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module BWildcardExportListWithChildren
( FooB(..)
, BarB(..)
, pattern BazBP
, quuxB
, pattern FrobBP
, QuuxB(..)
, pattern QuuxBP
, commonFunc
, derivedB
) where
-- | Record with one lazy and one strict field (tags-test fixture).
data FooB = FooB1
  { fooB1 :: Int
  , fooB2 :: !Double
  }
-- | Newtype wrapper over a list of 'Double's.
newtype BarB =
  BarB1
  { unBarB :: [Double] }
-- | Bidirectional pattern viewing a two-element 'BarB1' list.
pattern BazBP :: Double -> Double -> BarB
pattern BazBP x y = BarB1 [x, y]
-- | Identity on 'Int' (test fixture).
quuxB :: Int -> Int
quuxB n = n
-- | Pattern building/matching a 'FooB1' whose 'fooB2' is 0.
pattern FrobBP :: Int -> FooB
pattern FrobBP x = FooB1 { fooB1 = x, fooB2 = 0 }
-- | Two-constructor sum type (tags-test fixture).
data QuuxB =
  QuuxB1 Int
  | QuuxB2
-- | Pattern synonym for 'QuuxB1'.
pattern QuuxBP :: Int -> QuuxB
pattern QuuxBP n = QuuxB1 n
-- | @x + x^2@ (test fixture). Addition reordered; IEEE addition is
-- commutative, so results are bit-identical.
commonFunc :: Double -> Double
commonFunc y = (y * y) + y
-- Template Haskell splice: declares @derivedB :: Int -> Int@ (an
-- identity function) at compile time via a [d| |] declaration quote.
$([d|
  derivedB :: Int -> Int
  derivedB x = x
  |])
| sergv/tags-server | test-data/0012resolve_reexport_import_cycles/BWildcardExportListWithChildren.hs | bsd-3-clause | 1,131 | 0 | 9 | 246 | 263 | 155 | 108 | 37 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.HR.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Lang
import Duckling.Resolve
import Duckling.Time.Corpus
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
import Duckling.Testing.Types hiding (examples)
-- | Croatian (HR) corpus: the standard test context switched to HR,
-- paired with the example set below.
corpus :: Corpus
corpus = (testContext {lang = HR}, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (datetime (2013, 2, 12, 4, 30, 0) Second)
[ "sad"
, "sada"
, "upravo sad"
, "ovaj tren"
]
, examples (datetime (2013, 2, 12, 0, 0, 0) Day)
[ "danas"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "jucer"
, "jučer"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "sutra"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "ponedjeljak"
, "pon."
, "ovaj ponedjeljak"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "ponedjeljak, 18. veljace"
, "ponedjeljak, 18. veljače"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "utorak"
, "utorak 19."
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "cetvrtak"
, "četvrtak"
, "čet"
, "cet."
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "petak"
, "pet"
, "pet."
]
, examples (datetime (2013, 2, 16, 0, 0, 0) Day)
[ "subota"
, "sub"
, "sub."
]
, examples (datetime (2013, 2, 17, 0, 0, 0) Day)
[ "nedjelja"
, "ned"
, "ned."
]
, examples (datetime (2013, 3, 1, 0, 0, 0) Day)
[ "1. ozujak"
, "1. ožujak"
, "prvi ozujka"
]
, examples (datetime (2013, 3, 3, 0, 0, 0) Day)
[ "treci ozujka"
, "treci ožujka"
]
, examples (datetime (2015, 3, 3, 0, 0, 0) Day)
[ "3. ozujka 2015"
, "treci ozujka 2015"
, "3/3/2015"
, "3/3/15"
, "2015-3-3"
, "2015-03-03"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "15ti drugi"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "15. veljace"
, "15. veljače"
, "15/02"
]
, examples (datetime (2013, 8, 8, 0, 0, 0) Day)
[ "8. kolovoza"
, "8. kolovoz"
]
, examples (datetime (2014, 10, 0, 0, 0, 0) Month)
[ "listopad 2014"
]
, examples (datetime (1974, 10, 31, 0, 0, 0) Day)
[ "31/10/1974"
, "31/10/74"
, "74-10-31"
]
, examples (datetime (2015, 4, 14, 0, 0, 0) Day)
[ "14travanj 2015"
, "14. travnja, 2015"
, "14. travanj 15"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "sljedeci utorak"
, "sljedeceg utorka"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "petak nakon sljedeceg"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "sljedeci ozujak"
]
, examples (datetime (2014, 3, 0, 0, 0, 0) Month)
[ "ozujak nakon sljedeceg"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "nedjelja, 10. veljace"
, "nedjelja, 10. veljače"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "Sri, 13. velj"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "ponedjeljak, veljaca 18."
, "Pon, 18. veljace"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Week)
[ "ovaj tjedan"
]
, examples (datetime (2013, 2, 4, 0, 0, 0) Week)
[ "prosli tjedan"
, "prošli tjedan"
, "prethodni tjedan"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Week)
[ "sljedeci tjedan"
]
, examples (datetime (2013, 1, 0, 0, 0, 0) Month)
[ "prethodni mjesec"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "sljedeci mjesec"
]
, examples (datetime (2013, 1, 1, 0, 0, 0) Quarter)
[ "ovaj kvartal"
, "ovo tromjesecje"
]
, examples (datetime (2013, 4, 1, 0, 0, 0) Quarter)
[ "sljedeci kvartal"
]
, examples (datetime (2013, 7, 1, 0, 0, 0) Quarter)
[ "treci kvartal"
, "3. kvartal"
, "trece tromjesecje"
, "3. tromjesečje"
]
, examples (datetime (2018, 10, 1, 0, 0, 0) Quarter)
[ "4. kvartal 2018"
, "četvrto tromjesečje 2018"
]
, examples (datetime (2012, 0, 0, 0, 0, 0) Year)
[ "prošla godina"
, "prethodna godina"
]
, examples (datetime (2013, 0, 0, 0, 0, 0) Year)
[ "ova godina"
]
, examples (datetime (2014, 0, 0, 0, 0, 0) Year)
[ "sljedece godina"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "prosle nedjelje"
, "prosli tjedan u nedjelju"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "prosli utorak"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "sljedeci utorak"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "sljedecu srijedu"
]
, examples (datetime (2013, 2, 20, 0, 0, 0) Day)
[ "sljedeci tjedan u srijedu"
, "srijeda sljedeci tjedan"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "sljedeci petak"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "ovaj tjedan u ponedjeljak"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "ovaj utorak"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "ova srijeda"
, "ovaj tjedan u srijedu"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "prekosutra"
]
, examples (datetime (2013, 2, 14, 17, 0, 0) Hour)
[ "prekosutra u 5 popodne"
, "prekosutra u 17"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "prekjucer"
, "prekjučer"
]
, examples (datetime (2013, 2, 10, 8, 0, 0) Hour)
[ "prekjučer u 8"
, "prekjučer u 8 sati"
]
, examples (datetime (2013, 3, 25, 0, 0, 0) Day)
[ "zadnji ponedjeljak u ozujku"
]
, examples (datetime (2014, 3, 30, 0, 0, 0) Day)
[ "zadnja nedjelja u ozujku 2014"
]
, examples (datetime (2013, 10, 3, 0, 0, 0) Day)
[ "treci dan u listopadu"
]
, examples (datetime (2014, 10, 6, 0, 0, 0) Week)
[ "prvi tjedan u listopadu 2014"
]
, examples (datetime (2015, 10, 31, 0, 0, 0) Day)
[ "zadnji dan u listopadu 2015"
]
, examples (datetime (2014, 9, 22, 0, 0, 0) Week)
[ "zadnji tjedan u rujnu 2014"
]
, examples (datetime (2013, 10, 1, 0, 0, 0) Day)
[ "prvi utorak u listopadu"
]
, examples (datetime (2014, 9, 16, 0, 0, 0) Day)
[ "treci utorak u rujnu 2014"
]
, examples (datetime (2014, 10, 1, 0, 0, 0) Day)
[ "prva srijeda u listopadu 2014"
]
, examples (datetime (2014, 10, 8, 0, 0, 0) Day)
[ "druga srijeda u listopadu 2014"
]
, examples (datetime (2015, 1, 13, 0, 0, 0) Day)
[ "treci utorak poslije Bozica 2014"
]
, examples (datetime (2013, 2, 13, 3, 0, 0) Hour)
[ "3 u noci"
, "u 3 ujutro"
, "u tri sata u noci"
]
, examples (datetime (2013, 2, 12, 3, 18, 0) Minute)
[ "3:18 rano"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
[ "u 3 poslijepodne"
, "@ 15"
, "15 sati poslijepodne"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
[ "oko 3 poslijepodne"
, "otprilike u 3 poslijepodne"
, "cca 3 poslijepodne"
, "cca 15"
]
, examples (datetime (2013, 2, 12, 15, 15, 0) Minute)
[ "15 i 15"
, "3:15 poslijepodne"
, "15:15"
]
, examples (datetime (2013, 2, 12, 15, 15, 0) Minute)
[ "cetvrt nakon 3 poslijepodne"
]
, examples (datetime (2013, 2, 12, 15, 20, 0) Minute)
[ "3 i 20 popodne"
, "3:20 poslijepodne"
, "3:20 popodne"
, "dvadeset nakon 3 popodne"
, "15:20"
]
, examples (datetime (2013, 2, 12, 15, 30, 0) Minute)
[ "tri i po popodne"
, "pola 4 popodne"
, "15:30"
, "pola cetiri popodne"
]
, examples (datetime (2013, 2, 12, 15, 23, 24) Second)
[ "15:23:24"
]
, examples (datetime (2013, 2, 12, 11, 45, 0) Minute)
[ "petnaest do podne"
, "11:45"
, "četvrt do podneva"
]
, examples (datetime (2013, 2, 12, 20, 0, 0) Hour)
[ "8 navecer"
, "osam sati navecer"
, "danas 8 navecer"
]
, examples (datetime (2013, 9, 20, 19, 30, 0) Minute)
[ "u 7:30 popodne u pet, 20. rujna"
]
, examples (datetime (2013, 2, 16, 9, 0, 0) Hour)
[ "9 ujutro u subotu"
, "u subotu u 9 sati ujutro"
]
, examples (datetime (2014, 7, 18, 19, 0, 0) Minute)
[ "pet, srp 18., 2014, 19:00"
, "pet, srp 18., 2014 u 19:00"
]
, examples (datetime (2013, 2, 12, 4, 30, 1) Second)
[ "za jednu sekundu"
]
, examples (datetime (2013, 2, 12, 4, 31, 0) Second)
[ "za jednu minutu"
]
, examples (datetime (2013, 2, 12, 4, 32, 0) Second)
[ "za 2 minute"
, "za jos 2 minute"
, "2 minute od sad"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Second)
[ "za 60 minuta"
]
, examples (datetime (2013, 2, 12, 4, 45, 0) Second)
[ "oko cetvrt sata"
, "oko 1/4h"
, "oko 1/4 h"
, "oko 1/4 sata"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "za pola sata"
, "za pol sata"
, "za 1/2h"
, "za 1/2 h"
, "za 1/2 sata"
]
, examples (datetime (2013, 2, 12, 5, 15, 0) Second)
[ "za tri-cetvrt sata"
, "za 3/4h"
, "za 3/4 h"
, "za 3/4 sata"
]
, examples (datetime (2013, 2, 12, 7, 0, 0) Second)
[ "za 2.5 sata"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Minute)
[ "za jedan sat"
, "za 1h"
]
, examples (datetime (2013, 2, 12, 6, 30, 0) Minute)
[ "za par sati"
]
, examples (datetime (2013, 2, 12, 7, 30, 0) Minute)
[ "za nekoliko sati"
]
, examples (datetime (2013, 2, 13, 4, 30, 0) Minute)
[ "za 24 sata"
, "za 24h"
]
, examples (datetime (2013, 2, 13, 4, 0, 0) Hour)
[ "za 1 dan"
, "za jedan dan"
]
, examples (datetime (2016, 2, 0, 0, 0, 0) Month)
[ "3 godine od danasnjeg dana"
]
, examples (datetime (2013, 2, 19, 4, 0, 0) Hour)
[ "za 7 dana"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "za 1 tjedan"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "za oko pola sata"
]
, examples (datetime (2013, 2, 5, 4, 0, 0) Hour)
[ "prije 7 dana"
]
, examples (datetime (2013, 1, 29, 4, 0, 0) Hour)
[ "prije 14 dana"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "prije jedan tjedan"
, "prije jednog tjedna"
]
, examples (datetime (2013, 1, 22, 0, 0, 0) Day)
[ "prije tri tjedna"
]
, examples (datetime (2012, 11, 12, 0, 0, 0) Day)
[ "prije tri mjeseca"
]
, examples (datetime (2011, 2, 0, 0, 0, 0) Month)
[ "prije dvije godine"
]
, examples (datetime (1954, 0, 0, 0, 0, 0) Year)
[ "1954"
]
, examples (datetime (2013, 2, 19, 4, 0, 0) Hour)
[ "za 7 dana"
]
, examples (datetime (2013, 2, 26, 4, 0, 0) Hour)
[ "za 14 dana"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "za jedan tjedan"
]
, examples (datetime (2013, 3, 5, 0, 0, 0) Day)
[ "za tri tjedna"
]
, examples (datetime (2013, 5, 12, 0, 0, 0) Day)
[ "za tri mjeseca"
]
, examples (datetime (2015, 2, 0, 0, 0, 0) Month)
[ "za dvije godine"
]
, examples (datetime (2013, 12, 0, 0, 0, 0) Month)
[ "jednu godinu poslije Bozica"
]
, examples (datetimeInterval ((2013, 6, 21, 0, 0, 0), (2013, 9, 24, 0, 0, 0)) Day)
[ "ovog ljeta"
, "ovo ljeto"
, "ljetos"
]
, examples (datetimeInterval ((2012, 12, 21, 0, 0, 0), (2013, 3, 21, 0, 0, 0)) Day)
[ "ove zime"
, "zimus"
]
, examples (datetime (2013, 12, 25, 0, 0, 0) Day)
[ "Bozic"
, "zicbo"
]
, examples (datetime (2013, 12, 31, 0, 0, 0) Day)
[ "stara godina"
]
, examples (datetime (2014, 1, 1, 0, 0, 0) Day)
[ "nova godina"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "valentinovo"
]
, examples (datetime (2013, 5, 12, 0, 0, 0) Day)
[ "majcin dan"
]
, examples (datetime (2013, 6, 16, 0, 0, 0) Day)
[ "dan oceva"
]
, examples (datetime (2013, 10, 31, 0, 0, 0) Day)
[ "noc vjestica"
]
, examples (datetimeInterval ((2013, 2, 12, 18, 0, 0), (2013, 2, 13, 0, 0, 0)) Hour)
[ "veceras"
, "ove veceri"
, "danas navecer"
]
, examples (datetimeInterval ((2013, 2, 8, 18, 0, 0), (2013, 2, 11, 0, 0, 0)) Hour)
[ "prosli vikend"
]
, examples (datetimeInterval ((2013, 2, 13, 18, 0, 0), (2013, 2, 14, 0, 0, 0)) Hour)
[ "sutra navecer"
]
, examples (datetimeInterval ((2013, 2, 13, 12, 0, 0), (2013, 2, 13, 14, 0, 0)) Hour)
[ "sutra rucak"
]
, examples (datetimeInterval ((2013, 2, 11, 18, 0, 0), (2013, 2, 12, 0, 0, 0)) Hour)
[ "jucer navecer"
, "prethodne veceri"
]
, examples (datetimeInterval ((2013, 2, 15, 18, 0, 0), (2013, 2, 18, 0, 0, 0)) Hour)
[ "ovaj vikend"
, "ovog vikenda"
]
, examples (datetimeInterval ((2013, 2, 18, 4, 0, 0), (2013, 2, 18, 12, 0, 0)) Hour)
[ "ponedjeljak ujutro"
]
, examples (datetimeInterval ((2013, 2, 18, 3, 0, 0), (2013, 2, 18, 9, 0, 0)) Hour)
[ "ponedjeljak rano ujutro"
, "ponedjeljak rano"
, "ponedjeljak u rane jutarnje sate"
]
, examples (datetimeInterval ((2013, 2, 15, 4, 0, 0), (2013, 2, 15, 12, 0, 0)) Hour)
[ "15. veljace ujutro"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 29, 58), (2013, 2, 12, 4, 30, 0)) Second)
[ "prosle 2 sekunde"
, "prethodne dvije sekunde"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 1), (2013, 2, 12, 4, 30, 4)) Second)
[ "sljedece 3 sekunde"
, "sljedece tri sekunde"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 28, 0), (2013, 2, 12, 4, 30, 0)) Minute)
[ "prosle 2 minute"
, "prethodne dvije minute"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 31, 0), (2013, 2, 12, 4, 34, 0)) Minute)
[ "sljedece 3 minute"
, "sljedece tri minute"
]
, examples (datetimeInterval ((2013, 2, 12, 3, 0, 0), (2013, 2, 12, 4, 0, 0)) Hour)
[ "prethodni jedan sat"
]
, examples (datetimeInterval ((2013, 2, 11, 4, 0, 0), (2013, 2, 12, 4, 0, 0)) Hour)
[ "prethodna 24 sata"
, "prethodna dvadeset i cetiri sata"
, "prethodna dvadeset i cetiri sata"
, "prethodna 24h"
]
, examples (datetimeInterval ((2013, 2, 12, 5, 0, 0), (2013, 2, 12, 8, 0, 0)) Hour)
[ "sljedeca 3 sata"
, "sljedeca tri sata"
]
, examples (datetimeInterval ((2013, 2, 10, 0, 0, 0), (2013, 2, 12, 0, 0, 0)) Day)
[ "prethodna dva dana"
, "prethodna 2 dana"
, "prosla 2 dana"
]
, examples (datetimeInterval ((2013, 2, 13, 0, 0, 0), (2013, 2, 16, 0, 0, 0)) Day)
[ "sljedeca 3 dana"
, "sljedeca tri dana"
]
, examples (datetimeInterval ((2013, 2, 13, 0, 0, 0), (2013, 2, 16, 0, 0, 0)) Day)
[ "sljedecih nekoliko dana"
]
, examples (datetimeInterval ((2013, 1, 28, 0, 0, 0), (2013, 2, 11, 0, 0, 0)) Week)
[ "prethodna 2 tjedna"
, "prethodna dva tjedna"
, "prosla 2 tjedna"
]
, examples (datetimeInterval ((2013, 2, 18, 0, 0, 0), (2013, 3, 11, 0, 0, 0)) Week)
[ "sljedeca 3 tjedna"
, "sljedeca tri tjedna"
]
, examples (datetimeInterval ((2012, 12, 0, 0, 0, 0), (2013, 2, 0, 0, 0, 0)) Month)
[ "prethodna 2 mjeseca"
, "prethodna dva mjeseca"
]
, examples (datetimeInterval ((2013, 3, 0, 0, 0, 0), (2013, 6, 0, 0, 0, 0)) Month)
[ "sljedeca 3 mjeseca"
, "sljedeca tri mjeseca"
]
, examples (datetimeInterval ((2011, 0, 0, 0, 0, 0), (2013, 0, 0, 0, 0, 0)) Year)
[ "prethodne 2 godine"
, "prethodne dvije godine"
]
, examples (datetimeInterval ((2014, 0, 0, 0, 0, 0), (2017, 0, 0, 0, 0, 0)) Year)
[ "sljedece 3 godine"
, "sljedece tri godine"
]
, examples (datetimeInterval ((2013, 7, 13, 0, 0, 0), (2013, 7, 16, 0, 0, 0)) Day)
[ "srpanj 13-15"
, "srpanj 13 do 15"
, "srpanj 13 - srpanj 15"
]
, examples (datetimeInterval ((2013, 8, 8, 0, 0, 0), (2013, 8, 13, 0, 0, 0)) Day)
[ "kol 8 - kol 12"
]
, examples (datetimeInterval ((2013, 2, 12, 9, 30, 0), (2013, 2, 12, 11, 1, 0)) Minute)
[ "9:30 - 11:00"
]
, examples (datetimeInterval ((2013, 2, 14, 9, 30, 0), (2013, 2, 14, 11, 1, 0)) Minute)
[ "od 9:30 - 11:00 u cetvrtak"
, "između 9:30 i 11:00 u cetvrtak"
, "9:30 - 11:00 u cetvrtak"
, "izmedju 9:30 i 11:00 u cetvrtak"
, "cetvrtak od 9:30 do 11:00"
, "od 9:30 do 11:00 u cetvrtak"
, "cetvrtak od 9:30 do 11:00"
]
, examples (datetimeInterval ((2013, 2, 14, 9, 0, 0), (2013, 2, 14, 12, 0, 0)) Hour)
[ "cetvrtak od 9 do 11 ujutro"
]
, examples (datetimeInterval ((2013, 2, 12, 11, 30, 0), (2013, 2, 12, 13, 31, 0)) Minute)
[ "11:30-1:30"
]
, examples (datetime (2013, 9, 21, 13, 30, 0) Minute)
[ "1:30 poslijepodne u sub, ruj 21."
]
, examples (datetimeInterval ((2013, 2, 18, 0, 0, 0), (2013, 3, 4, 0, 0, 0)) Week)
[ "sljedeca 2 tjedna"
]
, examples (datetimeOpenInterval Before (2013, 2, 12, 14, 0, 0) Hour)
[ "nekad do 2 poslijepodne"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 0), (2013, 2, 13, 0, 0, 0)) Second)
[ "do kraja ovog dana"
, "do kraja dana"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 0), (2013, 3, 1, 0, 0, 0)) Second)
[ "do kraja ovog mjeseca"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 0), (2013, 4, 1, 0, 0, 0)) Second)
[ "do kraja sljedeceg mjeseca"
]
, examples (datetime (2013, 2, 12, 13, 0, 0) Minute)
[ "4 poslijepodne CET"
]
, examples (datetime (2013, 2, 14, 6, 0, 0) Minute)
[ "cetvrtak 8:00 GMT"
]
, examples (datetime (2013, 2, 12, 14, 0, 0) Hour)
[ "danas u 14"
, "u 2 poslijepodne"
]
, examples (datetime (2013, 4, 25, 16, 0, 0) Hour)
[ "25/4 U 16 sati"
]
, examples (datetime (2013, 2, 13, 15, 0, 0) Hour)
[ "15 sati sutra"
]
, examples (datetimeOpenInterval After (2013, 2, 17, 4, 0, 0) Hour)
[ "nakon 5 dana"
]
, examples (datetimeOpenInterval Before (2013, 2, 12, 11, 0, 0) Hour)
[ "prije 11 sat"
]
, examples (datetimeInterval ((2013, 2, 12, 12, 0, 0), (2013, 2, 12, 20, 0, 0)) Hour)
[ "ova poslijepodne"
, "ovi popodne"
]
, examples (datetime (2013, 2, 12, 13, 30, 0) Minute)
[ "u 13:30"
, "13:30"
]
, examples (datetime (2013, 2, 12, 4, 45, 0) Second)
[ "za 15 minuta"
]
, examples (datetimeInterval ((2013, 2, 12, 13, 0, 0), (2013, 2, 12, 17, 0, 0)) Hour)
[ "poslije rucka"
]
, examples (datetime (2013, 2, 12, 10, 30, 0) Minute)
[ "10:30"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 0, 0), (2013, 2, 12, 12, 0, 0)) Hour)
[ "ove jutro"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "sljedeci ponedjeljak"
]
, examples (datetime (2013, 2, 12, 12, 0, 0) Hour)
[ "u 12"
, "u podne"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Hour)
[ "u 12 u noci"
, "u ponoc"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "ozujak"
, "u ozujku"
]
]
| rfranek/duckling | Duckling/Time/HR/Corpus.hs | bsd-3-clause | 23,505 | 0 | 11 | 9,825 | 7,803 | 4,744 | 3,059 | 487 | 1 |
{-# OPTIONS -Wall #-}
-----------------------------------------------------------------------------
-- |
-- Module : CTest.hs (executable)
-- Copyright : (c) 2008 Duncan Coutts, Benedikt Huber
-- License : BSD-style
-- Maintainer : [email protected]
-- Portability : non-portable (Data.Generics)
--
-- This is a very simple module, usable for quick tests.
--
-- It provides a wrapper for parsing C-files which haven't been preprocessed yet.
-- It is used as if gcc was called, and internally calls cpp (gcc -E) to preprocess the file.
-- It then outputs the pretty printed AST, replacing declarations from included header
-- files with a corresponding #include directive (This isn't always correct, as e.g. #define s
-- get lost. But it makes it a lot easier to focus on the relevant part of the output).
--
-- If used with a `-e str' command-line argument, the given string is parsed as an expression and pretty
-- printed. Similar for `-d str' and top-level declarations.
-------------------------------------------------------------------------------------------------------
module Main (
main
) where
import Language.C
import Language.C.Parser
import Language.C.System.GCC
import Language.C.Analysis
import Language.C.Test.Environment
import Language.C.Test.GenericAST
import Control.Monad
import System.Environment (getEnv, getArgs)
import System.Exit
import System.IO
import Data.Generics
import Text.PrettyPrint.HughesPJ
-- | Runtime configuration for the test driver, assembled in 'main'
-- from environment variables.
data CTestConfig =
  CTestConfig {
    debugFlag        :: Bool,  -- ^ DEBUG: append a generic AST dump as a comment
    parseOnlyFlag    :: Bool,  -- ^ PARSE_ONLY: skip pretty printing
    useIncludes      :: Bool,  -- ^ pretty print via #include heuristic (inverse of NO_HEADERS_VIA_INCLUDE)
    dumpAst          :: Bool,  -- ^ DUMP_AST: write the parsed AST to dump.ast
    semanticAnalysis :: Bool   -- ^ run semantic analysis (inverse of NO_SEMANTIC_ANALYSIS)
  }
-- | Print an error message followed by the usage text to stderr and
-- exit with status 2.  Never returns.
usage :: String -> IO a
usage msg = printUsage >> exitWith (ExitFailure 2) where
  printUsage = hPutStr stderr . unlines $
    [ "! "++msg,"",
      "Usage: ./CTest -e expression",
      "Usage: ./CTest -s statement",
      "Usage: ./CTest -d declaration",
      "Usage: ./CTest [cpp-opts] file.(c|hc|i)",
      " parses the given C source file and pretty print the AST",
      "Environment Variables (some do not apply with -e,-s or -d): ",
      " TMPDIR: temporary directory for preprocessing",
      " NO_HEADERS_VIA_INCLUDE: do not use heuristic #include directives for pretty printing",
      " DEBUG: debug flag",
      " DUMP_AST: dump the ast to file dump.ast",
      " NO_SEMANTIC_ANALYSIS: do not perform semantic analysis",
      " PARSE_ONLY: do not pretty print"
    ]
-- | Report a fatal error on stderr and terminate with exit status 1.
bailOut :: (Show err) => err -> IO a
bailOut failure = do
  hPrint stderr failure
  hPutStrLn stderr "*** Exit on Error ***"
  exitWith (ExitFailure 1)
-- | Entry point: read the configuration from the environment, then
-- either parse a command-line fragment (-e expression, -s statement,
-- -d declaration) and exit, or preprocess and parse a C file given as
-- if gcc were invoked, finally pretty printing the result via 'output'.
main :: IO ()
main = do
  tmpdir      <- getEnv "TMPDIR"
  dbg         <- getEnvFlag "DEBUG"
  parseonly   <- getEnvFlag "PARSE_ONLY"
  dumpast     <- getEnvFlag "DUMP_AST"
  no_includes <- getEnvFlag "NO_HEADERS_VIA_INCLUDE"
  semantic    <- liftM not (getEnvFlag "NO_SEMANTIC_ANALYSIS")
  let config = CTestConfig dbg parseonly (not $ no_includes) dumpast semantic
  args <- getArgs
  (file,ast) <-
    case args of
      -- fragment modes print their result and exit immediately
      ("-e":str:[]) -> runP config expressionP str >> exitWith ExitSuccess
      ("-s":str:[]) -> runP config statementP str >> exitWith ExitSuccess
      ("-d":str:[]) -> runP config extDeclP str >> exitWith ExitSuccess
      otherArgs ->
        case mungeCcArgs args of
          Groked [cFile] gccOpts -> do
            presult <- parseCFile (newGCC "gcc") (Just tmpdir) gccOpts cFile
            either bailOut (return.((,) cFile)) presult
          Groked cFiles _ -> usage $ "More than one source file given: " ++ unwords cFiles
          -- fixed typo: was "Not input files given"
          Ignore          -> usage $ "No input files given"
          Unknown reason  -> usage $ "Could not process arguments: " ++ reason
  output config file ast
-- | Parse a command-line string with the given parser and pretty print
-- the result (honouring DUMP_AST and PARSE_ONLY); a parse error aborts
-- the program via 'bailOut'.
runP :: (CNode a, Pretty a, Data a) => CTestConfig -> P a -> String -> IO ()
runP config parser str =
  do
    ast <- either bailOut return $ pResult
    when (dumpAst config) $ writeFile "dump.ast" (gshow ast)
    when (not $ parseOnlyFlag config) $ print (pretty ast)
  where
    -- the input is treated as coming from a pseudo file <cmd-line-arg>
    is = inputStreamFromString str
    pResult = execParser_ parser is (argPos)
    argPos = initPos "<cmd-line-arg>"
-- | Post-parse processing of a translation unit: optionally dump the
-- AST to dump.ast, run semantic analysis (reporting warnings and
-- declaration statistics on stderr), then pretty print, with or
-- without the #include heuristic, plus an optional debug dump.
output :: CTestConfig -> FilePath -> CTranslUnit -> IO ()
output config file ast = do
  when (dumpAst config) $ writeFile "dump.ast" (gshow ast)
  when (semanticAnalysis config && (not (null file))) $ do
    let result = runTrav_ (analyseAST ast)
    case result of
      Left errs -> hPutStrLn stderr (show errs)
      -- mapM_ rather than mapM: the print results are discarded, and the
      -- file's -Wall would flag an unused do-bound [()]
      Right (ok,warnings) -> do mapM_ (hPutStrLn stderr . show) warnings
                                printStats file ok
  when (not $ parseOnlyFlag config) $
    print $ (if useIncludes config then prettyUsingInclude else pretty) ast
  when (debugFlag config) $ putStrLn . comment . show . pretty . mkGenericCAST $ ast
comment str = "/*\n" ++ str ++ "\n*/"
printStats file = putStrLn . comment . show
. prettyAssocsWith "global decl stats" text (text.show)
. globalDeclStats (== file) | jthornber/language-c-ejt | test/src/CTest.hs | bsd-3-clause | 4,993 | 1 | 21 | 1,166 | 1,156 | 588 | 568 | 92 | 7 |
;
; HSP help managerp HELP\[Xt@C
; (檪u;vÌsÍRgƵijêÜ·)
;
%type
g£½ß
%ver
3.3
%note
llmod3.hsp,scrsvr.hspðCN[h·é
(Windows9xÌÝpÂ\Å·)
%date
2009/08/01
%author
tom
%dll
llmod3
%url
http://www5b.biglobe.ne.jp/~diamond/hsp/hsp2file.htm
%index
ss_running
XN[Z[o[ªì®µÄ¢é©VXeÉmç¹é
%group
OSVXe§ä½ß
%prm
n1
n1 : 쮵Ģé©A¢È¢©ÌtO
%inst
XN[Z[o[ªì®µÄ¢é©VXe(Windows)Émç¹Ü·B
n1É1ðãü·éÆAVXe(Windows)ÉXN[Z[o[ªì®Å 鱯ðmç¹Ü·B
n1É0ðãü·éÆAXN[Z[o[Í쮵ÄÈ¢AÆVXeÉmç¹Ü·B
^
¦ n1ð1ɵı̽ßðÀs·éÆAALT+CTRL+DEL,ALT+TAB,win{^ÈÇÌL[ªø©ÈÈèÜ·B
n1ð1ɵı̽ßðÀsµ½çAK¸n1ð0ɵÄà¤êx±Ì½ßðÀsµÄ¾³¢B
^
±Ì½ßðÄÑoµ½ãÌstatÌl
0 G[
0ÈO G[³µ
%index
ss_chkpwd
WindowsWÌpX[h`FbN_CAO
%group
OSVXe§ä½ß
%inst
WindowsWÌpX[h`FbN_CAOðÄÑoµÜ·B
½¾µARg[plÌ'æÊÌvpeB'Å'Ê߽ܰÄÞÉæéÛì'ª`FbN³êÄ¢éêÌÝÅ·B
^
±Ì½ßðÄÑoµ½ãÌstatÌl
0 LZ³ê½
0ÈO ³mÈpX[hªüͳê½
('Ê߽ܰÄÞÉæéÛì'ª`FbN³êĢȢêàÜÞ)
%index
ss_chgpwd
WindowsWÌpX[hÏX_CAO
%group
OSVXe§ä½ß
%inst
WindowsWÌpX[hÏX_CAOðÄÑoµÜ·B
^
±Ì½ßðÄÑoµ½ãÌstatÌl
0 pX[hªÏX³ê½
0ÈO LZ³ê½
%href
ss_chkpwd
| zakki/openhsp | package/hsphelp/llmod3_scrsvr.hs | bsd-3-clause | 1,685 | 707 | 16 | 214 | 1,605 | 916 | 689 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Control.Fay
(
ap
,foldM
,zipWithM
,zipWithM_
,replicateM
)
where
import FFI
import Prelude hiding (mapM)
-- | Sequential application in 'Fay': run the wrapped function first,
-- then the wrapped argument, and apply one to the other.
ap :: Fay (a -> b) -> Fay a -> Fay b
ap mf mx = do
  g <- mf
  v <- mx
  return (g v)
-- | Monadic left fold: thread the accumulator through the effectful
-- step function, left to right.
foldM :: (a -> b -> Fay a) -> a -> [b] -> Fay a
foldM _    acc []     = return acc
foldM step acc (v:vs) = do
  acc' <- step acc v
  foldM step acc' vs
-- | Zip two lists with an effectful function, collecting the results.
zipWithM :: (a -> b -> Fay c) -> [a] -> [b] -> Fay [c]
zipWithM f xs ys = sequence (zipWith f xs ys)

-- | Like 'zipWithM', but the results are discarded.
zipWithM_ :: (a -> b -> Fay c) -> [a] -> [b] -> Fay ()
zipWithM_ f xs ys = sequence_ (zipWith f xs ys)

-- | Run an action a fixed number of times, collecting the results.
replicateM :: Int -> Fay a -> Fay [a]
replicateM n act = sequence (replicate n act)
| crooney/cinder | src/Control/Fay.hs | bsd-3-clause | 677 | 0 | 11 | 202 | 350 | 190 | 160 | 21 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Ordinal.IT.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.IT.Corpus
import Duckling.Testing.Asserts
-- | Test tree running the Italian ordinal corpus through the standard
-- corpus assertion helper.
tests :: TestTree
tests = testGroup "IT Tests"
  [ makeCorpusTest [Seal Ordinal] corpus
  ]
| facebookincubator/duckling | tests/Duckling/Ordinal/IT/Tests.hs | bsd-3-clause | 504 | 0 | 9 | 78 | 79 | 50 | 29 | 11 | 1 |
module HasTorrent
(
module HasTorrent.Types,
module HasTorrent.Types.TypesHelp,
module HasTorrent.Bencode,
module HasTorrent.Network.PeerProtocol,
module HasTorrent.Network.Communicate,
module HasTorrent.Tracker,
) where
import HasTorrent.Types
import HasTorrent.Types.TypesHelp
import HasTorrent.Bencode
import HasTorrent.Network.PeerProtocol
import HasTorrent.Network.Communicate
import HasTorrent.Tracker
| vaishious/has-torrent | src/HasTorrent.hs | bsd-3-clause | 464 | 0 | 5 | 85 | 79 | 53 | 26 | 14 | 0 |
{-# LANGUAGE GADTs, TypeFamilies, TypeOperators, EmptyDataDecls, FlexibleInstances, MultiParamTypeClasses, RankNTypes, QuasiQuotes, TemplateHaskell, ViewPatterns #-}
-------------------------------------------------------------------------
-- lambda lifting for the lambda calculus with top-level declarations
-------------------------------------------------------------------------
module LambdaLifting where
import Ctx
import HobbitLibTH
import Data.List
import Control.Monad.Reader
import Control.Monad.Cont
import Control.Monad.Identity
-- phantom tags distinguishing top-level declaration names (D) from
-- lambda-bound names (L); they carry no runtime data
data L a
data D a

-- | Lambda terms that may mention top-level declaration names.  The
-- type index tracks the object-level type of the term.
data DTerm a where
  Var  :: Name (L a) -> DTerm a                      -- lambda-bound variable
  DVar :: Name (D a) -> DTerm a                      -- top-level declaration name
  Lam  :: Binding (L a) (DTerm b) -> DTerm (a -> b)  -- binder via the hobbits library
  App  :: DTerm (a -> b) -> DTerm a -> DTerm b
instance Show (DTerm a) where
  show = pretty

-- | A sequence of top-level declarations ending in a "return value"
-- term; each declaration's D name is in scope in everything after it.
data Decls a where
  DeclsBase :: DTerm a -> Decls a
  DeclsCons :: DTerm b -> Binding (D b) (Decls a) -> Decls a

instance Show (Decls a) where
  show = decls_pretty
-- | Smart constructor: build a 'Lam' from a Haskell function, hiding
-- the explicit uses of 'nu' and 'Var'.
lam :: (DTerm a -> DTerm b) -> DTerm (a -> b)
lam body = Lam $ nu $ \n -> body (Var n)
------------------------------------------------------------
-- pretty printing
------------------------------------------------------------

-- | Constant-string functor: labels context entries with their display
-- names inside a 'MapCtx'.
newtype StringF x = StringF String
unStringF (StringF str) = str

-- | Pretty print a closed term.
pretty :: DTerm a -> String
pretty t = mpretty (emptyMb t) emptyMC 0
-- | Pretty print a term under a binding context; @varnames@ gives the
-- display name of each bound variable and @n@ is the next fresh
-- lambda-variable index.
mpretty :: Mb ctx (DTerm a) -> MapCtx StringF ctx -> Int -> String
mpretty [nuQQ| Var b |] varnames n =
  mprettyName (mbNameBoundP b) varnames
mpretty [nuQQ| DVar b |] varnames n =
  mprettyName (mbNameBoundP b) varnames
mpretty [nuQQ| Lam b |] varnames n =
  -- invent a fresh display name "xN" for the bound variable
  let x = "x" ++ show n in
  "(\\" ++ x ++ "." ++ mpretty (combineMb b) (varnames :> (StringF x)) (n+1) ++ ")"
mpretty [nuQQ| App b1 b2 |] varnames n =
  "(" ++ mpretty b1 varnames n
  ++ " " ++ mpretty b2 varnames n ++ ")"

-- bound names are looked up in the context; free names are flagged
mprettyName (Left pf) varnames = unStringF (ctxLookup pf varnames)
mprettyName (Right n) varnames = "##free var: " ++ (show n) ++ "##"
-- | Pretty print a declaration sequence.
decls_pretty :: Decls a -> String
decls_pretty decls =
  "[ decls:\n" ++ (mdecls_pretty (emptyMb decls) emptyMC 0) ++ "]"

-- | Worker for 'decls_pretty': declaration number N is printed as
-- "FN = ..." and the name FN is recorded for references in later ones.
mdecls_pretty :: Mb ctx (Decls a) -> MapCtx StringF ctx -> Int -> String
mdecls_pretty [nuQQ| DeclsBase t |] varnames n =
  (mpretty t varnames 0) ++ "\n"
mdecls_pretty [nuQQ| DeclsCons term rest |] varnames n =
  let fname = "F" ++ show n in
  fname ++ " = " ++ (mpretty term varnames 0) ++ "\n\n"
  ++ mdecls_pretty (combineMb rest) (varnames :> (StringF fname)) (n+1)
------------------------------------------------------------
-- "peeling" lambdas off of a term
------------------------------------------------------------

-- AddArrows lctx b is a1 -> ... -> an -> b where the ai are the
-- object-level types of the entries of the lambda context lctx
type family AddArrows ctx b
type instance AddArrows CtxNil b = b
type instance AddArrows (CtxCons ctx (L a)) b = AddArrows ctx (a -> b)

-- | Result of 'peelLambdas': the stripped lambda context together with
-- the body, now under the extended binding context.
data PeelRet ctx a where
  PeelRet :: LCtx lam_ctx -> Mb (ctx :++: lam_ctx) (DTerm a) ->
             PeelRet ctx (AddArrows lam_ctx a)

-- | Strip all leading 'Lam's off a term, accumulating their binders
-- into the lambda context.
peelLambdas :: LCtx lam_ctx -> Mb (ctx :++: lam_ctx) (DTerm a) ->
               PeelRet ctx (AddArrows lam_ctx a)
peelLambdas lctx [nuQQ| Lam b |] =
  peelLambdas (lctx :> IsLType) (combineMb b)
peelLambdas lctx [nuQQ| b |] = PeelRet lctx b

-- | Re-wrap a body in one lambda per entry of the lambda context,
-- passing the freshly bound names to the body builder.
addLams :: LCtx lam_ctx -> (MapCtx Name lam_ctx -> DTerm a) ->
           DTerm (AddArrows lam_ctx a)
addLams EmptyMC k = k emptyMC
addLams (lam_ctx :> IsLType) k =
  addLams lam_ctx (\names -> Lam $ nu $ \x -> k (names :> x))
------------------------------------------------------------
-- sub-contexts
------------------------------------------------------------

-- A sub-context is witnessed by a projection on name maps.
-- FIXME: use this type in place of functions
type SubCtx ctx' ctx = MapCtx Name ctx -> MapCtx Name ctx'

-- extend both contexts with the same new entry
subCtxConsBoth :: SubCtx ctx' ctx -> SubCtx (CtxCons ctx' a) (CtxCons ctx a)
subCtxConsBoth subCtx = \(ctx :> x) -> subCtx ctx :> x

-- extend only the larger context; the new entry is projected away
subCtxConsR :: SubCtx ctx' ctx -> SubCtx ctx' (CtxCons ctx a)
subCtxConsR subCtx = \(ctx :> _) -> subCtx ctx
------------------------------------------------------------
-- operations on contexts of free variables
------------------------------------------------------------
{-
-- exists a sub-context of fvs
data ExSubFVs ctx fvs where
ExSubFVs :: MapCtx (MbLName ctx) fvs' -> SubCtx fvs' fvs ->
ExSubFVs ctx fvs
-- add an FV to an ExSubFVs
exSubFVsCons :: ExSubFVs ctx fvs -> MbLName ctx a -> ExSubFVs ctx (CtxCons fvs a)
exSubFVsCons (ExSubFVs fvs subCtx) n =
ExSubFVs (fvs :> n) (subCtxConsBoth subCtx)
-- don't add the FV, just extend the type
exSubFVsWeaken :: ExSubFVs ctx fvs -> ExSubFVs ctx (CtxCons fvs a)
exSubFVsWeaken (ExSubFVs fvs subCtx) =
ExSubFVs fvs (subCtxConsR subCtx)
-- removing a name from a context of fvs
remMbLName :: MapCtx (MbLName ctx) fvs -> MbLName ctx a -> ExSubFVs ctx fvs
remMbLName EmptyMC _ = ExSubFVs EmptyMC id
remMbLName (fvs :> MbLName fv) (MbLName n) =
case mbCmpName fv n of
Just _ -> exSubFVsWeaken $ remMbLName fvs (MbLName n)
Nothing -> exSubFVsCons (remMbLName fvs (MbLName n)) (MbLName fv)
-}
-- | A context of free variables: each entry is an L-kinded name under
-- the binding context @ctx@.
type FVList ctx fvs = MapCtx (MbLName ctx) fvs

-- | Result of 'fvUnion': the unioned free-variable context together
-- with projections from it back onto each of the two operands.
data FVUnionRet ctx fvs1 fvs2 where
  FVUnionRet :: FVList ctx fvs -> SubCtx fvs1 fvs -> SubCtx fvs2 fvs ->
                FVUnionRet ctx fvs1 fvs2

-- | Union two free-variable contexts, de-duplicating names that occur
-- in both.
fvUnion :: FVList ctx fvs1 -> FVList ctx fvs2 ->
           FVUnionRet ctx fvs1 fvs2
fvUnion EmptyMC EmptyMC =
  FVUnionRet EmptyMC (\_ -> EmptyMC) (\_ -> EmptyMC)
fvUnion EmptyMC (fvs2 :> fv2) =
  case fvUnion EmptyMC fvs2 of
    FVUnionRet fvs f1 f2 ->
      case elemMC fv2 fvs of
        -- new name: append it and extend the second projection
        Nothing -> FVUnionRet (fvs :> fv2) (\(xs :> x) -> f1 xs) (\(xs :> x) -> f2 xs :> x)
        -- duplicate: re-use the existing entry at index idx
        Just idx -> FVUnionRet fvs f1 (\xs -> f2 xs :> ctxLookup idx xs)
fvUnion (fvs1 :> fv1) fvs2 =
  case fvUnion fvs1 fvs2 of
    FVUnionRet fvs f1 f2 ->
      case elemMC fv1 fvs of
        Nothing -> FVUnionRet (fvs :> fv1) (\(xs :> x) -> f1 xs :> x) (\(xs :> x) -> f2 xs)
        Just idx -> FVUnionRet fvs (\xs -> f1 xs :> ctxLookup idx xs) f2

-- | Look up a name in a free-variable context, returning its index
-- when present.
elemMC :: MbLName ctx a -> FVList ctx fvs -> Maybe (InCtx fvs a)
elemMC _ EmptyMC = Nothing
elemMC mbLN@(MbLName n) (mc :> MbLName n') =
  case mbCmpName n n' of
    Just Refl -> Just InCtxBase
    Nothing -> fmap InCtxStep (elemMC mbLN mc)
------------------------------------------------------------
-- deBruijn terms, i.e., closed terms
------------------------------------------------------------

-- | Singleton proof that a context entry is lambda-kinded (L).
data IsLType a where IsLType :: IsLType (L a)
type LCtx ctx = MapCtx IsLType ctx

-- | An L-kinded name under a binding context.
data MbLName ctx a where
  MbLName :: Mb ctx (Name (L a)) -> MbLName ctx (L a)

-- | Forget the names, keeping only the L-kind proofs.
fvsToLCtx :: FVList ctx lctx -> LCtx lctx
fvsToLCtx = ctxMap mbLNameToProof where
  mbLNameToProof :: MbLName ctx a -> IsLType a
  mbLNameToProof (MbLName _) = IsLType
-- | Terms whose local variables are de Bruijn indices into @ctx@
-- (top-level D names stay as names); these contain no binders and so
-- can be weakened and substituted freely.
data DBTerm ctx a where
  DBWeaken :: SubCtx ctx1 ctx -> DBTerm ctx1 a -> DBTerm ctx a
  DBVar :: InCtx ctx (L a) -> DBTerm ctx a
  DBDVar :: Name (D a) -> DBTerm ctx a
  DBApp :: DBTerm ctx (a -> b) -> DBTerm ctx a -> DBTerm ctx b

-- | Substitute concrete names for all indices, yielding a 'DTerm'.
dbSubst :: DBTerm ctx a -> MapCtx Name ctx -> DTerm a
dbSubst (DBWeaken f db) names = dbSubst db $ f names
dbSubst (DBVar inCtx) names = Var $ ctxLookup inCtx names
dbSubst (DBDVar dVar) _ = DVar dVar
dbSubst (DBApp db1 db2) names =
  App (dbSubst db1 names) (dbSubst db2 names)
-- | Apply a de Bruijn term to every variable of a free-variable
-- context, in order.
dbAppMultiNames :: DBTerm fvs (AddArrows fvs a) -> FVList ctx fvs ->
                   DBTerm fvs a
dbAppMultiNames db args = dbAppMultiNamesH db args (ctxToInCtxs args)

-- worker: walks the argument context in lock-step with its indices
dbAppMultiNamesH :: DBTerm fvs (AddArrows args a) ->
                    FVList ctx args -> MapCtx (InCtx fvs) args ->
                    DBTerm fvs a
dbAppMultiNamesH fun EmptyMC _ = fun
dbAppMultiNamesH fun (args :> MbLName _) (inCtxs :> inCtx) =
  DBApp (dbAppMultiNamesH fun args inCtxs) (DBVar inCtx)

-- | The identity environment: each entry of a context as an index into
-- that same context.
ctxToInCtxs :: MapCtx f ctx -> MapCtx (InCtx ctx) ctx
ctxToInCtxs EmptyMC = EmptyMC
ctxToInCtxs (ctx :> _) = ctxMap InCtxStep (ctxToInCtxs ctx) :> InCtxBase
------------------------------------------------------------
-- DBTerms combined with their free variables
------------------------------------------------------------

-- | A de Bruijn term packaged with an existential context @fvs@ of its
-- true free variables; @lctx@ is a local lambda context also indexing
-- the term.
data FVDBTerm ctx lctx a where
  FVDBTerm :: FVList ctx fvs -> DBTerm (fvs :++: lctx) a ->
              FVDBTerm ctx lctx a

-- | Split the free variables of a term: names bound in @lctx@ move out
-- of the fvs component and into the lctx component.
fvDBSepLVars :: MapCtx f lctx -> FVDBTerm (ctx :++: lctx) CtxNil a ->
                FVDBTerm ctx lctx a
fvDBSepLVars lctx (FVDBTerm fvs db) =
  case fvDBSepLVarsH lctx Tag fvs of
    SepRet fvs' f -> FVDBTerm fvs' (DBWeaken f db)

-- | Intermediate result of 'fvDBSepLVarsH': the remaining truly free
-- variables plus the re-indexing projection.
data SepRet lctx ctx fvs where
  SepRet :: FVList ctx fvs' -> SubCtx fvs (fvs' :++: lctx) ->
            SepRet lctx ctx fvs

fvDBSepLVarsH :: MapCtx f lctx -> Tag ctx -> FVList (ctx :++: lctx) fvs ->
                 SepRet lctx ctx fvs
fvDBSepLVarsH _ _ EmptyMC = SepRet EmptyMC (\_ -> EmptyMC)
fvDBSepLVarsH lctx ctx (fvs :> fv@(MbLName n)) =
  case fvDBSepLVarsH lctx ctx fvs of
    SepRet m f ->
      case raiseAppName (ctxAppendL ctx lctx) n of
        -- the name is bound in lctx: index into that part
        Left idx -> SepRet m (\xs -> f xs :> ctxLookup (weakenInCtxL (ctxTag m) idx) xs)
        -- the name is truly free: keep it in the fvs component
        Right n ->
          SepRet (m :> MbLName n)
                 (\xs -> case mapCtxSplit (ctxAppendL (ctxConsTag (ctxTag m) fv) lctx) xs of
                           (fvs' :> fv', lctxs) -> f (ctxAppend fvs' lctxs) :> fv')

-- | Decide whether a name under an appended context is bound in the
-- second part (Left, with its index) or only in the first (Right).
raiseAppName :: IsAppend ctx1 ctx2 ctx -> Mb ctx (Name a) ->
                Either (InCtx ctx2 a) (Mb ctx1 (Name a))
raiseAppName isApp n =
  case mbToplevel $(superComb [| mbNameBoundP |]) (separateMb isApp n) of
    [nuQQ| Left inCtx |] -> Left $ mbInCtx inCtx
    [nuQQ| Right n |] -> Right n
{-
lowerFVs :: FVList ctx fvs -> MapCtx (MbLName (CtxCons ctx a)) fvs
lowerFVs EmptyMC = EmptyMC
lowerFVs (fvs :> MbLName n) =
lowerFVs fvs :>
MbLName (combineMb $ mbToplevel $(superComb [| nu . const |]) n)
lowerMultiL :: MapCtx f ctx -> a -> Mb ctx a
lowerMultiL EmptyMC x = emptyMb x
lowerMultiL (ctx :> _) x = combineMb $ lowerMultiL ctx $ nu $ \_ -> x
mkFV :: MapCtx f ctx -> MbLName (CtxCons ctx (L a)) (L a)
mkFV ctx = MbLName $ combineMb $ lowerMultiL ctx (nu $ \n -> n)
mkFVs :: LCtx ctx -> LCtx ctx2 -> MapCtx (MbLName (ctx :++: ctx2)) ctx2
mkFVs ctx EmptyMC = EmptyMC
mkFVs ctx (ctx2 :> IsLType) =
lowerFVs (mkFVs ctx ctx2) :> (mkFV $ ctxAppend ctx ctx2)
raiseFVs :: Tag fvs -> LCtx lctx ->
MapCtx (MbLName (ctx :++: lctx)) (fvs :++: lctx) ->
FVList ctx fvs
raiseFVs = undefined
fvDBSepLVars :: LCtx ctx -> LCtx lctx -> FVDBTerm (ctx :++: lctx) CtxNil a ->
FVDBTerm ctx lctx a
fvDBSepLVars ctx lctx (FVDBTerm fvs db) =
undefined
-}
{-
helper1 lctx db $ fvUnion fvs $ mkFVs ctx lctx where
helper1 :: LCtx lctx -> DBTerm fvs a ->
FVUnionRet (ctx :++: lctx) fvs lctx ->
FVDBTerm ctx lctx a
helper1 lctx db (FVUnionRet fvs' sub1 sub2) =
FVDBTerm (raiseFVs tag lctx fvs') (DBWeaken sub1 db)
-}
------------------------------------------------------------
-- lambda-lifting, woo hoo!
------------------------------------------------------------

-- this cannot ever happen (there is no ctor for InCtx CtxNil a)
inCtxNil :: InCtx CtxNil a -> b
inCtxNil _ = undefined

-- a D name can never be bound in an all-L context; vacuous by induction
dInLCtx :: LCtx ctx -> InCtx ctx (D a) -> b
dInLCtx EmptyMC inCtx = inCtxNil inCtx
dInLCtx (lctx :> IsLType) (InCtxStep inCtx) = dInLCtx lctx inCtx

-- | Lambda-lifting computations: CPS over the output declaration list,
-- producing a de Bruijn term together with its free variables.
type LLBodyRet b ctx a = Cont (Decls b) (FVDBTerm ctx CtxNil a)

-- | Felleisen-style control operator: capture the current continuation
-- of the declaration-building computation.
felleisenC :: ((a -> Decls b) -> Decls b) -> Cont (Decls b) a
felleisenC f = ContT (\k -> Identity (f (runIdentity . k)))
-- | The workhorse: lambda-lift a term body.  Variables and
-- applications are translated structurally; every 'Lam' is peeled,
-- emitted as a fresh top-level declaration (via 'felleisenC') that is
-- abstracted over its free variables, and replaced by that declaration
-- applied to those free variables.
llBody :: LCtx ctx -> Mb ctx (DTerm a) -> LLBodyRet b ctx a
llBody ctx [nuQQ| Var v |] =
  return $ FVDBTerm (EmptyMC :> MbLName v) $ DBVar InCtxBase
llBody ctx [nuQQ| DVar d |] =
  case mbNameBoundP d of
    Right d -> return $ FVDBTerm EmptyMC $ DBDVar d
    Left inCtx -> dInLCtx ctx inCtx  -- impossible: ctx holds only L names
llBody ctx [nuQQ| App t1 t2 |] = do
  FVDBTerm fvs1 db1 <- llBody ctx t1
  FVDBTerm fvs2 db2 <- llBody ctx t2
  FVUnionRet names sub1 sub2 <- return $ fvUnion fvs1 fvs2
  return $ FVDBTerm names $ DBApp (DBWeaken sub1 db1) (DBWeaken sub2 db2)
llBody ctx lam @ [nuQQ| Lam _ |] = do
  PeelRet lctx body <- return $ peelLambdas EmptyMC lam
  llret <- llBody (ctxAppend ctx lctx) body
  FVDBTerm fvs db <- return $ fvDBSepLVars lctx llret
  -- emit a declaration abstracted over the free variables and then the
  -- original lambda binders; the call site applies it to the fvs
  felleisenC $ \k ->
    DeclsCons (addLams (fvsToLCtx fvs) $ \names1 ->
                addLams lctx $ \names2 ->
                dbSubst db (ctxAppend names1 names2))
    $ nu $ \d -> k $ FVDBTerm fvs (dbAppMultiNames (DBDVar d) fvs)
-- | Top-level lambda lifting: turn a closed term into a sequence of
-- top-level declarations plus a lambda-free result term.
lambdaLift :: DTerm a -> Decls a
lambdaLift t =
  runCont (llBody EmptyMC (emptyMb t))
          (\(FVDBTerm fvs db) ->
            -- the input is closed, so all "free variables" are genuinely
            -- free names that survive the empty binding elimination
            let none = ctxMap (\(MbLName mbn) -> elimEmptyMb mbn) fvs
            in DeclsBase (dbSubst db none))
------------------------------------------------------------
-- lambda-lifting inside bindings
------------------------------------------------------------

-- | Lambda-lift under a binding context.
mbLambdaLift :: Mb ctx (DTerm a) -> Mb ctx (Decls a)
mbLambdaLift = mbToplevel $(superComb [| lambdaLift |])

-- | Lambda-lift the final term of an existing declaration sequence,
-- keeping the earlier declarations intact.
lambdaLiftDecls :: Decls a -> Decls a
lambdaLiftDecls (DeclsBase t) = lambdaLift t
lambdaLiftDecls (DeclsCons t rest) =
  DeclsCons t $ mbToplevel $(superComb [| lambdaLiftDecls |]) rest

-- | Modules: a declaration sequence, possibly under functor binders.
data Module a where
  Functor :: Binding (L a) (Module b) -> (Module b)
  Module :: Decls a -> Module a

-- | Lambda-lift inside a module, going under any functor binders.
lambdaLiftModule :: Module a -> Module a
lambdaLiftModule (Module d) = Module $ lambdaLiftDecls d
lambdaLiftModule (Functor b) =
  Functor $ mbToplevel $(superComb [| lambdaLiftModule |]) b
------------------------------------------------------------
-- examples
------------------------------------------------------------

-- sample terms paired with their lambda-lifted declaration sequences
ex1 = lam (\f -> (lam $ \x -> App f x))
res1 = lambdaLift ex1

ex2 = lam (\f1 -> App f1 (lam (\f2 -> lam (\x -> App f2 x))))
res2 = lambdaLift ex2

ex3 = lam (\x -> lam (\f1 -> App f1 (lam (\f2 -> lam (\y -> f2 `App` x `App` y)))))
res3 = lambdaLift ex3

ex4 = lam (\x -> lam (\f1 -> App f1 (lam (\f2 -> lam (\y -> f2 `App` (f1 `App` x `App` y))))))
res4 = lambdaLift ex4

ex5 = lam (\f1 -> lam $ \f2 -> App f1 (lam $ \x -> App f2 x))
res5 = lambdaLift ex5

-- lambda-lifting under a binder, i.e. with a free variable f
ex6 = nu (\f -> App (Var f) (lam $ \x -> x))
res6 = mbToplevel $(superComb [| lambdaLift |]) ex6
| eddywestbrook/hobbits | archival/LambdaLiftingDB.hs | bsd-3-clause | 14,594 | 122 | 18 | 3,324 | 4,321 | 2,247 | 2,074 | -1 | -1 |
module Print3Flipped where
-- | The first greeting, built by concatenating two fragments.
myGreeting :: String
myGreeting = "hello" ++ " world"
-- | Fragments combined by 'main' into a second greeting.  Signature
-- spacing normalised to match the rest of the file.
hello :: String
hello = "hello"

world :: String
world = "world!"
-- | Print the fixed greeting, then a second one assembled from the
-- 'hello' and 'world' fragments.
main :: IO ()
main = do
  putStrLn myGreeting
  putStrLn (hello ++ " " ++ world)
| dhaneshkk/haskell-programming | print3Flippedl.hs | bsd-3-clause | 298 | 0 | 9 | 73 | 92 | 51 | 41 | 12 | 1 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[SimplUtils]{The simplifier utilities}
-}
{-# LANGUAGE CPP #-}
module SimplUtils (
-- Rebuilding
mkLam, mkCase, prepareAlts, tryEtaExpandRhs,
-- Inlining,
preInlineUnconditionally, postInlineUnconditionally,
activeUnfolding, activeRule,
getUnfoldingInRuleMatch,
simplEnvForGHCi, updModeForStableUnfoldings, updModeForRules,
-- The continuation type
SimplCont(..), DupFlag(..),
isSimplified,
contIsDupable, contResultType, contHoleType,
contIsTrivial, contArgs,
countArgs,
mkBoringStop, mkRhsStop, mkLazyArgStop, contIsRhsOrArg,
interestingCallContext,
-- ArgInfo
ArgInfo(..), ArgSpec(..), mkArgInfo,
addValArgTo, addCastTo, addTyArgTo,
argInfoExpr, argInfoAppArgs, pushSimplifiedArgs,
abstractFloats
) where
#include "HsVersions.h"
import SimplEnv
import CoreMonad ( SimplifierMode(..), Tick(..) )
import DynFlags
import CoreSyn
import qualified CoreSubst
import PprCore
import CoreFVs
import CoreUtils
import CoreArity
import CoreUnfold
import Name
import Id
import Var
import Demand
import SimplMonad
import Type hiding( substTy )
import Coercion hiding( substCo )
import DataCon ( dataConWorkId )
import VarEnv
import VarSet
import BasicTypes
import Util
import MonadUtils
import Outputable
import Pair
import Control.Monad ( when )
{-
************************************************************************
* *
The SimplCont and DupFlag types
* *
************************************************************************
A SimplCont allows the simplifier to traverse the expression in a
zipper-like fashion. The SimplCont represents the rest of the expression,
"above" the point of interest.
You can also think of a SimplCont as an "evaluation context", using
that term in the way it is used for operational semantics. This is the
way I usually think of it, For example you'll often see a syntax for
evaluation context looking like
C ::= [] | C e | case C of alts | C `cast` co
That's the kind of thing we are doing here, and I use that syntax in
the comments.
Key points:
* A SimplCont describes a *strict* context (just like
evaluation contexts do). E.g. Just [] is not a SimplCont
* A SimplCont describes a context that *does not* bind
any variables. E.g. \x. [] is not a SimplCont
-}
-- | The "rest of the expression" surrounding the point the simplifier
-- is working on -- an evaluation context in the operational-semantics
-- sense (see the commentary above).
data SimplCont
  = Stop                -- An empty context, or <hole>
        OutType         -- Type of the <hole>
        CallCtxt        -- Tells if there is something interesting about
                        --          the context, and hence the inliner
                        --          should be a bit keener (see interestingCallContext)
                        -- Specifically:
                        --     This is an argument of a function that has RULES
                        --     Inlining the call might allow the rule to fire
                        -- Never ValAppCxt (use ApplyToVal instead)
                        -- or CaseCtxt (use Select instead)

  | CastIt              -- <hole> `cast` co
        OutCoercion             -- The coercion simplified
                                -- Invariant: never an identity coercion
        SimplCont

  | ApplyToVal {        -- <hole> arg
        sc_dup  :: DupFlag,     -- See Note [DupFlag invariants]
        sc_arg  :: InExpr,      -- The argument,
        sc_env  :: StaticEnv,   --     and its static env
        sc_cont :: SimplCont }

  | ApplyToTy {         -- <hole> ty
        sc_arg_ty  :: OutType,  -- Argument type
        sc_hole_ty :: OutType,  -- Type of the function, presumably (forall a. blah)
                                -- See Note [The hole type in ApplyToTy]
        sc_cont    :: SimplCont }

  | Select {            -- case <hole> of alts
        sc_dup  :: DupFlag,     -- See Note [DupFlag invariants]
        sc_bndr :: InId,        -- case binder
        sc_alts :: [InAlt],     -- Alternatives
        sc_env  :: StaticEnv,   --     and their static environment
        sc_cont :: SimplCont }

  -- The two strict forms have no DupFlag, because we never duplicate them
  | StrictBind          -- (\x* \xs. e) <hole>
        InId [InBndr]           -- let x* = <hole> in e
        InExpr StaticEnv        --      is a special case
        SimplCont

  | StrictArg           -- f e1 ..en <hole>
        ArgInfo         -- Specifies f, e1..en, Whether f has rules, etc
                        --     plus strictness flags for *further* args
        CallCtxt        -- Whether *this* argument position is interesting
        SimplCont

  | TickIt              -- Tick tickish <hole>
        (Tickish Id)    -- the tick to re-apply once the hole is filled
        SimplCont
-- | Duplication status of a continuation fragment.
data DupFlag = NoDup       -- Unsimplified, might be big
             | Simplified  -- Simplified
             | OkToDup     -- Simplified and small

isSimplified :: DupFlag -> Bool
isSimplified NoDup = False
isSimplified _     = True       -- Invariant: the subst-env is empty

-- | Substitute through the static environment, except when the flag
-- says the type is already simplified (then the subst-env is empty).
perhapsSubstTy :: DupFlag -> StaticEnv -> Type -> Type
perhapsSubstTy dup env ty
  | isSimplified dup = ty
  | otherwise        = substTy env ty
{-
Note [DupFlag invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~
In both (ApplyToVal dup _ env k)
and (Select dup _ _ env k)
the following invariants hold
(a) if dup = OkToDup, then continuation k is also ok-to-dup
(b) if dup = OkToDup or Simplified, the subst-env is empty
(and and hence no need to re-simplify)
-}
-- debug pretty-printers for the simplifier's continuations
instance Outputable DupFlag where
  ppr OkToDup    = text "ok"
  ppr NoDup      = text "nodup"
  ppr Simplified = text "simpl"

instance Outputable SimplCont where
  ppr (Stop ty interesting) = text "Stop" <> brackets (ppr interesting) <+> ppr ty
  ppr (CastIt co cont  )    = (text "CastIt" <+> ppr co) $$ ppr cont
  ppr (TickIt t cont)       = (text "TickIt" <+> ppr t) $$ ppr cont
  ppr (ApplyToTy  { sc_arg_ty = ty, sc_cont = cont })
    = (text "ApplyToTy" <+> pprParendType ty) $$ ppr cont
  ppr (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_cont = cont })
    = (text "ApplyToVal" <+> ppr dup <+> pprParendExpr arg)
      $$ ppr cont
  ppr (StrictBind b _ _ _ cont) = (text "StrictBind" <+> ppr b) $$ ppr cont
  ppr (StrictArg ai _ cont)     = (text "StrictArg" <+> ppr (ai_fun ai)) $$ ppr cont
  ppr (Select { sc_dup = dup, sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont })
    = (text "Select" <+> ppr dup <+> ppr bndr) $$
      ifPprDebug (nest 2 $ vcat [ppr (seTvSubst se), ppr alts]) $$ ppr cont
{- Note [The hole type in ApplyToTy]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The sc_hole_ty field of ApplyToTy records the type of the "hole" in the
continuation. It is absolutely necessary to compute contHoleType, but it is
not used for anything else (and hence may not be evaluated).
Why is it necessary for contHoleType? Consider the continuation
ApplyToType Int (Stop Int)
corresponding to
(<hole> @Int) :: Int
What is the type of <hole>? It could be (forall a. Int) or (forall a. a),
and there is no way to know which, so we must record it.
In a chain of applications (f @t1 @t2 @t3) we'll lazily compute exprType
for (f @t1) and (f @t1 @t2), which is potentially non-linear; but it probably
doesn't matter because we'll never compute them all.
************************************************************************
* *
ArgInfo and ArgSpec
* *
************************************************************************
-}
-- | Accumulated information about the function at the head of a call
-- being simplified, together with the arguments processed so far.
data ArgInfo
  = ArgInfo {
        ai_fun   :: OutId,      -- The function
        ai_args  :: [ArgSpec],  -- ...applied to these args (which are in *reverse* order)
        ai_type  :: OutType,    -- Type of (f a1 ... an)
        ai_rules :: [CoreRule], -- Rules for this function
        ai_encl  :: Bool,       -- Flag saying whether this function
                                -- or an enclosing one has rules (recursively)
                                --      True => be keener to inline in all args
        ai_strs  :: [Bool],     -- Strictness of remaining arguments
                                --   Usually infinite, but if it is finite it guarantees
                                --   that the function diverges after being given
                                --   that number of args
        ai_discs :: [Int]       -- Discounts for remaining arguments; non-zero => be keener to inline
                                --   Always infinite
    }

-- | One already-processed argument (stored in 'ai_args'); mirrors the
-- application forms of 'SimplCont'.
data ArgSpec
  = ValArg OutExpr                  -- Apply to this (coercion or value); c.f. ApplyToVal
  | TyArg { as_arg_ty  :: OutType   -- Apply to this type; c.f. ApplyToTy
          , as_hole_ty :: OutType } -- Type of the function (presumably forall a. blah)
  | CastBy OutCoercion              -- Cast by this; c.f. CastIt

instance Outputable ArgSpec where
  ppr (ValArg e)                 = text "ValArg" <+> ppr e
  ppr (TyArg { as_arg_ty = ty }) = text "TyArg" <+> ppr ty
  ppr (CastBy c)                 = text "CastBy" <+> ppr c
-- | Record a value argument, advancing the result type.
addValArgTo :: ArgInfo -> OutExpr -> ArgInfo
addValArgTo ai arg = ai { ai_args = ValArg arg : ai_args ai
                        , ai_type = applyTypeToArg (ai_type ai) arg }

-- | Record a type argument, instantiating the (forall) result type.
addTyArgTo :: ArgInfo -> OutType -> ArgInfo
addTyArgTo ai arg_ty = ai { ai_args = arg_spec : ai_args ai
                          , ai_type = piResultTy poly_fun_ty arg_ty }
  where
    poly_fun_ty = ai_type ai
    arg_spec    = TyArg { as_arg_ty = arg_ty, as_hole_ty = poly_fun_ty }

-- | Record a cast, moving the result type to the coercion's right side.
addCastTo :: ArgInfo -> OutCoercion -> ArgInfo
addCastTo ai co = ai { ai_args = CastBy co : ai_args ai
                     , ai_type = pSnd (coercionKind co) }
-- | The value/type arguments up to (but not including) the first cast.
argInfoAppArgs :: [ArgSpec] -> [OutExpr]
argInfoAppArgs []                              = []
argInfoAppArgs (CastBy {} : _)                 = []  -- Stop at a cast
argInfoAppArgs (ValArg e : as)                 = e : argInfoAppArgs as
argInfoAppArgs (TyArg { as_arg_ty = ty } : as) = Type ty : argInfoAppArgs as

-- | Re-wrap a list of already-simplified arguments around a
-- continuation, innermost argument first.
pushSimplifiedArgs :: SimplEnv -> [ArgSpec] -> SimplCont -> SimplCont
pushSimplifiedArgs _env []           k = k
pushSimplifiedArgs env  (arg : args) k
  = case arg of
      TyArg { as_arg_ty = arg_ty, as_hole_ty = hole_ty }
               -> ApplyToTy  { sc_arg_ty = arg_ty, sc_hole_ty = hole_ty, sc_cont = rest }
      ValArg e -> ApplyToVal { sc_arg = e, sc_env = env, sc_dup = Simplified, sc_cont = rest }
      CastBy c -> CastIt c rest
  where
    rest = pushSimplifiedArgs env args k
                -- The env has an empty SubstEnv

-- | Rebuild the application expression from a function and its
-- processed arguments.
argInfoExpr :: OutId -> [ArgSpec] -> OutExpr
-- NB: the [ArgSpec] is reversed so that the first arg
-- in the list is the last one in the application
argInfoExpr fun rev_args
  = go rev_args
  where
    go []                              = Var fun
    go (ValArg a : as)                 = go as `App` a
    go (TyArg { as_arg_ty = ty } : as) = go as `App` Type ty
    go (CastBy co : as)                = mkCast (go as) co
{-
************************************************************************
* *
Functions on SimplCont
* *
************************************************************************
-}
-- smart constructors for the three flavours of Stop continuation
mkBoringStop :: OutType -> SimplCont
mkBoringStop ty = Stop ty BoringCtxt

mkRhsStop :: OutType -> SimplCont -- See Note [RHS of lets] in CoreUnfold
mkRhsStop ty = Stop ty RhsCtxt

mkLazyArgStop :: OutType -> CallCtxt -> SimplCont
mkLazyArgStop ty cci = Stop ty cci

-------------------
-- | Is the hole the right-hand side of a let, or a strict argument?
contIsRhsOrArg :: SimplCont -> Bool
contIsRhsOrArg (Stop {})       = True
contIsRhsOrArg (StrictBind {}) = True
contIsRhsOrArg (StrictArg {})  = True
contIsRhsOrArg _               = False

contIsRhs :: SimplCont -> Bool
contIsRhs (Stop _ RhsCtxt) = True
contIsRhs _                = False

-------------------
-- | Can this continuation be duplicated (e.g. pushed into case
-- branches) without code blow-up or re-simplification?
contIsDupable :: SimplCont -> Bool
contIsDupable (Stop {})                         = True
contIsDupable (ApplyToTy  { sc_cont = k })      = contIsDupable k
contIsDupable (ApplyToVal { sc_dup = OkToDup }) = True -- See Note [DupFlag invariants]
contIsDupable (Select { sc_dup = OkToDup })     = True -- ...ditto...
contIsDupable (CastIt _ k)                      = contIsDupable k
contIsDupable _                                 = False

-------------------
-- | True for contexts doing no real work: only type applications,
-- coercion-valued arguments, and casts around the hole.
contIsTrivial :: SimplCont -> Bool
contIsTrivial (Stop {})                                         = True
contIsTrivial (ApplyToTy { sc_cont = k })                       = contIsTrivial k
contIsTrivial (ApplyToVal { sc_arg = Coercion _, sc_cont = k }) = contIsTrivial k
contIsTrivial (CastIt _ k)                                      = contIsTrivial k
contIsTrivial _                                                 = False
-------------------
contResultType :: SimplCont -> OutType
contResultType (Stop ty _) = ty
contResultType (CastIt _ k) = contResultType k
contResultType (StrictBind _ _ _ _ k) = contResultType k
contResultType (StrictArg _ _ k) = contResultType k
contResultType (Select { sc_cont = k }) = contResultType k
contResultType (ApplyToTy { sc_cont = k }) = contResultType k
contResultType (ApplyToVal { sc_cont = k }) = contResultType k
contResultType (TickIt _ k) = contResultType k
contHoleType :: SimplCont -> OutType
-- The type of the expression that fills the continuation's hole.
-- Contrast 'contResultType', which follows the chain to the final 'Stop'.
contHoleType (Stop ty _)                      = ty
contHoleType (TickIt _ k)                     = contHoleType k
contHoleType (CastIt co _)                    = pFst (coercionKind co)
   -- The hole holds the source (left-hand) type of the coercion
contHoleType (StrictBind b _ _ se _)          = substTy se (idType b)
   -- The binder's type, substituted through the captured environment
contHoleType (StrictArg ai _ _)               = funArgTy (ai_type ai)
contHoleType (ApplyToTy { sc_hole_ty = ty })  = ty  -- See Note [The hole type in ApplyToTy]
contHoleType (ApplyToVal { sc_arg = e, sc_env = se, sc_dup = dup, sc_cont = k })
  = mkFunTy (perhapsSubstTy dup se (exprType e))
            (contHoleType k)
    -- The hole is a function from the (possibly substituted) argument
    -- type to whatever fills the rest of the continuation
contHoleType (Select { sc_dup = d, sc_bndr = b, sc_env = se })
  = perhapsSubstTy d se (idType b)
-------------------
-- | Count all arguments applied by the continuation, including types,
-- coercions, and other values.
countArgs :: SimplCont -> Int
countArgs cont = case cont of
  ApplyToTy  { sc_cont = k } -> 1 + countArgs k
  ApplyToVal { sc_cont = k } -> 1 + countArgs k
  _                          -> 0
contArgs :: SimplCont -> (Bool, [ArgSummary], SimplCont)
-- Summarises value args, discards type args and coercions
-- The returned continuation of the call is only used to
-- answer questions like "are you interesting?"
-- The Bool is True iff the call applies nothing at all ("lone").
contArgs cont
  | lone cont = (True, [], cont)
  | otherwise = go [] cont
  where
    -- A continuation is "lone" when it applies no arguments and
    -- performs no cast on the head
    lone (ApplyToTy {})  = False -- See Note [Lone variables] in CoreUnfold
    lone (ApplyToVal {}) = False
    lone (CastIt {})     = False
    lone _               = True

    -- Walk down the application spine, accumulating a summary for
    -- each value argument (in reverse; fixed up at the end)
    go args (ApplyToVal { sc_arg = arg, sc_env = se, sc_cont = k })
                                        = go (is_interesting arg se : args) k
    go args (ApplyToTy { sc_cont = k }) = go args k
    go args (CastIt _ k)                = go args k
    go args k                           = (False, reverse args, k)

    is_interesting arg se = interestingArg se arg
       -- Do *not* use short-cutting substitution here
       -- because we want to get as much IdInfo as possible
-------------------
mkArgInfo :: Id
          -> [CoreRule] -- Rules for function
          -> Int        -- Number of value args
          -> SimplCont  -- Context of the call
          -> ArgInfo
-- Build the initial ArgInfo for a call: records the strictness and
-- argument-discount information used to decide how hard to work on
-- each argument. For unsaturated calls we deliberately use "vanilla"
-- (all-lazy, no-discount) info; see Note [Unsaturated functions].
mkArgInfo fun rules n_val_args call_cont
  | n_val_args < idArity fun            -- Note [Unsaturated functions]
  = ArgInfo { ai_fun = fun, ai_args = [], ai_type = fun_ty
            , ai_rules = rules, ai_encl = False
            , ai_strs = vanilla_stricts
            , ai_discs = vanilla_discounts }
  | otherwise
  = ArgInfo { ai_fun = fun, ai_args = [], ai_type = fun_ty
            , ai_rules = rules
            , ai_encl = interestingArgContext rules call_cont
            , ai_strs = add_type_str fun_ty arg_stricts
            , ai_discs = arg_discounts }
  where
    fun_ty = idType fun

    -- Infinite lists: one entry per argument, defaulting to
    -- "no discount" / "lazy" beyond the known prefix
    vanilla_discounts, arg_discounts :: [Int]
    vanilla_discounts = repeat 0
    arg_discounts = case idUnfolding fun of
                      CoreUnfolding {uf_guidance = UnfIfGoodArgs {ug_args = discounts}}
                            -> discounts ++ vanilla_discounts
                      _     -> vanilla_discounts

    vanilla_stricts, arg_stricts :: [Bool]
    vanilla_stricts  = repeat False

    arg_stricts
      = case splitStrictSig (idStrictness fun) of
          (demands, result_info)
                | not (demands `lengthExceeds` n_val_args)
                ->      -- Enough args, use the strictness given.
                        -- For bottoming functions we used to pretend that the arg
                        -- is lazy, so that we don't treat the arg as an
                        -- interesting context.  This avoids substituting
                        -- top-level bindings for (say) strings into
                        -- calls to error.  But now we are more careful about
                        -- inlining lone variables, so its ok (see SimplUtils.analyseCont)
                   if isBotRes result_info then
                        map isStrictDmd demands         -- Finite => result is bottom
                   else
                        map isStrictDmd demands ++ vanilla_stricts
               | otherwise
               -> WARN( True, text "More demands than arity" <+> ppr fun <+> ppr (idArity fun)
                                <+> ppr n_val_args <+> ppr demands )
                   vanilla_stricts      -- Not enough args, or no strictness

    add_type_str :: Type -> [Bool] -> [Bool]
    -- If the function arg types are strict, record that in the 'strictness bits'
    -- No need to instantiate because unboxed types (which dominate the strict
    -- types) can't instantiate type variables.
    -- add_type_str is done repeatedly (for each call); might be better
    -- once-for-all in the function
    -- But beware primops/datacons with no strictness
    add_type_str _ [] = []
    add_type_str fun_ty strs            -- Look through foralls
        | Just (_, fun_ty') <- splitForAllTy_maybe fun_ty       -- Includes coercions
        = add_type_str fun_ty' strs
    add_type_str fun_ty (str:strs)      -- Add strict-type info
        | Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty
        = (str || isStrictType arg_ty) : add_type_str fun_ty' strs
    add_type_str _ strs
        = strs
{- Note [Unsaturated functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (test eyeball/inline4)
x = a:as
y = f x
where f has arity 2. Then we do not want to inline 'x', because
it'll just be floated out again. Even if f has lots of discounts
on its first argument -- it must be saturated for these to kick in
-}
{-
************************************************************************
* *
Interesting arguments
* *
************************************************************************
Note [Interesting call context]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to avoid inlining an expression where there can't possibly be
any gain, such as in an argument position. Hence, if the continuation
is interesting (eg. a case scrutinee, application etc.) then we
inline, otherwise we don't.
Previously some_benefit used to return True only if the variable was
applied to some value arguments. This didn't work:
let x = _coerce_ (T Int) Int (I# 3) in
case _coerce_ Int (T Int) x of
I# y -> ....
we want to inline x, but can't see that it's a constructor in a case
scrutinee position, and some_benefit is False.
Another example:
dMonadST = _/\_ t -> :Monad (g1 _@_ t, g2 _@_ t, g3 _@_ t)
.... case dMonadST _@_ x0 of (a,b,c) -> ....
we'd really like to inline dMonadST here, but we *don't* want to
inline if the case expression is just
case x of y { DEFAULT -> ... }
since we can just eliminate this case instead (x is in WHNF). Similar
applies when x is bound to a lambda expression. Hence
contIsInteresting looks for case expressions with just a single
default case.
-}
-- | Classify the context of a call, for use in inlining decisions.
-- See Note [Interesting call context]
interestingCallContext :: SimplCont -> CallCtxt
interestingCallContext = interesting
  where
    interesting cont = case cont of
      Select {}                 -> CaseCtxt
      ApplyToVal {}             -> ValAppCtxt
            -- Can happen if we have (f Int |> co) y
            -- If f has an INLINE prag we need to give it some
            -- motivation to inline. See Note [Cast then apply]
            -- in CoreUnfold
      StrictArg _ cci _         -> cci
      StrictBind {}             -> BoringCtxt
      Stop _ cci                -> cci
      TickIt _ k                -> interesting k
      ApplyToTy { sc_cont = k } -> interesting k
      CastIt _ k                -> interesting k

    -- If this call is the arg of a strict function, the context
    -- is a bit interesting.  If we inline here, we may get useful
    -- evaluation information to avoid repeated evals: e.g.
    --      x + (y * z)
    -- Here the contIsInteresting makes the '*' keener to inline,
    -- which in turn exposes a constructor which makes the '+' inline.
    -- Assuming that +,* aren't small enough to inline regardless.
    --
    -- It's also very important to inline in a strict context for things
    -- like
    --      foldr k z (f x)
    -- Here, the context of (f x) is strict, and if f's unfolding is
    -- a build it's *great* to inline it here.  So we must ensure that
    -- the context for (f x) is not totally uninteresting.
interestingArgContext :: [CoreRule] -> SimplCont -> Bool
-- If the argument has form (f x y), where x,y are boring,
-- and f is marked INLINE, then we don't want to inline f.
-- But if the context of the argument is
--      g (f x y)
-- where g has rules, then we *do* want to inline f, in case it
-- exposes a rule that might fire.  Similarly, if the context is
--      h (g (f x x))
-- where h has rules, then we do want to inline f; hence the
-- call_cont argument to interestingArgContext
--
-- The ai-rules flag makes this happen; if it's
-- set, the inliner gets just enough keener to inline f
-- regardless of how boring f's arguments are, if it's marked INLINE
--
-- The alternative would be to *always* inline an INLINE function,
-- regardless of how boring its context is; but that seems overkill
-- For example, it'd mean that wrapper functions were always inlined
--
-- The call_cont passed to interestingArgContext is the context of
-- the call itself, e.g. g <hole> in the example above
interestingArgContext rules call_cont
  = notNull rules || enclosing_fn_has_rules
  where
    enclosing_fn_has_rules = go call_cont

    go cont = case cont of
      Select {}         -> False
      ApplyToVal {}     -> False  -- Shouldn't really happen
      ApplyToTy {}      -> False  -- Ditto
      StrictArg _ cci _ -> interesting cci
      StrictBind {}     -> False  -- ??
      CastIt _ c        -> go c
      Stop _ cci        -> interesting cci
      TickIt _ c        -> go c

    interesting RuleArgCtxt = True
    interesting _           = False
{- Note [Interesting arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An argument is interesting if it deserves a discount for unfoldings
with a discount in that argument position. The idea is to avoid
unfolding a function that is applied only to variables that have no
unfolding (i.e. they are probably lambda bound): f x y z There is
little point in inlining f here.
Generally, *values* (like (C a b) and (\x.e)) deserve discounts. But
we must look through lets, eg (let x = e in C a b), because the let will
float, exposing the value, if we inline. That makes it different to
exprIsHNF.
Before 2009 we said it was interesting if the argument had *any* structure
at all; i.e. (hasSomeUnfolding v). But that did too much inlining; see Trac #3016.
But we don't regard (f x y) as interesting, unless f is unsaturated.
If it's saturated and f hasn't inlined, then it's probably not going
to now!
Note [Conlike is interesting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f d = ...((*) d x y)...
... f (df d')...
where df is con-like. Then we'd really like to inline 'f' so that the
rule for (*) (df d) can fire. To do this
a) we give a discount for being an argument of a class-op (eg (*) d)
b) we say that a con-like argument (eg (df d)) is interesting
-}
interestingArg :: SimplEnv -> CoreExpr -> ArgSummary
-- See Note [Interesting arguments]
-- Summarise an argument for the purpose of unfolding discounts:
-- trivial, non-trivial, or a genuine value.
interestingArg env e = go env 0 e
  where
    -- n is # value args to which the expression is applied
    go env n (Var v)
       | SimplEnv { seIdSubst = ids, seInScope = in_scope } <- env
       = case lookupVarEnv ids v of
           -- Resolve the variable through the substitution first,
           -- so we judge what it will actually become
           Nothing                     -> go_var n (refineFromInScope in_scope v)
           Just (DoneId v')            -> go_var n (refineFromInScope in_scope v')
           Just (DoneEx e)             -> go (zapSubstEnv env)             n e
           Just (ContEx tvs cvs ids e) -> go (setSubstEnv env tvs cvs ids) n e

    go _   _ (Lit {})          = ValueArg
    go _   _ (Type _)          = TrivArg
    go _   _ (Coercion _)      = TrivArg
    go env n (App fn (Type _)) = go env n fn
    go env n (App fn _)        = go env (n+1) fn  -- count the value argument
    go env n (Tick _ a)        = go env n a
    go env n (Cast e _)        = go env n e
    go env n (Lam v e)
       | isTyVar v             = go env n e
       | n>0                   = NonTrivArg  -- (\x.b) e is NonTriv
       | otherwise             = ValueArg
    go _ _ (Case {})           = NonTrivArg
    go env n (Let b e)         = case go env' n e of
                                   ValueArg -> ValueArg
                                   _        -> NonTrivArg
                               where
                                   -- The let will float, exposing the value
                                   env' = env `addNewInScopeIds` bindersOf b

    -- Head is a variable applied to n value args
    go_var n v
       | isConLikeId v     = ValueArg   -- Experimenting with 'conlike' rather that
                                        --    data constructors here
       | idArity v > n     = ValueArg   -- Catches (eg) primops with arity but no unfolding
       | n > 0             = NonTrivArg -- Saturated or unknown call
       | conlike_unfolding = ValueArg   -- n==0; look for an interesting unfolding
                                        -- See Note [Conlike is interesting]
       | otherwise         = TrivArg    -- n==0, no useful unfolding
       where
         conlike_unfolding = isConLikeUnfolding (idUnfolding v)
{-
************************************************************************
* *
SimplifierMode
* *
************************************************************************
The SimplifierMode controls several switches; see its definition in
CoreMonad
sm_rules :: Bool -- Whether RULES are enabled
sm_inline :: Bool -- Whether inlining is enabled
sm_case_case :: Bool -- Whether case-of-case is enabled
sm_eta_expand :: Bool -- Whether eta-expansion is enabled
-}
-- | The simplifier environment used for GHCi.
-- Do not do any inlining, in case we expose some unboxed
-- tuple stuff that confuses the bytecode interpreter.
simplEnvForGHCi :: DynFlags -> SimplEnv
simplEnvForGHCi dflags
  = mkSimplEnv $
    SimplMode { sm_names      = ["GHCi"]
              , sm_phase      = InitialPhase
              , sm_rules      = gopt Opt_EnableRewriteRules dflags
              , sm_inline     = False
              , sm_eta_expand = gopt Opt_DoLambdaEtaExpansion dflags
              , sm_case_case  = True }
updModeForStableUnfoldings :: Activation -> SimplifierMode -> SimplifierMode
-- See Note [Simplifying inside stable unfoldings]
updModeForStableUnfoldings inline_rule_act current_mode
  = current_mode { sm_phase      = new_phase
                 , sm_inline     = True
                 , sm_eta_expand = False }
    -- For sm_rules, just inherit; sm_rules might be "off"
    -- because of -fno-enable-rewrite-rules
  where
    -- The phase at which the stable unfolding first becomes active
    new_phase = case inline_rule_act of
                  ActiveAfter _ n -> Phase n
                  _               -> InitialPhase
updModeForRules :: SimplifierMode -> SimplifierMode
-- See Note [Simplifying rules]
-- While simplifying a RULE, switch off inlining, rule rewriting and
-- eta-expansion, and reset to the initial phase, so the rule stays
-- recognisably what the user wrote.
updModeForRules current_mode
  = current_mode { sm_phase      = InitialPhase
                 , sm_inline     = False
                 , sm_rules      = False
                 , sm_eta_expand = False }
{- Note [Simplifying rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When simplifying a rule, refrain from any inlining or applying of other RULES.
Doing anything to the LHS is plain confusing, because it means that what the
rule matches is not what the user wrote. c.f. Trac #10595, and #10528.
Moreover, inlining (or applying rules) on rule LHSs risks introducing
Ticks into the LHS, which makes matching trickier. Trac #10665, #10745.
Doing this to either side confounds tools like HERMIT, which seek to reason
about and apply the RULES as originally written. See Trac #10829.
Note [Inlining in gentle mode]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Something is inlined if
(i) the sm_inline flag is on, AND
(ii) the thing has an INLINE pragma, AND
(iii) the thing is inlinable in the earliest phase.
Example of why (iii) is important:
{-# INLINE [~1] g #-}
g = ...
{-# INLINE f #-}
f x = g (g x)
If we were to inline g into f's inlining, then an importing module would
never be able to do
f e --> g (g e) ---> RULE fires
because the stable unfolding for f has had g inlined into it.
On the other hand, it is bad not to do ANY inlining into an
stable unfolding, because then recursive knots in instance declarations
don't get unravelled.
However, *sometimes* SimplGently must do no call-site inlining at all
(hence sm_inline = False). Before full laziness we must be careful
not to inline wrappers, because doing so inhibits floating
e.g. ...(case f x of ...)...
==> ...(case (case x of I# x# -> fw x#) of ...)...
==> ...(case x of I# x# -> case fw x# of ...)...
and now the redex (f x) isn't floatable any more.
The no-inlining thing is also important for Template Haskell. You might be
compiling in one-shot mode with -O2; but when TH compiles a splice before
running it, we don't want to use -O2. Indeed, we don't want to inline
anything, because the byte-code interpreter might get confused about
unboxed tuples and suchlike.
Note [Simplifying inside stable unfoldings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must take care with simplification inside stable unfoldings (which come from
INLINE pragmas).
First, consider the following example
let f = \pq -> BIG
in
let g = \y -> f y y
{-# INLINE g #-}
in ...g...g...g...g...g...
Now, if that's the ONLY occurrence of f, it might be inlined inside g,
and thence copied multiple times when g is inlined. HENCE we treat
any occurrence in a stable unfolding as a multiple occurrence, not a single
one; see OccurAnal.addRuleUsage.
Second, we *do* want to do some modest rules/inlining stuff in stable
unfoldings, partly to eliminate senseless crap, and partly to break
the recursive knots generated by instance declarations.
However, suppose we have
{-# INLINE <act> f #-}
f = <rhs>
meaning "inline f in phases p where activation <act>(p) holds".
Then what inlinings/rules can we apply to the copy of <rhs> captured in
f's stable unfolding? Our model is that literally <rhs> is substituted for
f when it is inlined. So our conservative plan (implemented by
updModeForStableUnfoldings) is this:
-------------------------------------------------------------
When simplifying the RHS of an stable unfolding, set the phase
to the phase in which the stable unfolding first becomes active
-------------------------------------------------------------
That ensures that
a) Rules/inlinings that *cease* being active before p will
not apply to the stable unfolding, consistent with it being
inlined in its *original* form in phase p.
b) Rules/inlinings that only become active *after* p will
not apply to the stable unfolding, again to be consistent with
inlining the *original* rhs in phase p.
For example,
{-# INLINE f #-}
f x = ...g...
{-# NOINLINE [1] g #-}
g y = ...
{-# RULE h g = ... #-}
Here we must not inline g into f's RHS, even when we get to phase 0,
because when f is later inlined into some other module we want the
rule for h to fire.
Similarly, consider
{-# INLINE f #-}
f x = ...g...
g y = ...
and suppose that there are auto-generated specialisations and a strictness
wrapper for g. The specialisations get activation AlwaysActive, and the
strictness wrapper gets activation (ActiveAfter 0). So the strictness
wrapper fails the test and won't be inlined into f's stable unfolding. That
means f can inline, expose the specialised call to g, so the specialisation
rules can fire.
A note about wrappers
~~~~~~~~~~~~~~~~~~~~~
It's also important not to inline a worker back into a wrapper.
A wrapper looks like
wrapper = inline_me (\x -> ...worker... )
Normally, the inline_me prevents the worker getting inlined into
the wrapper (initially, the worker's only call site!). But,
if the wrapper is sure to be called, the strictness analyser will
mark it 'demanded', so when the RHS is simplified, it'll get an ArgOf
continuation.
-}
-- | Is an Id's unfolding usable in the current simplifier mode?
-- With inlining off, only compulsory unfoldings qualify.
activeUnfolding :: SimplEnv -> Id -> Bool
activeUnfolding env
  | sm_inline mode = case sm_phase mode of
                       InitialPhase -> active_unfolding_gentle
                       Phase n      -> active_unfolding n
  | otherwise      = active_unfolding_minimal
  where
    mode = getMode env
getUnfoldingInRuleMatch :: SimplEnv -> InScopeEnv
-- When matching in RULE, we want to "look through" an unfolding
-- (to see a constructor) if *rules* are on, even if *inlinings*
-- are not.  A notable example is DFuns, which really we want to
-- match in rules like (op dfun) in gentle mode. Another example
-- is 'otherwise' which we want exprIsConApp_maybe to be able to
-- see very early on
getUnfoldingInRuleMatch env
  = (seInScope env, id_unf)
  where
    mode = getMode env

    id_unf id
      | unf_is_active id = idUnfolding id
      | otherwise        = NoUnfolding

    unf_is_active id
      | sm_rules mode = isActive (sm_phase mode) (idInlineActivation id)
      | otherwise     = active_unfolding_minimal id
active_unfolding_minimal :: Id -> Bool
-- Compulsory unfoldings only
-- Ignore SimplGently, because we want to inline regardless;
-- the Id has no top-level binding at all
--
-- NB: we used to have a second exception, for data con wrappers.
-- On the grounds that we use gentle mode for rule LHSs, and
-- they match better when data con wrappers are inlined.
-- But that only really applies to the trivial wrappers (like (:)),
-- and they are now constructed as Compulsory unfoldings (in MkId)
-- so they'll happen anyway.
active_unfolding_minimal id = isCompulsoryUnfolding (realIdUnfolding id)
active_unfolding :: PhaseNum -> Id -> Bool
-- Is the Id's inline activation active in phase n?
active_unfolding n id = isActiveIn n (idInlineActivation id)
active_unfolding_gentle :: Id -> Bool
-- Anything that is early-active
-- See Note [Gentle mode]
active_unfolding_gentle id
  = isInlinePragma prag && isEarlyActive (inlinePragmaActivation prag)
    -- NB: wrappers are not early-active
  where
    prag = idInlinePragma id
----------------------
activeRule :: SimplEnv -> Activation -> Bool
-- Nothing => No rules at all
activeRule env
  | sm_rules mode = isActive (sm_phase mode)
  | otherwise     = const False          -- Rewriting is off
  where
    mode = getMode env
{-
************************************************************************
* *
preInlineUnconditionally
* *
************************************************************************
preInlineUnconditionally
~~~~~~~~~~~~~~~~~~~~~~~~
@preInlineUnconditionally@ examines a bndr to see if it is used just
once in a completely safe way, so that it is safe to discard the
binding inline its RHS at the (unique) usage site, REGARDLESS of how
big the RHS might be. If this is the case we don't simplify the RHS
first, but just inline it un-simplified.
This is much better than first simplifying a perhaps-huge RHS and then
inlining and re-simplifying it. Indeed, it can be at least quadratically
better. Consider
x1 = e1
x2 = e2[x1]
x3 = e3[x2]
...etc...
xN = eN[xN-1]
We may end up simplifying e1 N times, e2 N-1 times, e3 N-3 times etc.
This can happen with cascades of functions too:
f1 = \x1.e1
f2 = \xs.e2[f1]
f3 = \xs.e3[f3]
...etc...
THE MAIN INVARIANT is this:
---- preInlineUnconditionally invariant -----
IF preInlineUnconditionally chooses to inline x = <rhs>
THEN doing the inlining should not change the occurrence
info for the free vars of <rhs>
----------------------------------------------
For example, it's tempting to look at trivial binding like
x = y
and inline it unconditionally. But suppose x is used many times,
but this is the unique occurrence of y. Then inlining x would change
y's occurrence info, which breaks the invariant. It matters: y
might have a BIG rhs, which will now be dup'd at every occurrence of x.
Even RHSs labelled InlineMe aren't caught here, because there might be
no benefit from inlining at the call site.
[Sept 01] Don't unconditionally inline a top-level thing, because that
can simply make a static thing into something built dynamically. E.g.
x = (a,b)
main = \s -> h x
[Remember that we treat \s as a one-shot lambda.] No point in
inlining x unless there is something interesting about the call site.
But watch out: if you aren't careful, some useful foldr/build fusion
can be lost (most notably in spectral/hartel/parstof) because the
foldr didn't see the build. Doing the dynamic allocation isn't a big
deal, in fact, but losing the fusion can be. But the right thing here
seems to be to do a callSiteInline based on the fact that there is
something interesting about the call site (it's strict). Hmm. That
seems a bit fragile.
Conclusion: inline top level things gaily until Phase 0 (the last
phase), at which point don't.
Note [pre/postInlineUnconditionally in gentle mode]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Even in gentle mode we want to do preInlineUnconditionally. The
reason is that too little clean-up happens if you don't inline
use-once things. Also a bit of inlining is *good* for full laziness;
it can expose constant sub-expressions. Example in
spectral/mandel/Mandel.hs, where the mandelset function gets a useful
let-float if you inline windowToViewport
However, as usual for Gentle mode, do not inline things that are
inactive in the initial stages. See Note [Gentle mode].
Note [Stable unfoldings and preInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Surprisingly, do not pre-inline-unconditionally Ids with INLINE pragmas!
Example
{-# INLINE f #-}
f :: Eq a => a -> a
f x = ...
fInt :: Int -> Int
fInt = f Int dEqInt
...fInt...fInt...fInt...
Here f occurs just once, in the RHS of fInt. But if we inline it there
we'll lose the opportunity to inline at each of fInt's call sites.
The INLINE pragma will only inline when the application is saturated
for exactly this reason; and we don't want PreInlineUnconditionally
to second-guess it. A live example is Trac #3736.
c.f. Note [Stable unfoldings and postInlineUnconditionally]
Note [Top-level bottoming Ids]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Don't inline top-level Ids that are bottoming, even if they are used just
once, because FloatOut has gone to some trouble to extract them out.
Inlining them won't make the program run faster!
Note [Do not inline CoVars unconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Coercion variables appear inside coercions, and the RHS of a let-binding
is a term (not a coercion) so we can't necessarily inline the latter in
the former.
-}
preInlineUnconditionally :: DynFlags -> SimplEnv -> TopLevelFlag -> InId -> InExpr -> Bool
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
-- Reason: we don't want to inline single uses, or discard dead bindings,
-- for unlifted, side-effect-ful bindings
--
-- Decide whether to inline the binding at its (unique) occurrence
-- without simplifying the RHS first; see the preInlineUnconditionally
-- commentary above for the invariant this must preserve.
preInlineUnconditionally dflags env top_lvl bndr rhs
  | not active                               = False
  | isStableUnfolding (idUnfolding bndr)     = False -- Note [Stable unfoldings and preInlineUnconditionally]
  | isTopLevel top_lvl && isBottomingId bndr = False -- Note [Top-level bottoming Ids]
  | not (gopt Opt_SimplPreInlining dflags)   = False
  | isCoVar bndr                             = False -- Note [Do not inline CoVars unconditionally]
  | otherwise = case idOccInfo bndr of
                  IAmDead                    -> True -- Happens in ((\x.1) v)
                  OneOcc in_lam True int_cxt -> try_once in_lam int_cxt
                  _                          -> False
  where
    mode = getMode env
    active = isActive (sm_phase mode) act
    -- See Note [pre/postInlineUnconditionally in gentle mode]
    act = idInlineActivation bndr
    try_once in_lam int_cxt     -- There's one textual occurrence
      | not in_lam = isNotTopLevel top_lvl || early_phase
      | otherwise  = int_cxt && canInlineInLam rhs

    -- Be very careful before inlining inside a lambda, because (a) we must not
    -- invalidate occurrence information, and (b) we want to avoid pushing a
    -- single allocation (here) into multiple allocations (inside lambda).
    -- Inlining a *function* with a single *saturated* call would be ok, mind you.
    --      || (if is_cheap && not (canInlineInLam rhs) then pprTrace "preinline" (ppr bndr <+> ppr rhs) ok else ok)
    --      where
    --         is_cheap = exprIsCheap rhs
    --         ok = is_cheap && int_cxt

    --      int_cxt         The context isn't totally boring
    -- E.g. let f = \ab.BIG in \y.  map f xs
    --      Don't want to substitute for f, because then we allocate
    --      its closure every time the \y is called
    -- But: let f = \ab.BIG in \y.  map (f y) xs
    --      Now we do want to substitute for f, even though it's not
    --      saturated, because we're going to allocate a closure for
    --      (f y) every time round the loop anyhow.

    -- canInlineInLam => free vars of rhs are (Once in_lam) or Many,
    -- so substituting rhs inside a lambda doesn't change the occ info.
    -- Sadly, not quite the same as exprIsHNF.
    canInlineInLam (Lit _)    = True
    canInlineInLam (Lam b e)  = isRuntimeVar b || canInlineInLam e
    canInlineInLam (Tick t e) = not (tickishIsCode t) && canInlineInLam e
    canInlineInLam _          = False
      -- not ticks.  Counting ticks cannot be duplicated, and non-counting
      -- ticks around a Lam will disappear anyway.

    early_phase = case sm_phase mode of
                    Phase 0 -> False
                    _       -> True
    -- If we don't have this early_phase test, consider
    --   x = length [1,2,3]
    -- The full laziness pass carefully floats all the cons cells to
    -- top level, and preInlineUnconditionally floats them all back in.
    -- Result is (a) static allocation replaced by dynamic allocation
    --           (b) many simplifier iterations because this tickles
    --               a related problem; only one inlining per pass
    --
    -- On the other hand, I have seen cases where top-level fusion is
    -- lost if we don't inline top level thing (e.g. string constants)
    -- Hence the test for phase zero (which is the phase for all the final
    -- simplifications).  Until phase zero we take no special notice of
    -- top level things, but then we become more leery about inlining
    -- them.
{-
************************************************************************
* *
postInlineUnconditionally
* *
************************************************************************
postInlineUnconditionally
~~~~~~~~~~~~~~~~~~~~~~~~~
@postInlineUnconditionally@ decides whether to unconditionally inline
a thing based on the form of its RHS; in particular if it has a
trivial RHS. If so, we can inline and discard the binding altogether.
NB: a loop breaker has must_keep_binding = True and non-loop-breakers
only have *forward* references. Hence, it's safe to discard the binding
NOTE: This isn't our last opportunity to inline. We're at the binding
site right now, and we'll get another opportunity when we get to the
occurrence(s)
Note that we do this unconditional inlining only for trivial RHSs.
Don't inline even WHNFs inside lambdas; doing so may simply increase
allocation when the function is called. This isn't the last chance; see
NOTE above.
NB: Even inline pragmas (e.g. IMustBeINLINEd) are ignored here Why?
Because we don't even want to inline them into the RHS of constructor
arguments. See NOTE above
NB: At one time even NOINLINE was ignored here: if the rhs is trivial
it's best to inline it anyway. We often get a=E; b=a from desugaring,
with both a and b marked NOINLINE. But that seems incompatible with
our new view that inlining is like a RULE, so I'm sticking to the 'active'
story for now.
-}
-- | Decide, after the RHS has been simplified, whether to inline the
-- binding at all its occurrence sites and discard the binding itself.
-- The guards are ordered: cheap structural vetoes first (inactive,
-- loop breaker, exported, stable unfolding, top level), then the
-- always-yes trivial-RHS case, and finally the occurrence-info analysis.
postInlineUnconditionally
:: DynFlags -> SimplEnv -> TopLevelFlag
-> OutId -- The binder (an InId would be fine too)
-- (*not* a CoVar)
-> OccInfo -- From the InId
-> OutExpr
-> Unfolding
-> Bool
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
-- Reason: we don't want to inline single uses, or discard dead bindings,
-- for unlifted, side-effect-ful bindings
postInlineUnconditionally dflags env top_lvl bndr occ_info rhs unfolding
| not active = False
| isWeakLoopBreaker occ_info = False -- If it's a loop-breaker of any kind, don't inline
-- because it might be referred to "earlier"
| isExportedId bndr = False
| isStableUnfolding unfolding = False -- Note [Stable unfoldings and postInlineUnconditionally]
| isTopLevel top_lvl = False -- Note [Top level and postInlineUnconditionally]
| exprIsTrivial rhs = True
| otherwise
= case occ_info of
-- The point of examining occ_info here is that for *non-values*
-- that occur outside a lambda, the call-site inliner won't have
-- a chance (because it doesn't know that the thing
-- only occurs once). The pre-inliner won't have gotten
-- it either, if the thing occurs in more than one branch
-- So the main target is things like
-- let x = f y in
-- case v of
-- True -> case x of ...
-- False -> case x of ...
-- This is very important in practice; e.g. wheel-seive1 doubles
-- in allocation if you miss this out
OneOcc in_lam _one_br int_cxt -- OneOcc => no code-duplication issue
-> smallEnoughToInline dflags unfolding -- Small enough to dup
-- ToDo: consider discount on smallEnoughToInline if int_cxt is true
--
-- NB: Do NOT inline arbitrarily big things, even if one_br is True
-- Reason: doing so risks exponential behaviour. We simplify a big
-- expression, inline it, and simplify it again. But if the
-- very same thing happens in the big expression, we get
-- exponential cost!
-- PRINCIPLE: when we've already simplified an expression once,
-- make sure that we only inline it if it's reasonably small.
&& (not in_lam ||
-- Outside a lambda, we want to be reasonably aggressive
-- about inlining into multiple branches of case
-- e.g. let x = <non-value>
-- in case y of { C1 -> ..x..; C2 -> ..x..; C3 -> ... }
-- Inlining can be a big win if C3 is the hot-spot, even if
-- the uses in C1, C2 are not 'interesting'
-- An example that gets worse if you add int_cxt here is 'clausify'
(isCheapUnfolding unfolding && int_cxt))
-- isCheap => acceptable work duplication; in_lam may be true
-- int_cxt to prevent us inlining inside a lambda without some
-- good reason. See the notes on int_cxt in preInlineUnconditionally
IAmDead -> True -- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
_ -> False
-- Here's an example that we don't handle well:
-- let f = if b then Left (\x.BIG) else Right (\y.BIG)
-- in \y. ....case f of {...} ....
-- Here f is used just once, and duplicating the case work is fine (exprIsCheap).
-- But
-- - We can't preInlineUnconditionally because that would invalidate
-- the occ info for b.
-- - We can't postInlineUnconditionally because the RHS is big, and
-- that risks exponential behaviour
-- - We can't call-site inline, because the rhs is big
-- Alas!
where
-- Whether the binder's inline activation is on in the current phase.
active = isActive (sm_phase (getMode env)) (idInlineActivation bndr)
-- See Note [pre/postInlineUnconditionally in gentle mode]
{-
Note [Top level and postInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't do postInlineUnconditionally for top-level things (even for
ones that are trivial):
* Doing so will inline top-level error expressions that have been
carefully floated out by FloatOut. More generally, it might
replace static allocation with dynamic.
* Even for trivial expressions there's a problem. Consider
{-# RULE "foo" forall (xs::[T]). reverse xs = ruggle xs #-}
blah xs = reverse xs
ruggle = sort
In one simplifier pass we might fire the rule, getting
blah xs = ruggle xs
but in *that* simplifier pass we must not do postInlineUnconditionally
on 'ruggle' because then we'll have an unbound occurrence of 'ruggle'
If the rhs is trivial it'll be inlined by callSiteInline, and then
the binding will be dead and discarded by the next use of OccurAnal
* There is less point, because the main goal is to get rid of local
bindings used in multiple case branches.
* The inliner should inline trivial things at call sites anyway.
Note [Stable unfoldings and postInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do not do postInlineUnconditionally if the Id has a stable unfolding,
otherwise we lose the unfolding. Example
-- f has stable unfolding with rhs (e |> co)
-- where 'e' is big
f = e |> co
Then there's a danger we'll optimise to
f' = e
f = f' |> co
and now postInlineUnconditionally, losing the stable unfolding on f. Now f'
won't inline because 'e' is too big.
c.f. Note [Stable unfoldings and preInlineUnconditionally]
************************************************************************
* *
Rebuilding a lambda
* *
************************************************************************
-}
-- | Rebuild a lambda from its binders and body, applying local
-- clean-ups on the way: float casts outside the lambda, collapse
-- nested lambdas, float floatable ticks, then try eta-reduction
-- and (unless we are at a let RHS) eta-expansion.
mkLam :: [OutBndr] -> OutExpr -> SimplCont -> SimplM OutExpr
-- mkLam tries two things
-- a) eta reduction, if that gives a trivial expression
-- b) eta expansion [only if there are some value lambdas]
mkLam [] body _cont
= return body
mkLam bndrs body cont
= do { dflags <- getDynFlags
; mkLam' dflags bndrs body }
where
mkLam' :: DynFlags -> [OutBndr] -> OutExpr -> SimplM OutExpr
-- Float the cast out of the lambda: \xs. (e |> co) ==> (\xs. e) |> (xs -> co)
mkLam' dflags bndrs (Cast body co)
| not (any bad bndrs)
-- Note [Casts and lambdas]
= do { lam <- mkLam' dflags bndrs body
; return (mkCast lam (mkPiCos Representational bndrs co)) }
where
co_vars = tyCoVarsOfCo co
-- A coercion binder free in co blocks the float (see the Note)
bad bndr = isCoVar bndr && bndr `elemVarSet` co_vars
-- Collapse nested lambdas into one binder group
mkLam' dflags bndrs body@(Lam {})
= mkLam' dflags (bndrs ++ bndrs1) body1
where
(bndrs1, body1) = collectBinders body
-- Float a floatable tick outside the lambda
mkLam' dflags bndrs (Tick t expr)
| tickishFloatable t
= mkTick t <$> mkLam' dflags bndrs expr
mkLam' dflags bndrs body
| gopt Opt_DoEtaReduction dflags
, Just etad_lam <- tryEtaReduce bndrs body
= do { tick (EtaReduction (head bndrs))
; return etad_lam }
| not (contIsRhs cont) -- See Note [Eta expanding lambdas]
, gopt Opt_DoLambdaEtaExpansion dflags
, any isRuntimeVar bndrs
, let body_arity = exprEtaExpandArity dflags body
, body_arity > 0
= do { tick (EtaExpansion (head bndrs))
; let res = mkLams bndrs (etaExpand body_arity body)
; traceSmpl "eta expand" (vcat [text "before" <+> ppr (mkLams bndrs body)
, text "after" <+> ppr res])
; return res }
| otherwise
= return (mkLams bndrs body)
{-
Note [Eta expanding lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we *do* want to eta-expand lambdas. Consider
f (\x -> case x of (a,b) -> \s -> blah)
where 's' is a state token, and hence can be eta expanded. This
showed up in the code for GHC.IO.Handle.Text.hPutChar, a rather
important function!
The eta-expansion will never happen unless we do it now. (Well, it's
possible that CorePrep will do it, but CorePrep only has a half-baked
eta-expander that can't deal with casts. So it's much better to do it
here.)
However, when the lambda is let-bound, as the RHS of a let, we have a
better eta-expander (in the form of tryEtaExpandRhs), so we don't
bother to try expansion in mkLam in that case; hence the contIsRhs
guard.
Note [Casts and lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
(\x. (\y. e) `cast` g1) `cast` g2
There is a danger here that the two lambdas look separated, and the
full laziness pass might float an expression to between the two.
So this equation in mkLam' floats the g1 out, thus:
(\x. e `cast` g1) --> (\x.e) `cast` (tx -> g1)
where x:tx.
In general, this floats casts outside lambdas, where (I hope) they
might meet and cancel with some other cast:
\x. e `cast` co ===> (\x. e) `cast` (tx -> co)
/\a. e `cast` co ===> (/\a. e) `cast` (/\a. co)
/\g. e `cast` co ===> (/\g. e) `cast` (/\g. co)
(if not (g `in` co))
Notice that it works regardless of 'e'. Originally it worked only
if 'e' was itself a lambda, but in some cases that resulted in
fruitless iteration in the simplifier. A good example was when
compiling Text.ParserCombinators.ReadPrec, where we had a definition
like (\x. Get `cast` g)
where Get is a constructor with nonzero arity. Then mkLam eta-expanded
the Get, and the next iteration eta-reduced it, and then eta-expanded
it again.
Note also the side condition for the case of coercion binders.
It does not make sense to transform
/\g. e `cast` g ==> (/\g.e) `cast` (/\g.g)
because the latter is not well-kinded.
************************************************************************
* *
Eta expansion
* *
************************************************************************
-}
-- | Try to eta-expand the RHS of a let binding, returning the
-- (possibly increased) arity together with the (possibly expanded) RHS.
-- Emits a WARN if the new arity is below the binder's recorded arity.
tryEtaExpandRhs :: SimplEnv -> OutId -> OutExpr -> SimplM (Arity, OutExpr)
-- See Note [Eta-expanding at let bindings]
tryEtaExpandRhs env bndr rhs
= do { dflags <- getDynFlags
; (new_arity, new_rhs) <- try_expand dflags
; WARN( new_arity < old_id_arity,
(text "Arity decrease:" <+> (ppr bndr <+> ppr old_id_arity
<+> ppr old_arity <+> ppr new_arity) $$ ppr new_rhs) )
-- Note [Arity decrease] in Simplify
return (new_arity, new_rhs) }
where
try_expand dflags
-- Trivial RHSs are left alone; expanding them gains nothing
| exprIsTrivial rhs
= return (exprArity rhs, rhs)
| sm_eta_expand (getMode env) -- Provided eta-expansion is on
, let new_arity1 = findRhsArity dflags bndr rhs old_arity
new_arity2 = idCallArity bndr
new_arity = max new_arity1 new_arity2
, new_arity > old_arity -- And the current manifest arity isn't enough
= do { tick (EtaExpansion bndr)
; return (new_arity, etaExpand new_arity rhs) }
| otherwise
= return (old_arity, rhs)
old_arity = exprArity rhs -- See Note [Do not eta-expand PAPs]
old_id_arity = idArity bndr
{-
Note [Eta-expanding at let bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We now eta expand at let-bindings, which is where the payoff comes.
The most significant thing is that we can do a simple arity analysis
(in CoreArity.findRhsArity), which we can't do for free-floating lambdas
One useful consequence of not eta-expanding lambdas is this example:
genMap :: C a => ...
{-# INLINE genMap #-}
genMap f xs = ...
myMap :: D a => ...
{-# INLINE myMap #-}
myMap = genMap
Notice that 'genMap' should only inline if applied to two arguments.
In the stable unfolding for myMap we'll have the unfolding
(\d -> genMap Int (..d..))
We do not want to eta-expand to
(\d f xs -> genMap Int (..d..) f xs)
because then 'genMap' will inline, and it really shouldn't: at least
as far as the programmer is concerned, it's not applied to two
arguments!
Note [Do not eta-expand PAPs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to have old_arity = manifestArity rhs, which meant that we
would eta-expand even PAPs. But this gives no particular advantage,
and can lead to a massive blow-up in code size, exhibited by Trac #9020.
Suppose we have a PAP
foo :: IO ()
foo = returnIO ()
Then we can eta-expand it to
foo = (\eta. (returnIO () |> sym g) eta) |> g
where
g :: IO () ~ State# RealWorld -> (# State# RealWorld, () #)
But there is really no point in doing this, and it generates masses of
coercions and whatnot that eventually disappear again. For T9020, GHC
allocated 6.6G before, and 0.8G afterwards; and residency dropped from
1.8G to 45M.
But note that this won't eta-expand, say
f = \g -> map g
Does it matter not eta-expanding such functions? I'm not sure. Perhaps
strictness analysis will have less to bite on?
************************************************************************
* *
\subsection{Floating lets out of big lambdas}
* *
************************************************************************
Note [Floating and type abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
x = /\a. C e1 e2
We'd like to float this to
y1 = /\a. e1
y2 = /\a. e2
x = /\a. C (y1 a) (y2 a)
for the usual reasons: we want to inline x rather vigorously.
You may think that this kind of thing is rare. But in some programs it is
common. For example, if you do closure conversion you might get:
data a :-> b = forall e. (e -> a -> b) :$ e
f_cc :: forall a. a :-> a
f_cc = /\a. (\e. id a) :$ ()
Now we really want to inline that f_cc thing so that the
construction of the closure goes away.
So I have elaborated simplLazyBind to understand right-hand sides that look
like
/\ a1..an. body
and treat them specially. The real work is done in SimplUtils.abstractFloats,
but there is quite a bit of plumbing in simplLazyBind as well.
The same transformation is good when there are lets in the body:
/\abc -> let(rec) x = e in b
==>
let(rec) x' = /\abc -> let x = x' a b c in e
in
/\abc -> let x = x' a b c in b
This is good because it can turn things like:
let f = /\a -> letrec g = ... g ... in g
into
letrec g' = /\a -> ... g' a ...
in
let f = /\ a -> g' a
which is better. In effect, it means that big lambdas don't impede
let-floating.
This optimisation is CRUCIAL in eliminating the junk introduced by
desugaring mutually recursive definitions. Don't eliminate it lightly!
[May 1999] If we do this transformation *regardless* then we can
end up with some pretty silly stuff. For example,
let
st = /\ s -> let { x1=r1 ; x2=r2 } in ...
in ..
becomes
let y1 = /\s -> r1
y2 = /\s -> r2
st = /\s -> ...[y1 s/x1, y2 s/x2]
in ..
Unless the "..." is a WHNF there is really no point in doing this.
Indeed it can make things worse. Suppose x1 is used strictly,
and is of the form
x1* = case f y of { (a,b) -> e }
If we abstract this wrt the tyvar we then can't do the case inline
as we would normally do.
That's why the whole transformation is part of the same process that
floats let-bindings and constructor arguments out of RHSs. In particular,
it is guarded by the doFloatFromRhs call in simplLazyBind.
Note [Which type variables to abstract over]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Abstract only over the type variables free in the rhs wrt which the
new binding is abstracted. Note that
* The naive approach of abstracting wrt the
tyvars free in the Id's /type/ fails. Consider:
/\ a b -> let t :: (a,b) = (e1, e2)
x :: a = fst t
in ...
Here, b isn't free in x's type, but we must nevertheless
abstract wrt b as well, because t's type mentions b.
Since t is floated too, we'd end up with the bogus:
poly_t = /\ a b -> (e1, e2)
poly_x = /\ a -> fst (poly_t a *b*)
* We must do closeOverKinds. Example (Trac #10934):
f = /\k (f:k->*) (a:k). let t = AccFailure @ (f a) in ...
Here we want to float 't', but we must remember to abstract over
'k' as well, even though it is not explicitly mentioned in the RHS,
otherwise we get
t = /\ (f:k->*) (a:k). AccFailure @ (f a)
which is obviously bogus.
-}
-- | Abstract the floated bindings of @body_env@ over the given type
-- variables, so a big lambda /\tvs can be rebuilt outside them.
-- Returns the new (poly) bindings plus the body with each floated
-- binder substituted by its type application (poly_x tvs).
-- See Note [Floating and type abstraction].
abstractFloats :: [OutTyVar] -> SimplEnv -> OutExpr -> SimplM ([OutBind], OutExpr)
abstractFloats main_tvs body_env body
= ASSERT( notNull body_floats )
do { (subst, float_binds) <- mapAccumLM abstract empty_subst body_floats
; return (float_binds, CoreSubst.substExpr (text "abstract_floats1") subst body) }
where
main_tv_set = mkVarSet main_tvs
body_floats = getFloatBinds body_env
empty_subst = CoreSubst.mkEmptySubst (seInScope body_env)
-- Abstract one floated binding, threading the substitution that maps
-- each original binder to its type-applied polymorphic replacement.
abstract :: CoreSubst.Subst -> OutBind -> SimplM (CoreSubst.Subst, OutBind)
abstract subst (NonRec id rhs)
= do { (poly_id, poly_app) <- mk_poly tvs_here id
; let poly_rhs = mkLams tvs_here rhs'
subst' = CoreSubst.extendIdSubst subst id poly_app
; return (subst', (NonRec poly_id poly_rhs)) }
where
rhs' = CoreSubst.substExpr (text "abstract_floats2") subst rhs
-- tvs_here: see Note [Which type variables to abstract over]
tvs_here = varSetElemsWellScoped $
intersectVarSet main_tv_set $
closeOverKinds $
exprSomeFreeVars isTyVar rhs'
abstract subst (Rec prs)
= do { (poly_ids, poly_apps) <- mapAndUnzipM (mk_poly tvs_here) ids
; let subst' = CoreSubst.extendSubstList subst (ids `zip` poly_apps)
poly_rhss = [mkLams tvs_here (CoreSubst.substExpr (text "abstract_floats3") subst' rhs)
| rhs <- rhss]
; return (subst', Rec (poly_ids `zip` poly_rhss)) }
where
(ids,rhss) = unzip prs
-- For a recursive group, it's a bit of a pain to work out the minimal
-- set of tyvars over which to abstract:
-- /\ a b c. let x = ...a... in
-- letrec { p = ...x...q...
-- q = .....p...b... } in
-- ...
-- Since 'x' is abstracted over 'a', the {p,q} group must be abstracted
-- over 'a' (because x is replaced by (poly_x a)) as well as 'b'.
-- Since it's a pain, we just use the whole set, which is always safe
--
-- If you ever want to be more selective, remember this bizarre case too:
-- x::a = x
-- Here, we must abstract 'x' over 'a'.
tvs_here = toposortTyVars main_tvs
-- Make the polymorphic replacement for one binder, and the
-- type application to substitute for its occurrences.
mk_poly tvs_here var
= do { uniq <- getUniqueM
; let poly_name = setNameUnique (idName var) uniq -- Keep same name
poly_ty = mkInvForAllTys tvs_here (idType var) -- But new type of course
poly_id = transferPolyIdInfo var tvs_here $ -- Note [transferPolyIdInfo] in Id.hs
mkLocalIdOrCoVar poly_name poly_ty
; return (poly_id, mkTyApps (Var poly_id) (mkTyVarTys tvs_here)) }
-- In the olden days, it was crucial to copy the occInfo of the original var,
-- because we were looking at occurrence-analysed but as yet unsimplified code!
-- In particular, we mustn't lose the loop breakers. BUT NOW we are looking
-- at already simplified code, so it doesn't matter
--
-- It's even right to retain single-occurrence or dead-var info:
-- Suppose we started with /\a -> let x = E in B
-- where x occurs once in B. Then we transform to:
-- let x' = /\a -> E in /\a -> let x* = x' a in B
-- where x* has an INLINE prag on it. Now, once x* is inlined,
-- the occurrences of x' will be just the occurrences originally
-- pinned on x.
{-
Note [Abstract over coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a coercion variable (g :: a ~ Int) is free in the RHS, then so is the
type variable a. Rather than sort this mess out, we simply bale out and abstract
wrt all the type variables if any of them are coercion variables.
Historical note: if you use let-bindings instead of a substitution, beware of this:
-- Suppose we start with:
--
-- x = /\ a -> let g = G in E
--
-- Then we'll float to get
--
-- x = let poly_g = /\ a -> G
-- in /\ a -> let g = poly_g a in E
--
-- But now the occurrence analyser will see just one occurrence
-- of poly_g, not inside a lambda, so the simplifier will
-- PreInlineUnconditionally poly_g back into g! Back to square 1!
-- (I used to think that the "don't inline lone occurrences" stuff
-- would stop this happening, but since it's the *only* occurrence,
-- PreInlineUnconditionally kicks in first!)
--
-- Solution: put an INLINE note on g's RHS, so that poly_g seems
-- to appear many times. (NB: mkInlineMe eliminates
-- such notes on trivial RHSs, so do it manually.)
************************************************************************
* *
prepareAlts
* *
************************************************************************
prepareAlts tries these things:
1. Eliminate alternatives that cannot match, including the
DEFAULT alternative.
2. If the DEFAULT alternative can match only one possible constructor,
then make that constructor explicit.
e.g.
case e of x { DEFAULT -> rhs }
===>
case e of x { (a,b) -> rhs }
where the type is a single constructor type. This gives better code
when rhs also scrutinises x or e.
3. Returns a list of the constructors that cannot hold in the
DEFAULT alternative (if there is one)
Here "cannot match" includes knowledge from GADTs
It's a good idea to do this stuff before simplifying the alternatives, to
avoid simplifying alternatives we know can't happen, and to come up with
the list of constructors that are handled, to put into the IdInfo of the
case binder, for use when simplifying the alternatives.
Eliminating the default alternative in (1) isn't so obvious, but it can
happen:
data Colour = Red | Green | Blue
f x = case x of
Red -> ..
Green -> ..
DEFAULT -> h x
h y = case y of
Blue -> ..
DEFAULT -> [ case y of ... ]
If we inline h into f, the default case of the inlined h can't happen.
If we don't notice this, we may end up filtering out *all* the cases
of the inner case y, which give us nowhere to go!
-}
-- | Clean up case alternatives before simplifying them: filter out
-- impossible alternatives, refine a DEFAULT that can only be one
-- constructor, and merge identical alternatives. Also returns the
-- constructors that cannot occur in the DEFAULT branch.
prepareAlts :: OutExpr -> OutId -> [InAlt] -> SimplM ([AltCon], [InAlt])
-- The returned alternatives can be empty, none are possible
prepareAlts scrut case_bndr' alts
| Just (tc, tys) <- splitTyConApp_maybe (varType case_bndr')
-- Case binder is needed just for its type. Note that as an
-- OutId, it has maximum information; this is important.
-- Test simpl013 is an example
= do { us <- getUniquesM
; let (idcs1, alts1) = filterAlts tc tys imposs_cons alts
(yes2, alts2) = refineDefaultAlt us tc tys idcs1 alts1
(yes3, idcs3, alts3) = combineIdenticalAlts idcs1 alts2
-- "idcs" stands for "impossible default data constructors"
-- i.e. the constructors that can't match the default case
; when yes2 $ tick (FillInCaseDefault case_bndr')
; when yes3 $ tick (AltMerge case_bndr')
; return (idcs3, alts3) }
| otherwise -- Not a data type, so nothing interesting happens
= return ([], alts)
where
-- Constructors ruled out by the scrutinee's unfolding, if it is a Var
imposs_cons = case scrut of
Var v -> otherCons (idUnfolding v)
_ -> []
{-
************************************************************************
* *
mkCase
* *
************************************************************************
mkCase tries these things
1. Merge Nested Cases
case e of b { ==> case e of b {
p1 -> rhs1 p1 -> rhs1
... ...
pm -> rhsm pm -> rhsm
_ -> case b of b' { pn -> let b'=b in rhsn
pn -> rhsn ...
... po -> let b'=b in rhso
po -> rhso _ -> let b'=b in rhsd
_ -> rhsd
}
which merges two cases in one case when -- the default alternative of
the outer case scrutinises the same variable as the outer case. This
transformation is called Case Merging. It avoids that the same
variable is scrutinised multiple times.
2. Eliminate Identity Case
case e of ===> e
True -> True;
False -> False
and similar friends.
-}
-- | Smart constructors for Case expressions. 'mkCase' tries the
-- case-merge transformation, then falls through to 'mkCase1'
-- (identity-case elimination) and finally 'mkCase2' (plain build).
mkCase, mkCase1, mkCase2
:: DynFlags
-> OutExpr -> OutId
-> OutType -> [OutAlt] -- Alternatives in standard (increasing) order
-> SimplM OutExpr
--------------------------------------------------
-- 1. Merge Nested Cases
--------------------------------------------------
mkCase dflags scrut outer_bndr alts_ty ((DEFAULT, _, deflt_rhs) : outer_alts)
| gopt Opt_CaseMerge dflags
, (ticks, Case (Var inner_scrut_var) inner_bndr _ inner_alts)
<- stripTicksTop tickishFloatable deflt_rhs
, inner_scrut_var == outer_bndr
= do { tick (CaseMerge outer_bndr)
; let wrap_alt (con, args, rhs) = ASSERT( outer_bndr `notElem` args )
(con, args, wrap_rhs rhs)
-- Simplifier's no-shadowing invariant should ensure
-- that outer_bndr is not shadowed by the inner patterns
wrap_rhs rhs = Let (NonRec inner_bndr (Var outer_bndr)) rhs
-- The let is OK even for unboxed binders,
wrapped_alts | isDeadBinder inner_bndr = inner_alts
| otherwise = map wrap_alt inner_alts
merged_alts = mergeAlts outer_alts wrapped_alts
-- NB: mergeAlts gives priority to the left
-- case x of
-- A -> e1
-- DEFAULT -> case x of
-- A -> e2
-- B -> e3
-- When we merge, we must ensure that e1 takes
-- precedence over e2 as the value for A!
; fmap (mkTicks ticks) $
mkCase1 dflags scrut outer_bndr alts_ty merged_alts
}
-- Warning: don't call mkCase recursively!
-- Firstly, there's no point, because inner alts have already had
-- mkCase applied to them, so they won't have a case in their default
-- Secondly, if you do, you get an infinite loop, because the bindCaseBndr
-- in munge_rhs may put a case into the DEFAULT branch!
mkCase dflags scrut bndr alts_ty alts = mkCase1 dflags scrut bndr alts_ty alts
--------------------------------------------------
-- 2. Eliminate Identity Case
--------------------------------------------------
-- | Identity-case elimination: if every alternative just returns the
-- scrutinee (possibly under a uniform cast / floatable ticks), replace
-- the whole case by the scrutinee itself.
mkCase1 _dflags scrut case_bndr _ alts@((_,_,rhs1) : _) -- Identity case
| all identity_alt alts
= do { tick (CaseIdentity case_bndr)
; return (mkTicks ticks $ re_cast scrut rhs1) }
where
-- Ticks stripped from the other branches must be re-attached
ticks = concatMap (stripTicksT tickishFloatable . thdOf3) (tail alts)
identity_alt (con, args, rhs) = check_eq rhs con args
check_eq (Cast rhs co) con args
= not (any (`elemVarSet` tyCoVarsOfCo co) args) && check_eq rhs con args
-- See Note [RHS casts]
check_eq (Lit lit) (LitAlt lit') _ = lit == lit'
check_eq (Var v) _ _ | v == case_bndr = True
check_eq (Var v) (DataAlt con) [] = v == dataConWorkId con
-- Optimisation only
check_eq (Tick t e) alt args = tickishFloatable t &&
check_eq e alt args
check_eq rhs (DataAlt con) args = cheapEqExpr' tickishFloatable rhs $
mkConApp con (arg_tys ++
varsToCoreExprs args)
check_eq _ _ _ = False
arg_tys = map Type (tyConAppArgs (idType case_bndr))
-- Note [RHS casts]
-- ~~~~~~~~~~~~~~~~
-- We've seen this:
-- case e of x { _ -> x `cast` c }
-- And we definitely want to eliminate this case, to give
-- e `cast` c
-- So we throw away the cast from the RHS, and reconstruct
-- it at the other end. All the RHS casts must be the same
-- if (all identity_alt alts) holds.
--
-- Don't worry about nested casts, because the simplifier combines them
re_cast scrut (Cast rhs co) = Cast (re_cast scrut rhs) co
re_cast scrut _ = scrut
mkCase1 dflags scrut bndr alts_ty alts = mkCase2 dflags scrut bndr alts_ty alts
--------------------------------------------------
-- Catch-all
--------------------------------------------------
-- | Catch-all: no transformation applies, so just build the Case node.
mkCase2 _dflags scrut bndr alts_ty alts
= pure (Case scrut bndr alts_ty alts)
{-
Note [Dead binders]
~~~~~~~~~~~~~~~~~~~~
Note that dead-ness is maintained by the simplifier, so that it is
accurate after simplification as well as before.
Note [Cascading case merge]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Case merging should cascade in one sweep, because it
happens bottom-up
case e of a {
DEFAULT -> case a of b
DEFAULT -> case b of c {
DEFAULT -> e
A -> ea
B -> eb
C -> ec
==>
case e of a {
DEFAULT -> case a of b
DEFAULT -> let c = b in e
A -> let c = b in ea
B -> eb
C -> ec
==>
case e of a {
DEFAULT -> let b = a in let c = b in e
A -> let b = a in let c = b in ea
B -> let b = a in eb
C -> ec
However here's a tricky case that we still don't catch, and I don't
see how to catch it in one pass:
case x of c1 { I# a1 ->
case a1 of c2 ->
0 -> ...
DEFAULT -> case x of c3 { I# a2 ->
case a2 of ...
After occurrence analysis (and its binder-swap) we get this
case x of c1 { I# a1 ->
let x = c1 in -- Binder-swap addition
case a1 of c2 ->
0 -> ...
DEFAULT -> case x of c3 { I# a2 ->
case a2 of ...
When we simplify the inner case x, we'll see that
x=c1=I# a1. So we'll bind a2 to a1, and get
case x of c1 { I# a1 ->
case a1 of c2 ->
0 -> ...
DEFAULT -> case a1 of ...
This is correct, but we can't do a case merge in this sweep
because c2 /= a1. Reason: the binding c1=I# a1 went inwards
without getting changed to c1=I# c2.
I don't think this is worth fixing, even if I knew how. It'll
all come out in the next pass anyway.
-}
| tjakway/ghcjvm | compiler/simplCore/SimplUtils.hs | bsd-3-clause | 79,475 | 0 | 18 | 24,308 | 8,336 | 4,456 | 3,880 | -1 | -1 |
module Q3Demo.Loader.Zip where
{-
Zip specification:
http://en.wikipedia.org/wiki/Zip_(file_format)
http://www.pkware.com/documents/casestudies/APPNOTE.TXT
-}
import Control.Applicative
import Data.Binary.Get
import Data.Bits
import Data.Word
import System.IO.MMap
import qualified Codec.Compression.Zlib.Raw as Zlib
import qualified Data.ByteString.Char8 as SB
import qualified Data.ByteString.Lazy as LB
-- | One member of a zip archive, as read from its local file header.
data Entry
= Entry
{ eFilePath :: String -- ^ file name as stored in the header
, eIsCompressed :: Bool -- ^ True when stored with method 8 (DEFLATE)
, eData :: LB.ByteString -- ^ payload, still compressed if eIsCompressed
}
-- | A zip archive is just its list of entries.
type Archive = [Entry]
-- | The entry's payload as a lazy ByteString, inflated first when the
-- entry was stored compressed.
decompress' :: Entry -> LB.ByteString
decompress' entry
| eIsCompressed entry = Zlib.decompress (eData entry)
| otherwise = eData entry
-- | The entry's payload as a strict ByteString (inflating if needed).
decompress :: Entry -> SB.ByteString
decompress entry = SB.concat (LB.toChunks (decompress' entry))
-- | Memory-map the named zip file and parse its entries.
readArchive :: String -> IO Archive
readArchive path = do
bs <- mmapFileByteString path Nothing
return (runGet getArchive (LB.fromChunks [bs]))
-- | Run the given parser repeatedly while the next four bytes
-- (little-endian, peeked without consuming) equal the expected tag.
chunks :: Word32 -> Get a -> Get [a]
chunks want p = do
tag <- lookAhead getWord32le
if tag == want
then (:) <$> p <*> chunks want p
else return []
-- | Parse consecutive zip local-file records (signature 0x04034b50,
-- i.e. "PK\3\4") into entries. Field offsets follow the local file
-- header layout of the PKWARE APPNOTE referenced in the module header.
getArchive :: Get Archive
getArchive = chunks 0x04034b50 $ do
-- local file header
skip 6 -- signature (4) + version needed to extract (2)
flag <- getWord16le -- general purpose bit flag
isComp <- getWord16le >>= \i -> case i of -- compression method
0 -> return False -- stored (no compression)
8 -> return True -- DEFLATE
_ -> fail "Unsupported compression method!"
skip 8 -- mod time (2) + mod date (2) + crc-32 (4)
size <- getWord32le -- compressed size
skip 4 -- uncompressed size (unused here)
nameLen <- getWord16le
extraLen <- getWord16le
name <- SB.unpack <$> getByteString (fromIntegral nameLen)
skip $ fromIntegral extraLen
-- Bit 3 set means sizes live in a trailing data descriptor,
-- which this parser does not support.
d <- if flag .&. 8 /= 0 then fail "Zip data descriptor is not supported!" else getLazyByteString $ fromIntegral size
return $ Entry name isComp d
| csabahruska/q3demo | src/Q3Demo/Loader/Zip.hs | bsd-3-clause | 1,705 | 0 | 14 | 374 | 506 | 263 | 243 | 43 | 4 |
{-# LANGUAGE CPP #-}
{- |
Module : $Header$
Description : reading Lib-Defns
Copyright : (c) C. Maeder, DFKI 2014
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(DevGraph)
reading Lib-Defns for various logics, OWL and CL return several Lib-Defns
-}
module Driver.ReadLibDefn (readLibDefn) where
import Logic.Grothendieck
import Syntax.AS_Library
import Syntax.Parse_AS_Library
import ATC.Sml_cats
import ATC.LibName ()
import CommonLogic.ParseCLAsLibDefn
#ifndef NOOWLLOGIC
import OWL2.ParseOWLAsLibDefn
#endif
#ifdef RDFLOGIC
-- import RDF.ParseRDFAsLibDefn -- MODULE RDF IS BROKEN AT THE MOMENT
#endif
import CSMOF.ParseXmiAsLibDefn
import QVTR.ParseQvtAsLibDefn
import SoftFOL.ParseTPTPAsLibDefn
import FreeCAD.Logic_FreeCAD
import Driver.Options
import Driver.ReadFn
import Common.AnnoState
import Common.Result
import Common.ResultT
import Text.ParserCombinators.Parsec
import Control.Monad.Trans (MonadIO (..))
import Data.List
-- | Maps a mime-type (or extension) fragment to the input type it
-- denotes. NOTE: order matters — findFiletype takes the first entry
-- whose key, prefixed with '/', occurs in the mime string.
mimeTypeMap :: [(String, InType)]
mimeTypeMap =
[ ("xml", DgXml)
, ("html", HtmlIn)
, ("rdf", OWLIn RdfXml)
, ("owl", OWLIn OwlXml)
, ("obo", OWLIn OBO)
, ("ttl", OWLIn Turtle)
, ("turtle", OWLIn Turtle)
, ("omn", OWLIn Manchester)
, ("dol", DOLIn)
, ("clif", CommonLogicIn True)
, ("het", HetCASLIn)
, ("casl", CASLIn) ]
-- | The OWL input types considered when an input might be XML-like;
-- used by joinFileTypes and guessInput to resolve ambiguous guesses.
owlXmlTypes :: [InType]
owlXmlTypes = [OWLIn OwlXml, OWLIn RdfXml, OWLIn Turtle]
-- | Combine the file type guessed from the extension with the one
-- guessed from the mime type. A definite guess beats GuessIn; an OWL
-- XML flavour beats a generic DgXml; HtmlIn from the mime type wins;
-- otherwise the extension's guess is kept (contradictions ignored).
joinFileTypes :: InType -> InType -> InType
joinFileTypes ext magic
| GuessIn <- ext = magic
| GuessIn <- magic = ext
| DgXml <- ext, magic `elem` owlXmlTypes = magic
| DgXml <- magic, ext `elem` owlXmlTypes = ext
| HtmlIn <- magic = magic
| otherwise = ext -- ignore contradictions
-- | Look up the input type for a mime string: the first mimeTypeMap
-- key that occurs in it (prefixed with '/') wins; GuessIn otherwise.
findFiletype :: String -> InType
findFiletype s = maybe GuessIn snd (find hit mimeTypeMap)
where hit (suffix, _) = ('/' : suffix) `isInfixOf` s
-- | Determine the input type of a file from its name/options (fty1),
-- its mime type if any (fty2), and — when the combination is still
-- ambiguous or XML-ish — by sniffing the file content itself.
-- Fails (in the monad) on unexpected content such as a DGraph xml.
guessInput :: MonadIO m => HetcatsOpts -> Maybe String -> FilePath -> String
-> m InType
guessInput opts mr file input =
let fty1 = guess file (intype opts) -- from file name and -i option
fty2 = maybe GuessIn findFiletype mr -- from mime type, if given
fty = joinFileTypes fty1 fty2
in if elem fty $ GuessIn : DgXml : owlXmlTypes then
case guessXmlContent (fty == DgXml) input of
Left ty -> fail ty
Right ty -> case ty of
DgXml -> fail "unexpected DGraph xml"
_ -> return $ joinFileTypes fty ty
else return fty
-- | Parse the given input string into library definitions, dispatching on
-- the (guessed) input type: ATerm and FreeCAD inputs are handled directly,
-- CommonLogic, XMI, QVT, TPTP and OWL are delegated to the parsers of the
-- respective logics, and everything else goes to the structured
-- specification parser.  Fails in 'ResultT' on empty or html input.
readLibDefn :: LogicGraph -> HetcatsOpts -> Maybe String
  -> FilePath -> FilePath -> String -> ResultT IO [LIB_DEFN]
readLibDefn lgraph opts mr file fileForPos input =
  if null input then fail ("empty input file: " ++ file) else
  case intype opts of
  ATermIn _ -> return [from_sml_ATermString input]
  FreeCADIn ->
    liftIO $ fmap (: []) . readFreeCADLib file $ fileToLibName opts file
  _ -> do
    ty <- guessInput opts mr file input
    case ty of
      HtmlIn -> fail "unexpected html input"
      CommonLogicIn _ -> parseCL_CLIF file opts
#ifdef RDFLOGIC
-- RDFIn -> liftIO $ parseRDF file
#endif
      Xmi -> liftIO $ fmap (: []) $ parseXmi file
      Qvt -> liftIO $ fmap (: []) $ parseQvt file
      TPTPIn -> liftIO $ fmap (: []) $ parseTPTP input file
#ifndef NOOWLLOGIC
      OWLIn _ -> parseOWL (isStructured opts) file
#endif
      -- default: the DOL/CASL structured library parser
      _ -> case runParser (library lgraph { dolOnly = ty == DOLIn })
           (emptyAnnos ()) fileForPos input of
        Left err -> fail (showErr err)
        Right ast -> return [ast]
| mariefarrell/Hets | Driver/ReadLibDefn.hs | gpl-2.0 | 3,436 | 0 | 20 | 728 | 1,038 | 553 | 485 | 82 | 11 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : $Header$
Copyright : (c) T.Mossakowski, W.Herding, C.Maeder, Uni Bremen 2004-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Abstract syntax for hybrid logic extension of CASL
Only the added syntax is specified
-}
module Hybrid.AS_Hybrid where
import Common.Id
import Common.AS_Annotation
import CASL.AS_Basic_CASL
import Data.Data
-- DrIFT command
{-! global: GetRange !-}
-- | Hybrid basic specifications instantiate the generic CASL basic spec
-- with the hybrid item and formula types below.
type H_BASIC_SPEC = BASIC_SPEC H_BASIC_ITEM H_SIG_ITEM H_FORMULA

-- | An annotated hybrid formula.
type AnHybFORM = Annoted (FORMULA H_FORMULA)

-- | Declarations added by the hybrid extension: modalities (named or
-- sort-indexed) and nominals, each with attached axioms.
data H_BASIC_ITEM = Simple_mod_decl [Annoted SIMPLE_ID] [AnHybFORM] Range
                  | Term_mod_decl [Annoted SORT] [AnHybFORM] Range
                  | Simple_nom_decl [Annoted SIMPLE_ID] [AnHybFORM] Range
             deriving (Show, Typeable, Data)

-- | Whether an operation or predicate is interpreted identically in all
-- worlds ('Rigid') or may vary per world ('Flexible').
data RIGOR = Rigid | Flexible deriving (Show, Typeable, Data)

-- | Signature items of the hybrid extension: operation and predicate
-- items qualified by their rigor.
data H_SIG_ITEM =
          Rigid_op_items RIGOR [Annoted (OP_ITEM H_FORMULA)] Range
                 -- pos: op, semi colons
        | Rigid_pred_items RIGOR [Annoted (PRED_ITEM H_FORMULA)] Range
                 -- pos: pred, semi colons
             deriving (Show, Typeable, Data)

-- | A modality is either a plain identifier or given by a term.
data MODALITY = Simple_mod SIMPLE_ID | Term_mod (TERM H_FORMULA)
                deriving (Show, Eq, Ord, Typeable, Data)

-- | Nominals name individual worlds.
data NOMINAL = Simple_nom SIMPLE_ID
                deriving (Show, Eq, Ord, Typeable, Data)

-- | Formulas added by the hybrid extension: satisfaction at a nominal,
-- box/diamond modalities (the 'Bool' selects box vs. diamond), the
-- \"here\" predicate, and universal\/existential binding of nominals.
data H_FORMULA = At NOMINAL (FORMULA H_FORMULA) Range
               | BoxOrDiamond Bool MODALITY (FORMULA H_FORMULA) Range
               | Here NOMINAL Range
               | Univ NOMINAL (FORMULA H_FORMULA) Range
               | Exist NOMINAL (FORMULA H_FORMULA) Range
                 deriving (Show, Eq, Ord, Typeable, Data)
| keithodulaigh/Hets | Hybrid/AS_Hybrid.der.hs | gpl-2.0 | 1,808 | 0 | 10 | 461 | 390 | 214 | 176 | 27 | 0 |
{-|
Module : Test
Description : QuickCheck tests for MaximumMatching
Licence : LGPL-2.1
Maintainer : Manuel Eberl <last name + m _at_ in.tum.de>
Stability : experimental
This module provides some QuickCheck tests for the functions related to maximal and maximum matchings.
-}
module Main where
import Data.Graph.Inductive.Query.MaximumMatching
import Data.Graph.Inductive.Graph
import Data.Graph.Inductive.Basic
import Data.Graph.Inductive.PatriciaTree (Gr)
import Data.Ord
import Data.List (nub, maximumBy, sort)
import Data.Maybe (fromJust)
import qualified Data.Set as S
import Data.Map (Map)
import qualified Data.Map as M
import Test.QuickCheck hiding (label)
import Test.QuickCheck.Gen
-- | Brute-force enumeration of maximal matchings: repeatedly extend the
-- current matching by every edge disjoint from it, and emit the matching
-- once no further extension exists.  (Matchings may appear repeatedly, in
-- different edge orders; the properties compare sorted sets.)
maximalMatchingsNaive :: Graph gr => gr a b -> [[Edge]]
maximalMatchingsNaive g = go []
  where disjointFrom (a, b) (c, d) = a /= c && a /= d && b /= c && b /= d
        extensions m = [(a, b) : m | (a, b) <- edges g, all (disjointFrom (a, b)) m]
        go m = case extensions m of
                 [] -> [m]
                 ms -> concatMap go ms
-- | A reference maximum matching, obtained as the largest maximal matching.
-- NOTE(review): this calls 'maximalMatchings' from the module under test,
-- not 'maximalMatchingsNaive', so this oracle is only as trustworthy as
-- tests 1 and 2 -- confirm whether that dependency is intended (probably
-- for speed, since the naive enumeration explores many permutations).
maximumMatchingNaive :: Graph gr => gr a b -> [Edge]
maximumMatchingNaive g = maximumBy (comparing length) (maximalMatchings g)
-- | A set of edges is a maximum matching iff it is a matching at all and
-- has as many edges as the naively determined maximum matching.
isMaximumMatchingNaive :: Graph gr => gr a b -> [Edge] -> Bool
isMaximumMatchingNaive g m = isMatching g m && length m == bestSize
  where bestSize = length (maximumMatchingNaive g)
-- | Generate a random loop-free graph: draw an arbitrary edge list over
-- 'Integer' labels, discard self-loops, and relabel the endpoints with
-- fresh fgl node numbers.
--
-- Fixes relative to the previous version: an explicit type signature is
-- given, the unused index from @zip es [0..]@ is dropped, and the local
-- binding no longer shadows fgl's 'nodes' function.
genGraph :: Gen (Gr () ())
genGraph =
  do es <- fmap (filter (uncurry (/=))) arbitrary :: Gen [(Integer, Integer)]
     let ns = nub (concatMap (\(a,b) -> [a,b]) es)     -- distinct endpoint labels
         fresh = newNodes (length ns) (empty :: Gr () ())
         lnodes = zip fresh (repeat ()) :: [LNode ()]
         relabel = M.fromList (zip ns fresh)           -- label -> fgl node
         ledges = [ (fromJust $ M.lookup x relabel, fromJust $ M.lookup y relabel, ())
                  | (x,y) <- es ]
     return (mkGraph lnodes ledges)
-- | Every maximal matching found by the brute-force enumeration is also
-- produced by the library's 'maximalMatchings' (compared as sorted sets).
propMaximalMatchingsComplete :: Property
propMaximalMatchingsComplete =
  forAll genGraph $ \g -> let ms = S.fromList (map sort (maximalMatchings g))
                          in all (\m -> sort m `S.member` ms) (maximalMatchingsNaive g)

-- | Everything 'maximalMatchings' produces really is a maximal matching.
propMaximalMatchingsAreMaximal :: Property
propMaximalMatchingsAreMaximal = forAll genGraph $ \g -> all (isMaximalMatching g) (maximalMatchings (g :: Gr () ()))

-- | The result of 'maximumMatching' is at least a maximal matching.
propMaximumMatchingIsMaximal :: Property
propMaximumMatchingIsMaximal = forAll genGraph $ \g -> isMaximalMatching g (maximumMatching (g :: Gr () ()))

-- | The result of 'maximumMatching' has maximum size among all matchings.
propMaximumMatchingIsMaximum :: Property
propMaximumMatchingIsMaximum = forAll genGraph $ \g -> isMaximumMatchingNaive g (maximumMatching (g :: Gr () ()))
-- | Run the four QuickCheck properties with per-test size/count limits.
main :: IO ()
main =
  do runTest "Test 1: maximalMatchings computes all maximal matchings"
             stdArgs {maxSize = 15, maxSuccess = 2000}
             propMaximalMatchingsComplete
     runTest "Test 2: maximalMatchings computes only maximal matchings"
             stdArgs {maxSize = 20, maxSuccess = 10000}
             propMaximalMatchingsAreMaximal
     runTest "Test 3: maximumMatchings computes a maximal matching"
             stdArgs {maxSuccess = 10000}
             propMaximumMatchingIsMaximal
     runTest "Test 4: maximumMatchings computes a maximum matching"
             stdArgs {maxSize = 30, maxSuccess = 2000}
             propMaximumMatchingIsMaximum
  where
    -- Announce the test, then check the property with the given arguments.
    runTest label args prop = putStrLn label >> quickCheckWith args prop
| 3of8/haskell_playground | maximum-matching/Test.hs | gpl-2.0 | 3,106 | 2 | 20 | 612 | 998 | 527 | 471 | 50 | 2 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Tests.Readers.RST (tests) where
import Text.Pandoc.Definition
import Test.Framework
import Tests.Helpers
import Tests.Arbitrary()
import Text.Pandoc.Builder
import Text.Pandoc
import Text.Pandoc.Error
-- | Parse a reStructuredText string into a 'Pandoc' document in
-- standalone mode; parse errors are turned into runtime errors via
-- 'handleError'.
rst :: String -> Pandoc
rst = handleError . readRST def{ readerStandalone = True }

infix 4 =:

-- | Build a named test case: @name =: (input, expected)@ runs 'rst' on
-- the input and compares against the expected document fragment.
(=:) :: ToString c
     => String -> (String, c) -> Test
(=:) = test rst
-- | Golden tests for the RST reader: each case pairs an RST source string
-- with the expected document built via Text.Pandoc.Builder.  Groups cover
-- line blocks, field lists (including metadata fields), URL recognition,
-- literal/line/code blocks, and interpreted text roles.
tests :: [Test]
tests = [ "line block with blank line" =:
          "| a\n|\n| b" =?> para (str "a") <>
                   para (str "\160b")
        -- field lists: plain fields, bibliographic metadata, inline markup
        , testGroup "field list"
          [ "general" =: unlines
             [ "para"
             , ""
             , ":Hostname: media08"
             , ":IP address: 10.0.0.19"
             , ":Size: 3ru"
             , ":Version: 1"
             , ":Indentation: Since the field marker may be quite long, the second"
             , "   and subsequent lines of the field body do not have to line up"
             , "   with the first line, but they must be indented relative to the"
             , "   field name marker, and they must line up with each other."
             , ":Parameter i: integer"
             , ":Final: item"
             , "  on two lines" ]
           =?> ( doc
               $ para "para" <>
                 definitionList [ (str "Hostname", [para "media08"])
                                , (text "IP address", [para "10.0.0.19"])
                                , (str "Size", [para "3ru"])
                                , (str "Version", [para "1"])
                                , (str "Indentation", [para "Since the field marker may be quite long, the second and subsequent lines of the field body do not have to line up with the first line, but they must be indented relative to the field name marker, and they must line up with each other."])
                                , (text "Parameter i", [para "integer"])
                                , (str "Final", [para "item on two lines"])
                                ])
          , "metadata" =: unlines
             [ "====="
             , "Title"
             , "====="
             , "--------"
             , "Subtitle"
             , "--------"
             , ""
             , ":Version: 1"
             ]
           =?> ( setMeta "version" (para "1")
               $ setMeta "title" ("Title" :: Inlines)
               $ setMeta "subtitle" ("Subtitle" :: Inlines)
               $ doc mempty )
          , "with inline markup" =: unlines
             [ ":*Date*: today"
             , ""
             , ".."
             , ""
             , ":*one*: emphasis"
             , ":two_: reference"
             , ":`three`_: another one"
             , ":``four``: literal"
             , ""
             , ".. _two: http://example.com"
             , ".. _three: http://example.org"
             ]
           =?> ( setMeta "date" (str "today")
               $ doc
               $ definitionList [ (emph "one", [para "emphasis"])
                                , (link "http://example.com" "" "two", [para "reference"])
                                , (link "http://example.org" "" "three", [para "another one"])
                                , (code "four", [para "literal"])
                                ])
          ]
        , "URLs with following punctuation" =:
          ("http://google.com, http://yahoo.com; http://foo.bar.baz.\n" ++
           "http://foo.bar/baz_(bam) (http://foo.bar)") =?>
          para (link "http://google.com" "" "http://google.com" <> ", " <>
                link "http://yahoo.com" "" "http://yahoo.com" <> "; " <>
                link "http://foo.bar.baz" "" "http://foo.bar.baz" <> ". " <>
                link "http://foo.bar/baz_(bam)" "" "http://foo.bar/baz_(bam)"
                 <> " (" <> link "http://foo.bar" "" "http://foo.bar" <> ")")
        , "Reference names with special characters" =:
                   ("A-1-B_2_C:3:D+4+E.5.F_\n\n" ++
                    ".. _A-1-B_2_C:3:D+4+E.5.F: https://example.com\n") =?>
                   para (link "https://example.com" "" "A-1-B_2_C:3:D+4+E.5.F")
        , testGroup "literal / line / code blocks"
          [ "indented literal block" =: unlines
            [ "::"
            , ""
            , "  block quotes"
            , ""
            , "  can go on for many lines"
            , "but must stop here"]
            =?> (doc $
                 codeBlock "block quotes\n\ncan go on for many lines" <>
                 para "but must stop here")
          , "line block with 3 lines" =: "| a\n| b\n| c"
             =?> para ("a" <> linebreak <> "b" <> linebreak <> "c")
          , "quoted literal block using >" =: "::\n\n> quoted\n> block\n\nOrdinary paragraph"
             =?> codeBlock "> quoted\n> block" <> para "Ordinary paragraph"
          , "quoted literal block using | (not a line block)" =: "::\n\n| quoted\n| block\n\nOrdinary paragraph"
             =?> codeBlock "| quoted\n| block" <> para "Ordinary paragraph"
          , "class directive with single paragraph" =: ".. class:: special\n\nThis is a \"special\" paragraph."
             =?> divWith ("", ["special"], []) (para "This is a \"special\" paragraph.")
          , "class directive with two paragraphs" =: ".. class:: exceptional remarkable\n\n   First paragraph.\n\n   Second paragraph."
             =?> divWith ("", ["exceptional", "remarkable"], []) (para "First paragraph." <> para "Second paragraph.")
          , "class directive around literal block" =: ".. class:: classy\n\n::\n\n  a\n  b"
             =?> divWith ("", ["classy"], []) (codeBlock "a\nb")]
        , testGroup "interpreted text roles"
          [ "literal role prefix" =: ":literal:`a`" =?> para (code "a")
          , "literal role postfix" =: "`a`:literal:" =?> para (code "a")
          , "literal text" =: "``text``" =?> para (code "text")
          , "code role" =: ":code:`a`" =?> para (codeWith ("", ["sourceCode"], []) "a")
          , "inherited code role" =: ".. role:: codeLike(code)\n\n:codeLike:`a`"
             =?> para (codeWith ("", ["codeLike", "sourceCode"], []) "a")
          , "custom code role with language field"
             =: ".. role:: lhs(code)\n   :language: haskell\n\n:lhs:`a`"
             =?> para (codeWith ("", ["lhs", "haskell","sourceCode"], []) "a")
          , "custom role with unspecified parent role"
             =: ".. role:: classy\n\n:classy:`text`"
             =?> para (spanWith ("", ["classy"], []) "text")
          , "role with recursive inheritance"
             =: ".. role:: haskell(code)\n.. role:: lhs(haskell)\n\n:lhs:`text`"
             =?> para (codeWith ("", ["lhs", "haskell", "sourceCode"], []) "text")
          , "unknown role" =: ":unknown:`text`" =?> para (str "text")
          ]
        ]
| alexvong1995/pandoc | tests/Tests/Readers/RST.hs | gpl-2.0 | 6,859 | 0 | 18 | 2,514 | 1,276 | 698 | 578 | 126 | 1 |
-- C->Haskell Compiler: binding generator
--
-- Author : Manuel M T Chakravarty
-- Created: 17 August 99
--
-- Version $Revision: 1.3 $ from $Date: 2005/10/17 20:41:30 $
--
-- Copyright (c) [1999..2003] Manuel M T Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- Module implementing the expansion of the binding hooks.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
-- * If there is an error in one binding hook, it is skipped and the next one
-- is processed (to collect as many errors as possible). However, if at
-- least one error occured, the expansion of binding hooks ends in a fatal
-- exception.
--
-- * `CST' exceptions are used to back off a binding hook as soon as an error
-- is encountered while it is processed.
--
-- Mapping of C types to Haskell FFI types:
-- ----------------------------------------
--
-- The following defines the mapping for basic types. If the type specifer
-- is missing, it is taken to be `int'. In the following, elements enclosed
-- in square brackets are optional.
--
-- void -> ()
-- char -> CChar
-- unsigned char -> CUChar
-- signed char -> CShort
-- signed -> CInt
-- [signed] int -> CInt
-- [signed] short [int] -> CSInt
-- [signed] long [int] -> CLong
-- [signed] long long [int] -> CLLong
-- unsigned [int] -> CUInt
-- unsigned short [int] -> CUShort
-- unsigned long [int] -> CULong
-- unsigned long long [int] -> CULLong
-- float -> CFloat
-- double -> CDouble
-- long double -> CLDouble
-- enum ... -> CInt
-- struct ... -> ** error **
-- union ... -> ** error **
--
-- Plain structures or unions (ie, if not the base type of a pointer type)
-- are not supported at the moment (the underlying FFI does not support them
-- directly). Named types (ie, in C type names defined using `typedef') are
-- traced back to their original definitions. Pointer types are mapped
-- to `Ptr a' or `FunPtr a' depending on whether they point to a functional.
-- Values obtained from bit fields are represented by `CInt' or `CUInt'
-- depending on whether they are signed.
--
-- We obtain the size and alignment constraints for all primitive types of C
-- from `CInfo', which obtains it from the Haskell 98 FFI. In the alignment
-- computations involving bit fields, we assume that the alignment
-- constraints for bitfields (wrt to non-bitfield members) is always the same
-- as for `int' irrespective of the size of the bitfield. This seems to be
-- implicitly guaranteed by K&R A8.3, but it is not entirely clear.
--
-- Identifier lookup:
-- ------------------
--
-- We allow to identify enumerations and structures by the names of `typedef'
-- types aliased to them.
--
-- * enumerations: It is first checked whether there is a tag with the given
-- identifier; if such a tag does not exist, the definition of a typedef
-- with the same name is taken if it exists.
-- * structs/unions: like enumerations
--
-- We generally use `shadow' lookups. When an identifier cannot be found,
-- we check whether - according to the prefix set by the context hook -
-- another identifier casts a shadow that matches. If so, that identifier is
-- taken instead of the original one.
--
--- TODO ----------------------------------------------------------------------
--
-- * A function prototype that uses a defined type on its left hand side may
-- declare a function, while that is not obvious from the declaration
-- itself (without also considering the `typedef'). Calls to such
-- functions are currently rejected, which is a BUG.
--
-- * context hook must precede all but the import hooks
--
-- * The use of `++' in the recursive definition of the routines generating
-- `Enum' instances is not particularly efficient.
--
-- * Some operands are missing in `applyBin' - unfortunately, Haskell does
-- not have standard bit operations. Some constructs are also missing
-- from `evalConstCExpr'. Haskell 98 FFI standardises `Bits'; use that.
--
module GenBind (expandHooks)
where
-- standard libraries
import Data.Char (toUpper, toLower, isSpace)
import Data.List (deleteBy, intersperse, isPrefixOf, find, nubBy)
import Data.Maybe (isNothing, isJust, fromJust, fromMaybe)
import Control.Monad (when, unless, liftM, mapAndUnzipM)
import Data.Bits ((.&.), (.|.), xor, complement)
-- Compiler Toolkit
import Position (Position, Pos(posOf), nopos, builtinPos)
import Errors (interr, todo)
import Idents (Ident, identToLexeme, onlyPosIdent)
import Attributes (newAttrsOnlyPos)
-- C->Haskell
import C2HSConfig (dlsuffix)
import C2HSState (CST, nop, errorsPresent, showErrors, fatal,
SwitchBoard(..), Traces(..), putTraceStr, getSwitch,
printCIO)
import C (AttrC, CObj(..), CTag(..), lookupDefObjC, lookupDefTagC,
CHeader(..), CExtDecl, CDecl(..), CDeclSpec(..),
CStorageSpec(..), CTypeSpec(..), CTypeQual(..),
CStructUnion(..), CStructTag(..), CEnum(..), CDeclr(..),
CInit(..), CExpr(..), CAssignOp(..), CBinaryOp(..),
CUnaryOp(..), CConst (..),
CT, readCT, transCT, getCHeaderCT, runCT, ifCTExc,
raiseErrorCTExc, findValueObj, findFunObj, findTag,
findTypeObj, applyPrefixToNameSpaces, isTypedef,
simplifyDecl, declrFromDecl, declrNamed, structMembers,
structName, tagName, declaredName , structFromDecl,
funResultAndArgs, chaseDecl, findAndChaseDecl,
findObjShadow,
checkForAlias, checkForOneAliasName, lookupEnum,
lookupStructUnion, lookupDeclOrTag, isPtrDeclr,
isArrDeclr, dropPtrDeclr, isPtrDecl, getDeclOf, isFunDeclr,
refersToNewDef, CDef(..))
-- friends
import CHS (CHSModule(..), CHSFrag(..), CHSHook(..), CHSTrans(..),
CHSParm(..), CHSArg(..), CHSAccess(..), CHSAPath(..),
CHSPtrType(..), showCHSParm)
import CInfo (CPrimType(..), size, alignment, bitfieldIntSigned,
bitfieldAlignment)
import GBMonad (TransFun, transTabToTransFun, HsObject(..), GB, HsPtrRep,
initialGBState, setContext, getPrefix, getLock,
delayCode, getDelayedCode, ptrMapsTo, queryPtr, objIs,
queryObj, queryClass, queryPointer, mergeMaps, dumpMaps)
-- default marshallers
-- -------------------
-- FIXME:
-- - we might have a dynamically extended table in the monad if needed (we
-- could marshall enums this way and also save the `id' marshallers for
-- pointers defined via (newtype) pointer hooks)
-- - the checks for the Haskell types are quite kludgy
-- determine the default "in" marshaller for the given Haskell and C types
--
-- | Determine the default \"in\" marshaller for the given Haskell result
-- type and C argument types; yields 'Nothing' when no default applies.
-- The equations are tried top to bottom, so more specific cases (e.g.
-- @Bool@, @String@) must precede the generic integral/float conversions.
lookupDftMarshIn :: String -> [ExtType] -> GB (Maybe (Ident, CHSArg))
lookupDftMarshIn "Bool" [PrimET pt] | isIntegralCPrimType pt =
  return $ Just (cFromBoolIde, CHSValArg)
lookupDftMarshIn hsTy [PrimET pt] | isIntegralHsType hsTy
                                    &&isIntegralCPrimType pt =
  return $ Just (cIntConvIde, CHSValArg)
lookupDftMarshIn hsTy [PrimET pt] | isFloatHsType hsTy
                                    &&isFloatCPrimType pt =
  return $ Just (cFloatConvIde, CHSValArg)
lookupDftMarshIn "String" [PtrET (PrimET CCharPT)] =
  return $ Just (withCStringIde, CHSIOArg)
-- a string argument that is passed as pointer plus length
lookupDftMarshIn "String" [PtrET (PrimET CCharPT), PrimET pt]
  | isIntegralCPrimType pt =
  return $ Just (withCStringLenIde, CHSIOArg)
-- a pointer to exactly the Haskell type: marshal by storing it
lookupDftMarshIn hsTy [PtrET ty] | showExtType ty == hsTy =
  return $ Just (withIde, CHSIOArg)
lookupDftMarshIn hsTy [PtrET (PrimET pt)]
  | isIntegralHsType hsTy && isIntegralCPrimType pt =
  return $ Just (withIntConvIde, CHSIOArg)
lookupDftMarshIn hsTy [PtrET (PrimET pt)]
  | isFloatHsType hsTy && isFloatCPrimType pt =
  return $ Just (withFloatConvIde, CHSIOArg)
lookupDftMarshIn "Bool" [PtrET (PrimET pt)]
  | isIntegralCPrimType pt =
  return $ Just (withFromBoolIde, CHSIOArg)
-- FIXME: handle array-list conversion
lookupDftMarshIn _ _ =
  return Nothing
-- determine the default "out" marshaller for the given Haskell and C types
--
-- | Determine the default \"out\" marshaller for the given Haskell result
-- type and C result types; yields 'Nothing' when no default applies.
-- As with 'lookupDftMarshIn', equation order encodes specificity.
lookupDftMarshOut :: String -> [ExtType] -> GB (Maybe (Ident, CHSArg))
lookupDftMarshOut "()" _ =
  return $ Just (voidIde, CHSVoidArg)
lookupDftMarshOut "Bool" [PrimET pt] | isIntegralCPrimType pt =
  return $ Just (cToBoolIde, CHSValArg)
lookupDftMarshOut hsTy [PrimET pt] | isIntegralHsType hsTy
                                     &&isIntegralCPrimType pt =
  return $ Just (cIntConvIde, CHSValArg)
lookupDftMarshOut hsTy [PrimET pt] | isFloatHsType hsTy
                                     &&isFloatCPrimType pt =
  return $ Just (cFloatConvIde, CHSValArg)
lookupDftMarshOut "String" [PtrET (PrimET CCharPT)] =
  return $ Just (peekCStringIde, CHSIOArg)
lookupDftMarshOut "String" [PtrET (PrimET CCharPT), PrimET pt]
  | isIntegralCPrimType pt =
  return $ Just (peekCStringLenIde, CHSIOArg)
-- a pointer to exactly the Haskell type: marshal by peeking it
lookupDftMarshOut hsTy [PtrET ty] | showExtType ty == hsTy =
  return $ Just (peekIde, CHSIOArg)
-- FIXME: add combination, such as "peek" plus "cIntConv" etc
-- FIXME: handle array-list conversion
lookupDftMarshOut _ _ =
  return Nothing
-- check for integral Haskell types
--
-- | Check whether the named Haskell type is one of the fixed-size
-- integral types handled by the default marshallers.
isIntegralHsType :: String -> Bool
isIntegralHsType ty =
  ty `elem` [ "Int", "Int8", "Int16", "Int32", "Int64"
            , "Word8", "Word16", "Word32", "Word64" ]
-- check for floating Haskell types
--
-- | Check whether the named Haskell type is a floating-point type
-- handled by the default marshallers.
isFloatHsType :: String -> Bool
isFloatHsType ty = ty == "Float" || ty == "Double"
-- check for integral C types
--
-- * For marshalling purposes C char's are integral types (see also types
-- classes for which the FFI guarantees instances for `CChar', `CSChar', and
-- `CUChar')
--
-- | Check for integral C types.
--
-- * For marshalling purposes C char's are integral types (see also types
--   classes for which the FFI guarantees instances for `CChar', `CSChar',
--   and `CUChar')
isIntegralCPrimType :: CPrimType -> Bool
isIntegralCPrimType pt = pt `elem` integralTypes
  where
    integralTypes = [ CCharPT, CSCharPT, CIntPT, CShortPT, CLongPT
                    , CLLongPT, CUIntPT, CUCharPT, CUShortPT
                    , CULongPT, CULLongPT ]
-- check for floating C types
--
-- | Check for floating-point C types.
isFloatCPrimType :: CPrimType -> Bool
isFloatCPrimType pt = pt `elem` [CFloatPT, CDoublePT, CLDoublePT]
-- standard conversions
--
-- Identifiers naming the standard conversion functions that the generated
-- marshalling code refers to (resolved in the user-visible marshalling
-- library, not here).
-- NOTE(review): these bindings carry no explicit type signatures; from
-- their construction via 'noPosIdent' they are presumably all 'Ident' --
-- adding signatures would make that explicit.
voidIde = noPosIdent "void" -- never appears in the output
cFromBoolIde = noPosIdent "cFromBool"
cToBoolIde = noPosIdent "cToBool"
cIntConvIde = noPosIdent "cIntConv"
cFloatConvIde = noPosIdent "cFloatConv"
withIde = noPosIdent "with"
withCStringIde = noPosIdent "withCString"
withCStringLenIde = noPosIdent "withCStringLenIntConv"
withIntConvIde = noPosIdent "withIntConv"
withFloatConvIde = noPosIdent "withFloatConv"
withFromBoolIde = noPosIdent "withFromBoolConv"
peekIde = noPosIdent "peek"
peekCStringIde = noPosIdent "peekCString"
peekCStringLenIde = noPosIdent "peekCStringLenIntConv"
-- expansion of binding hooks
-- --------------------------
-- given a C header file and a binding file, expand all hooks in the binding
-- file using the C header information (EXPORTED)
--
-- * together with the module, returns the contents of the .chi file
--
-- * if any error (not warnings) is encountered, a fatal error is raised.
--
-- * also returns all warning messages encountered (last component of result)
--
-- | Expand all binding hooks of a binding module against the attributed C
-- header; returns the expanded module together with the .chi dump and the
-- collected warning messages.  Raises a fatal error if any hook failed.
expandHooks :: AttrC -> CHSModule -> CST s (CHSModule, String, String)
expandHooks ac mod = do
    mLock <- getSwitch lockFunSB
    (_, res) <- runCT (expandModule mod) ac (initialGBState mLock)
    return res
-- | Expand all fragments of the module, append any delayed code, and
-- produce the .chi dump; if errors were collected along the way the whole
-- expansion ends in a fatal exception (warnings are merely returned).
expandModule :: CHSModule -> GB (CHSModule, String, String)
expandModule (CHSModule frags) =
  do
    -- expand hooks
    --
    traceInfoExpand
    frags' <- expandFrags frags
    delayedFrags <- getDelayedCode

    -- get .chi dump
    --
    chi <- dumpMaps

    -- check for errors and finalise
    --
    errs <- errorsPresent
    if errs
      then do
        traceInfoErr
        errmsgs <- showErrors
        fatal ("Errors during expansion of binding hooks:\n\n" -- fatal error
               ++ errmsgs)
      else do
        traceInfoOK
        warnmsgs <- showErrors
        return (CHSModule (frags' ++ delayedFrags), chi, warnmsgs)
  where
    traceInfoExpand = putTraceStr tracePhasesSW
                        ("...expanding binding hooks...\n")
    traceInfoErr = putTraceStr tracePhasesSW
                     ("...error(s) detected.\n")
    traceInfoOK = putTraceStr tracePhasesSW
                    ("...successfully completed.\n")
-- | Expand each fragment in turn and concatenate the resulting fragment
-- lists.
expandFrags :: [CHSFrag] -> GB [CHSFrag]
expandFrags frags = liftM concat (mapM expandFrag frags)
-- | Expand a single fragment: verbatim, line, and language fragments pass
-- through unchanged; hooks are expanded to verbatim code (with a CST
-- exception backing off to an error marker); leftover CPP/C fragments are
-- internal errors at this stage; CPP conditionals select the first branch
-- whose tag is defined in the C header, else the default branch.
expandFrag :: CHSFrag -> GB [CHSFrag]
expandFrag verb@(CHSVerb _ _ ) = return [verb]
expandFrag line@(CHSLine _ ) = return [line]
expandFrag prag@(CHSLang _ _ ) = return [prag]
expandFrag (CHSHook h ) =
  do
    code <- expandHook h
    return [CHSVerb code builtinPos]
  `ifCTExc` return [CHSVerb "** ERROR **" builtinPos]
expandFrag (CHSCPP s _ ) =
  interr $ "GenBind.expandFrag: Left over CHSCPP!\n---\n" ++ s ++ "\n---"
expandFrag (CHSC s _ ) =
  interr $ "GenBind.expandFrag: Left over CHSC!\n---\n" ++ s ++ "\n---"
expandFrag (CHSCond alts dft) =
  do
    traceInfoCond
    select alts
  where
    -- no alternative matched: fall back to the default branch, if any
    select [] = do
                  traceInfoDft dft
                  expandFrags (maybe [] id dft)
    select ((ide, frags):alts) = do
                                   oobj <- findTag ide
                                   traceInfoVal ide oobj
                                   if isNothing oobj
                                     then
                                       select alts
                                     else -- found right alternative
                                       expandFrags frags
    --
    traceInfoCond = traceGenBind "** CPP conditional:\n"
    traceInfoVal ide oobj = traceGenBind $ identToLexeme ide ++ " is " ++
                              (if isNothing oobj then "not " else "") ++
                              "defined.\n"
    traceInfoDft dft = if isNothing dft
                         then
                           return ()
                         else
                           traceGenBind "Choosing else branch.\n"
-- | Expand a single binding hook into the Haskell code it stands for.
expandHook :: CHSHook -> GB String
-- import hook: merge the given .chi maps and emit an import declaration
expandHook (CHSImport qual ide chi _) =
  do
    mergeMaps chi
    return $
      "import " ++ (if qual then "qualified " else "") ++ identToLexeme ide
-- context hook: record library/prefix/lock settings; generates no code
expandHook (CHSContext olib oprefix olock _) =
  do
    setContext olib oprefix olock -- enter context information
    mapMaybeM_ applyPrefixToNameSpaces oprefix -- use the prefix on name spaces
    return ""
-- type hook: emit the parenthesised Haskell type corresponding to the
-- (chased) C declaration of the given identifier
expandHook (CHSType ide pos) =
  do
    traceInfoType
    decl <- findAndChaseDecl ide False True -- no indirection, but shadows
    ty <- extractSimpleType pos decl
    traceInfoDump decl ty
    return $ "(" ++ showExtType ty ++ ")"
  where
    traceInfoType = traceGenBind "** Type hook:\n"
    traceInfoDump decl ty = traceGenBind $
      "Declaration\n" ++ show decl ++ "\ntranslates to\n"
      ++ showExtType ty ++ "\n"
-- sizeof hook: emit the size (in bytes, after padding) of the C type
expandHook (CHSSizeof ide pos) =
  do
    traceInfoSizeof
    decl <- findAndChaseDecl ide False True -- no indirection, but shadows
    (size, _) <- sizeAlignOf decl
    traceInfoDump decl size
    return $ show (fromIntegral . padBits $ size)
  where
    traceInfoSizeof = traceGenBind "** Sizeof hook:\n"
    traceInfoDump decl size = traceGenBind $
      "Size of declaration\n" ++ show decl ++ "\nis "
      ++ show (fromIntegral . padBits $ size) ++ "\n"
-- enum hook: generate a Haskell data type (plus Enum instance, if needed)
-- from the C enumeration, applying the translation table and prefix
expandHook (CHSEnum cide oalias chsTrans oprefix derive _) =
  do
    -- get the corresponding C declaration
    --
    enum <- lookupEnum cide True -- smart lookup incl error handling
    --
    -- convert the translation table and generate data type definition code
    --
    gprefix <- getPrefix
    let prefix = fromMaybe gprefix oprefix
        trans = transTabToTransFun prefix chsTrans
        hide = identToLexeme . fromMaybe cide $ oalias
    enumDef enum hide trans (map identToLexeme derive)
-- call hook: generate a foreign import declaration for the C function
expandHook hook@(CHSCall isPure isUns isNol ide oalias pos) =
  do
    traceEnter
    -- get the corresponding C declaration; raises error if not found or not a
    -- function; we use shadow identifiers, so the returned identifier is used
    -- afterwards instead of the original one
    --
    (ObjCO cdecl, ide) <- findFunObj ide True
    mLock <- if isNol then return Nothing else getLock
    let ideLexeme = identToLexeme ide -- original name might have been a shadow
        hsLexeme = ideLexeme `maybe` identToLexeme $ oalias
        cdecl' = ide `simplifyDecl` cdecl
    callImport hook isPure isUns mLock ideLexeme hsLexeme cdecl' pos
  where
    traceEnter = traceGenBind $
      "** Call hook for `" ++ identToLexeme ide ++ "':\n"
-- fun hook: generate a foreign import under a fresh internal name plus a
-- Haskell wrapper that performs the requested marshalling
expandHook hook@(CHSFun isPure isUns isNol ide oalias ctxt parms parm pos) =
  do
    traceEnter
    -- get the corresponding C declaration; raises error if not found or not a
    -- function; we use shadow identifiers, so the returned identifier is used
    -- afterwards instead of the original one
    --
    (ObjCO cdecl, cide) <- findFunObj ide True
    mLock <- if isNol then return Nothing else getLock
    let ideLexeme = identToLexeme ide -- original name might have been a shadow
        hsLexeme = ideLexeme `maybe` identToLexeme $ oalias
        fiLexeme = hsLexeme ++ "'_" -- *Urgh* - probably unique...
        fiIde = onlyPosIdent nopos fiLexeme
        cdecl' = cide `simplifyDecl` cdecl
        callHook = CHSCall isPure isUns isNol cide (Just fiIde) pos
    callImport callHook isPure isUns mLock (identToLexeme cide) fiLexeme cdecl' pos
    funDef isPure hsLexeme fiLexeme cdecl' ctxt mLock parms parm pos
  where
    traceEnter = traceGenBind $
      "** Fun hook for `" ++ identToLexeme ide ++ "':\n"
-- get/set hook: generate an accessor that follows the C access path and
-- reads or writes the value at the resulting offsets
expandHook (CHSField access path pos) =
  do
    traceInfoField
    (decl, offsets) <- accessPath path
    traceDepth offsets
    ty <- extractSimpleType pos decl
    traceValueType ty
    setGet pos access offsets ty
  where
    accessString = case access of
                     CHSGet -> "Get"
                     CHSSet -> "Set"
    traceInfoField = traceGenBind $ "** " ++ accessString ++ " hook:\n"
    traceDepth offsets = traceGenBind $ "Depth of access path: "
                                        ++ show (length offsets) ++ "\n"
    traceValueType et = traceGenBind $
      "Type of accessed value: " ++ showExtType et ++ "\n"
-- pointer hook: register a Haskell (newtype) pointer for a C typedef or
-- tag; the C entity determines the pointed-to type unless an explicit
-- Haskell reference type is given
expandHook (CHSPointer isStar cName oalias ptrKind isNewtype oRefType pos) =
  do
    traceInfoPointer
    let hsIde = fromMaybe cName oalias
        hsName = identToLexeme hsIde
    hsIde `objIs` Pointer ptrKind isNewtype -- register Haskell object
    --
    -- we check for a typedef declaration or tag (struct, union, or enum)
    --
    declOrTag <- lookupDeclOrTag cName True
    case declOrTag of
      Left cdecl -> do -- found a typedef declaration
        cNameFull <- case declaredName cdecl of
                       Just ide -> return ide
                       Nothing -> interr
                                    "GenBind.expandHook: Where is the name?"
        cNameFull `refersToNewDef` ObjCD (TypeCO cdecl)
                                   -- assoc needed for chasing
        traceInfoCName "declaration" cNameFull
        unless (isStar || isPtrDecl cdecl) $
          ptrExpectedErr (posOf cName)
        (hsType, isFun) <-
          case oRefType of
            Nothing -> do
              cDecl <- chaseDecl cNameFull (not isStar)
              et <- extractPtrType cDecl
              let et' = adjustPtr isStar et
              return (showExtType et', isFunExtType et')
            Just hsType -> return (identToLexeme hsType, False)
            -- FIXME: it is not possible to determine whether `hsType'
            -- is a function; we would need to extend the syntax to
            -- allow `... -> fun HSTYPE' to explicitly mark function
            -- types if this ever becomes important
        traceInfoHsType hsName hsType
        realCName <- liftM (maybe cName snd) $ findObjShadow cName
        pointerDef isStar realCName hsName ptrKind isNewtype hsType isFun
      Right tag -> do -- found a tag definition
        let cNameFull = tagName tag
        traceInfoCName "tag definition" cNameFull
        unless isStar $ -- tags need an explicit `*'
          ptrExpectedErr (posOf cName)
        let hsType = case oRefType of
                       Nothing -> "()"
                       Just hsType -> identToLexeme hsType
        traceInfoHsType hsName hsType
        pointerDef isStar cNameFull hsName ptrKind isNewtype hsType False
  where
    -- remove a pointer level if the first argument is `False'
    --
    adjustPtr True et = et
    adjustPtr False (PtrET et) = et
    adjustPtr _ _ = interr "GenBind.adjustPtr: Where is the Ptr?"
    --
    traceInfoPointer = traceGenBind "** Pointer hook:\n"
    traceInfoCName kind ide = traceGenBind $
      "found C " ++ kind ++ " for `" ++ identToLexeme ide ++ "'\n"
    traceInfoHsType name ty = traceGenBind $
      "associated with Haskell entity `" ++ name ++ "'\nhaving type " ++ ty
      ++ "\n"
-- class hook: register the Haskell class for a pointer type and generate
-- its definition, including all inherited superclasses (stable pointers
-- cannot carry a class)
expandHook (CHSClass oclassIde classIde typeIde pos) =
  do
    traceInfoClass
    classIde `objIs` Class oclassIde typeIde -- register Haskell object
    superClasses <- collectClasses oclassIde
    Pointer ptrType isNewtype <- queryPointer typeIde
    when (ptrType == CHSStablePtr) $
      illegalStablePtrErr pos
    classDef pos (identToLexeme classIde) (identToLexeme typeIde)
             ptrType isNewtype superClasses
  where
    -- compile a list of all super classes (the direct super class first)
    --
    collectClasses :: Maybe Ident -> GB [(String, String, HsObject)]
    collectClasses Nothing = return []
    collectClasses (Just ide) =
      do
        Class oclassIde typeIde <- queryClass ide
        ptr <- queryPointer typeIde
        classes <- collectClasses oclassIde
        return $ (identToLexeme ide, identToLexeme typeIde, ptr) : classes
    --
    traceInfoClass = traceGenBind $ "** Class hook:\n"
-- produce code for an enumeration
--
-- * an extra instance declaration is required when any of the enumeration
-- constants is explicitly assigned a value in its definition
--
-- * the translation function strips prefixes where possible (different
-- enumerators maye have different prefixes)
--
-- | Produce the Haskell code for an enumeration: data type head and body
-- plus either a derived or a hand-generated 'Enum' instance.  'Enum' can
-- only be derived when no enumerator carries an explicit value; explicit
-- values are constant-folded to integer literals first.
enumDef :: CEnum -> String -> TransFun -> [String] -> GB String
enumDef cenum@(CEnum _ list _) hident trans userDerive =
  do
    (list', enumAuto) <- evalTagVals list
    let enumVals = [(trans ide, cexpr) | (ide, cexpr) <- list'] -- translate
        defHead = enumHead hident
        defBody = enumBody (length defHead - 2) enumVals
        inst = makeDerives
                 (if enumAuto then "Enum" : userDerive else userDerive) ++
               if enumAuto then "\n" else "\n" ++ enumInst hident enumVals
    return $ defHead ++ defBody ++ inst
  where
    cpos = posOf cenum
    --
    -- fold explicit tag values to integer constants; the second component
    -- is False as soon as any explicit value occurred (Enum not derivable)
    evalTagVals [] = return ([], True)
    evalTagVals ((ide, Nothing ):list) =
      do
        (list', derived) <- evalTagVals list
        return ((ide, Nothing):list', derived)
    evalTagVals ((ide, Just exp):list) =
      do
        (list', derived) <- evalTagVals list
        val <- evalConstCExpr exp
        case val of
          IntResult val' ->
            return ((ide, Just $ CConst (CIntConst val' at1) at2):list',
                    False)
          FloatResult _ ->
            illegalConstExprErr (posOf exp) "a float result"
        where
          at1 = newAttrsOnlyPos nopos
          at2 = newAttrsOnlyPos nopos
    makeDerives [] = ""
    makeDerives dList = "deriving (" ++ concat (intersperse "," dList) ++")"
-- Haskell code for the head of an enumeration definition
--
-- | Render the head of a Haskell @data@ declaration for an enumeration;
-- e.g. @enumHead "Colour"@ yields @"data Colour = "@.
enumHead :: String -> String
enumHead hsIdent = concat ["data ", hsIdent, " = "]
-- Haskell code for the body of an enumeration definition
--
-- | Render the constructor body of an enumeration's @data@ declaration:
-- one constructor per line, each continuation line indented by the given
-- amount and prefixed with @|@.  The attached C expressions are ignored
-- here; they only matter for the generated `Enum' instance.
enumBody :: Int -> [(String, Maybe CExpr)] -> String
enumBody _      []               = ""
enumBody indent ((name, _):rest) =
  name ++ "\n" ++ replicate indent ' '
       ++ (if null rest then "" else "| " ++ enumBody indent rest)
-- Haskell code for an instance declaration for `Enum'
--
-- * the expression of all explicitly specified tag values already have to be
-- in normal form, ie, to be an int constant
--
-- * enumerations start at 0 and whenever an explicit value is specified,
-- following tags are assigned values continuing from the explicitly
-- specified one
--
-- Haskell code for a hand-written `Enum' instance; needed whenever an
-- enumerator carries an explicit value, so the generated `fromEnum'/`toEnum'
-- reproduce the C numbering exactly.
enumInst :: String -> [(String, Maybe CExpr)] -> String
enumInst ident list =
  "instance Enum " ++ ident ++ " where\n"
  ++ fromDef flatList ++ "\n" ++ toDef flatList ++ "\n"
  ++ succDef names ++ "\n" ++ predDef names ++ "\n"
  ++ enumFromToDef names
  where
    names = map fst list
    flatList = flatten list 0
    -- assign every tag its concrete value: an explicit integer constant
    -- fixes the value and subsequent tags continue counting from there
    -- (C semantics); precondition: explicit values are already `CIntConst'
    flatten [] n = []
    flatten ((ide, exp):list) n = (ide, val) : flatten list (val + 1)
      where
        val = case exp of
                Nothing -> n
                Just (CConst (CIntConst m _) _) -> m
                Just _ -> interr "GenBind.enumInst: Integer constant expected!"
    -- parenthesise negative literals so they are legal in the generated code
    show' x = if x < 0 then "(" ++ show x ++ ")" else show x
    fromDef list = concat
      [ " fromEnum " ++ ide ++ " = " ++ show' val ++ "\n"
      | (ide, val) <- list
      ]
    -- several tags may share one value; `nubBy' keeps only the first so the
    -- generated `toEnum' has no overlapping equations
    toDef list = concat
      [ " toEnum " ++ show' val ++ " = " ++ ide ++ "\n"
      | (ide, val) <- nubBy (\x y -> snd x == snd y) list
      ]
      ++ " toEnum unmatched = error (\"" ++ ident
      ++ ".toEnum: Cannot match \" ++ show unmatched)\n"
    -- `succ'/`pred' walk the (possibly duplicated-value) tag list directly
    succDef [] = " succ _ = undefined\n"
    succDef [x] = " succ _ = undefined\n"
    succDef (x:x':xs) =
      " succ " ++ x ++ " = " ++ x' ++ "\n"
      ++ succDef (x':xs)
    predDef [] = " pred _ = undefined\n"
    predDef [x] = " pred _ = undefined\n"
    predDef (x:x':xs) =
      " pred " ++ x' ++ " = " ++ x ++ "\n"
      ++ predDef (x':xs)
    -- termination compares via `fromEnum' because distinct tags may denote
    -- the same value; NB: `last names' is safe, the list is non-empty here
    enumFromToDef [] = ""
    enumFromToDef names =
      " enumFromTo x y | fromEnum x == fromEnum y = [ y ]\n"
      ++ " | otherwise = x : enumFromTo (succ x) y\n"
      ++ " enumFrom x = enumFromTo x " ++ last names ++ "\n"
      ++ " enumFromThen _ _ = "
      ++ " error \"Enum "++ident++": enumFromThen not implemented\"\n"
      ++ " enumFromThenTo _ _ _ = "
      ++ " error \"Enum "++ident++": enumFromThenTo not implemented\"\n"
-- generate a foreign import declaration that is put into the delayed code
--
-- * the C declaration is a simplified declaration of the function that we
-- want to import into Haskell land
--
-- Generate a foreign import declaration (delayed into the binding's code)
-- for a call hook, and return the Haskell expression that invokes it.
callImport :: CHSHook -> Bool -> Bool -> Maybe String -> String -> String
           -> CDecl -> Position -> GB String
callImport hook isPure isUns mLock ideLexeme hsLexeme cdecl pos =
  do
    -- compute the external type from the declaration, and delay the foreign
    -- export declaration
    --
    (mHsPtrRep, extType) <- extractFunType pos cdecl isPure
    header <- getSwitch headerSB
    delayCode hook (foreignImport header ideLexeme hsLexeme isUns extType)
    traceFunType extType
    -- if the type has any special pointer aliases, generate a lambda
    -- expression which strips off the constructors
    if any isJust mHsPtrRep
      then createLambdaExpr mHsPtrRep
      else return funStr
  where
    -- wrap the imported function in a lambda that unwraps newtype'd pointer
    -- arguments (and brackets foreign pointers with `withForeignPtr') before
    -- passing the raw pointers on
    createLambdaExpr :: [Maybe HsPtrRep] -> GB String
    createLambdaExpr foreignVec = return $
      "(\\" ++
      unwords (zipWith wrPattern foreignVec [1..])++ " -> "++
      concat (zipWith wrForPtr foreignVec [1..])++funStr++" "++
      unwords (zipWith wrArg foreignVec [1..])++")"
    -- lambda pattern for the n-th argument: match the newtype constructor
    -- if there is one
    wrPattern (Just (_,_,Just con,_)) n = "("++con++" arg"++show n++")"
    wrPattern _ n = "arg"++show n
    -- foreign pointers must be kept alive across the call
    wrForPtr (Just (_,CHSForeignPtr,_,_)) n
      = "withForeignPtr arg"++show n++" $ \\argPtr"++show n++" ->"
    wrForPtr _ n = ""
    -- the actual argument handed to the imported function
    wrArg (Just (_,CHSForeignPtr,_,_)) n = "argPtr"++show n
    wrArg (Just (_,CHSStablePtr,_,_)) n =
      "(castStablePtrToPtr arg"++show n++")"
    wrArg _ n = "arg"++show n
    -- callee name, optionally wrapped in the user-supplied lock function
    funStr = case mLock of Nothing -> hsLexeme
                           Just lockFun -> lockFun ++ " $ " ++ hsLexeme
    traceFunType et = traceGenBind $
      "Imported function type: " ++ showExtType et ++ "\n"
-- Haskell code for the foreign import declaration needed by a call hook
--
-- On Windows, the paths for headers in "entity" may include backslashes, like
-- dist\build\System\Types\GIO.h
-- It seems GHC expects these to be escaped. Below, we make an educated guess
-- that it in fact expects a Haskell string, and use the "show" function to do
-- the escaping of this (and any other cases) for us.
-- | Haskell code for the foreign import declaration needed by a call hook.
--
-- The entity string (header + C identifier) is rendered with `show' so that
-- backslashes in Windows-style header paths come out escaped, as GHC
-- apparently expects a Haskell string literal there.
foreignImport :: String -> String -> String -> Bool -> ExtType -> String
foreignImport header ident hsIdent isUnsafe ty =
  concat [ "foreign import ccall ", safetyKeyword, " ", show entity
         , "\n  ", hsIdent, " :: ", showExtType ty, "\n" ]
  where
    safetyKeyword | isUnsafe  = "unsafe"
                  | otherwise = "safe"
    entity = if null header then ident else header ++ " " ++ ident
-- produce a Haskell function definition for a fun hook
--
-- Produce a Haskell function definition for a fun hook: type signature,
-- head, and a body that marshals the arguments in, calls the foreign
-- import, marshals the outputs back, and tuples up the results.
funDef :: Bool -- pure function?
       -> String -- name of the new Haskell function
       -> String -- Haskell name of the foreign imported C function
       -> CDecl -- simplified declaration of the C function
       -> Maybe String -- type context of the new Haskell function
       -> Maybe String -- lock function
       -> [CHSParm] -- parameter marshalling description
       -> CHSParm -- result marshalling description
       -> Position -- source location of the hook
       -> GB String -- Haskell code in text form
funDef isPure hsLexeme fiLexeme cdecl octxt mLock parms parm pos =
  do
    (parms', parm', isImpure) <- addDftMarshaller pos parms parm cdecl
    traceMarsh parms' parm' isImpure
    let
      sig = hsLexeme ++ " :: " ++ funTy parms' parm' ++ "\n"
      -- one 5-tuple of code fragments per parameter (see `marshArg')
      marshs = [marshArg i parm | (i, parm) <- zip [1..] parms']
      funArgs = [funArg | (funArg, _, _, _, _) <- marshs, funArg /= ""]
      marshIns = [marshIn | (_, marshIn, _, _, _) <- marshs]
      callArgs = [callArg | (_, _, callArg, _, _) <- marshs]
      marshOuts = [marshOut | (_, _, _, marshOut, _) <- marshs, marshOut /= ""]
      retArgs = [retArg | (_, _, _, _, retArg) <- marshs, retArg /= ""]
      -- a pure hook whose marshalling needs IO gets `unsafePerformIO'
      funHead = hsLexeme ++ join funArgs ++ " =\n" ++
                if isPure && isImpure then " unsafePerformIO $\n" else ""
      lock = case mLock of Nothing -> ""
                           Just lock -> lock ++ " $"
      call = if isPure
             then " let {res = " ++ fiLexeme ++ join callArgs ++ "} in\n"
             else " " ++ lock ++ fiLexeme ++ join callArgs ++ " >>= \\res ->\n"
      -- marshal the C result with the "out" marshaller of the result spec;
      -- after `addDftMarshaller' the out marshaller is always `Just'
      marshRes = case parm' of
                   CHSParm _ _ twoCVal (Just (_ , CHSVoidArg)) _ -> ""
                   CHSParm _ _ twoCVal (Just (omIde, CHSIOArg )) _ ->
                     " " ++ identToLexeme omIde ++ " res >>= \\res' ->\n"
                   CHSParm _ _ twoCVal (Just (omIde, CHSValArg )) _ ->
                     " let {res' = " ++ identToLexeme omIde ++ " res} in\n"
                   CHSParm _ _ _ Nothing _ ->
                     interr "GenBind.funDef: marshRes: no default?"
      -- a void result contributes nothing to the returned tuple
      retArgs' = case parm' of
                   CHSParm _ _ _ (Just (_, CHSVoidArg)) _ -> retArgs
                   _ -> "res'":retArgs
      ret = "(" ++ concat (intersperse ", " retArgs') ++ ")"
      funBody = joinLines marshIns ++
                call ++
                joinLines marshOuts ++
                marshRes ++
                " " ++
                (if isImpure || not isPure then "return " else "") ++ ret
    return $ sig ++ funHead ++ funBody
  where
    join = concatMap (' ':)
    joinLines = concatMap (\s -> " " ++ s ++ "\n")
    --
    -- construct the function type
    --
    -- * specified types appear in the argument and result only if their "in"
    --   and "out" marshaller, respectively, is not the `void' marshaller
    --
    funTy parms parm =
      let
        ctxt = case octxt of
                 Nothing -> ""
                 Just ctxtStr -> ctxtStr ++ " => "
        argTys = [ty | CHSParm im ty _ _ _ <- parms , notVoid im]
        resTys = [ty | CHSParm _ ty _ om _ <- parm:parms, notVoid om]
        -- a single pure result needs no tuple parentheses
        resTup = let
                   (lp, rp) = if isPure && length resTys == 1
                              then ("", "")
                              else ("(", ")")
                   io = if isPure then "" else "IO "
                 in
                 io ++ lp ++ concat (intersperse ", " resTys) ++ rp
      in
      ctxt ++ concat (intersperse " -> " (argTys ++ [resTup]))
      where
        notVoid Nothing = interr "GenBind.funDef: \
                                 \No default marshaller?"
        notVoid (Just (_, kind)) = kind /= CHSVoidArg
    --
    -- for an argument marshaller, generate all "in" and "out" marshalling
    -- code fragments; yields (function argument, in-marshalling, C call
    -- argument, out-marshalling, return component) for argument number `i'
    --
    marshArg i (CHSParm (Just (imIde, imArgKind)) _ twoCVal
                        (Just (omIde, omArgKind)) _ ) =
      let
        a = "a" ++ show i
        imStr = identToLexeme imIde
        imApp = imStr ++ " " ++ a
        funArg = if imArgKind == CHSVoidArg then "" else a
        -- a marshaller may produce two C values (eg, pointer plus length)
        inBndr = if twoCVal
                 then "(" ++ a ++ "'1, " ++ a ++ "'2)"
                 else a ++ "'"
        marshIn = case imArgKind of
                    CHSVoidArg -> imStr ++ " $ \\" ++ inBndr ++ " -> "
                    CHSIOArg -> imApp ++ " $ \\" ++ inBndr ++ " -> "
                    CHSValArg -> "let {" ++ inBndr ++ " = " ++
                                 imApp ++ "} in "
        callArg = if twoCVal
                  then "" ++ a ++ "'1 " ++ a ++ "'2"
                  else a ++ "'"
        omApp = identToLexeme omIde ++ " " ++ callArg
        outBndr = a ++ "''"
        marshOut = case omArgKind of
                     CHSVoidArg -> ""
                     CHSIOArg -> omApp ++ ">>= \\" ++ outBndr ++ " -> "
                     CHSValArg -> "let {" ++ outBndr ++ " = " ++
                                  omApp ++ "} in "
        retArg = if omArgKind == CHSVoidArg then "" else outBndr
      in
      (funArg, marshIn, callArg, marshOut, retArg)
    marshArg _ _ = interr "GenBind.funDef: Missing default?"
    --
    traceMarsh parms parm isImpure = traceGenBind $
      "Marshalling specification including defaults: \n" ++
      showParms (parms ++ [parm]) "" ++
      " The marshalling is " ++ if isImpure then "impure.\n" else "pure.\n"
      where
        showParms [] = id
        showParms (parm:parms) = showString " "
                                 . showCHSParm parm
                                 . showChar '\n'
                                 . showParms parms
-- add default marshallers for "in" and "out" marshalling
--
-- Add default marshallers for "in" and "out" marshalling wherever the hook
-- did not specify one explicitly; the Bool result is True iff any of the
-- chosen marshallers needs the IO monad.
addDftMarshaller :: Position -> [CHSParm] -> CHSParm -> CDecl
                 -> GB ([CHSParm], CHSParm, Bool)
addDftMarshaller pos parms parm cdecl = do
  (_, fType) <- extractFunType pos cdecl True
  let (resTy, argTys) = splitFunTy fType
  (parm' , isImpure1) <- checkResMarsh parm resTy
  (parms', isImpure2) <- addDft parms argTys
  return (parms', parm', isImpure1 || isImpure2)
  where
    -- the result marshalling may not use an "in" marshaller and can only have
    -- one C value
    --
    -- * a default marshaller maybe used for "out" marshalling
    --
    checkResMarsh (CHSParm (Just _) _ _ _ pos) _ =
      resMarshIllegalInErr pos
    checkResMarsh (CHSParm _ _ True _ pos) _ =
      resMarshIllegalTwoCValErr pos
    checkResMarsh (CHSParm _ ty _ omMarsh pos) cTy = do
      (imMarsh', _ ) <- addDftVoid Nothing
      (omMarsh', isImpure) <- addDftOut pos omMarsh ty [cTy]
      return (CHSParm imMarsh' ty False omMarsh' pos, isImpure)
    --
    -- decompose the external function type into result and argument types;
    -- a single `void' argument (nullary C function) is dropped
    splitFunTy (FunET UnitET ty ) = splitFunTy ty
    splitFunTy (FunET ty1 ty2) = let
                                   (resTy, argTys) = splitFunTy ty2
                                 in
                                 (resTy, ty1:argTys)
    splitFunTy resTy = (resTy, [])
    --
    -- match Haskell with C arguments (and results); a parameter whose "in"
    -- marshaller yields two C values consumes two C argument types
    --
    addDft ((CHSParm imMarsh hsTy False omMarsh p):parms) (cTy :cTys) = do
      (imMarsh', isImpureIn ) <- addDftIn p imMarsh hsTy [cTy]
      (omMarsh', isImpureOut) <- addDftVoid omMarsh
      (parms' , isImpure ) <- addDft parms cTys
      return (CHSParm imMarsh' hsTy False omMarsh' p : parms',
              isImpure || isImpureIn || isImpureOut)
    addDft ((CHSParm imMarsh hsTy True omMarsh p):parms) (cTy1:cTy2:cTys) = do
      (imMarsh', isImpureIn ) <- addDftIn p imMarsh hsTy [cTy1, cTy2]
      (omMarsh', isImpureOut) <- addDftVoid omMarsh
      (parms' , isImpure ) <- addDft parms cTys
      return (CHSParm imMarsh' hsTy True omMarsh' p : parms',
              isImpure || isImpureIn || isImpureOut)
    addDft [] [] =
      return ([], False)
    addDft ((CHSParm _ _ _ _ pos):parms) [] =
      marshArgMismatchErr pos "This parameter is in excess of the C arguments."
    addDft [] (_:_) =
      marshArgMismatchErr pos "Parameter marshallers are missing."
    --
    -- an explicit marshaller is kept; otherwise look up the default for the
    -- Haskell/C type pair (NOTE(review): the final `case' is partial for
    -- `Nothing', relying on `noDftMarshErr' aborting the GB computation)
    addDftIn _ imMarsh@(Just (_, kind)) _ _ = return (imMarsh,
                                                      kind == CHSIOArg)
    addDftIn pos imMarsh@Nothing hsTy cTys = do
      marsh <- lookupDftMarshIn hsTy cTys
      when (isNothing marsh) $
        noDftMarshErr pos "\"in\"" hsTy cTys
      return (marsh, case marsh of {Just (_, kind) -> kind == CHSIOArg})
    --
    addDftOut _ omMarsh@(Just (_, kind)) _ _ = return (omMarsh,
                                                       kind == CHSIOArg)
    addDftOut pos omMarsh@Nothing hsTy cTys = do
      marsh <- lookupDftMarshOut hsTy cTys
      when (isNothing marsh) $
        noDftMarshErr pos "\"out\"" hsTy cTys
      return (marsh, case marsh of {Just (_, kind) -> kind == CHSIOArg})
    --
    -- add void marshaller if no explicit one is given
    --
    addDftVoid marsh@(Just (_, kind)) = return (marsh, kind == CHSIOArg)
    addDftVoid Nothing = do
      return (Just (noPosIdent "void", CHSVoidArg), False)
-- compute from an access path, the declarator finally accessed and the index
-- path required for the access
--
-- * each element in the index path specifies dereferencing an address and the
-- offset to be added to the address before dereferencing
--
-- * the returned declaration is already normalised (ie, aliases have been
--   expanded)
--
-- * it may appear as if `t.m' and `t->m' should have different access paths,
-- as the latter specifies one more dereferencing; this is certainly true in
-- C, but it doesn't apply here, as `t.m' is merely provided for the
-- convenience of the interface writer - it is strictly speaking an
-- impossible access paths, as in Haskell we always have a pointer to a
-- structure, we can never have the structure as a value itself
--
-- Compute, from an access path, the declarator finally accessed and the
-- index path (one `BitSize' offset per dereference) required for the access.
accessPath :: CHSAPath -> GB (CDecl, [BitSize])
accessPath (CHSRoot ide) = -- t
  do
    decl <- findAndChaseDecl ide False True
    return (ide `simplifyDecl` decl, [BitSize 0 0])
accessPath (CHSDeref (CHSRoot ide) _) = -- *t
  do
    decl <- findAndChaseDecl ide True True
    return (ide `simplifyDecl` decl, [BitSize 0 0])
accessPath (CHSRef root@(CHSRoot ide1) ide2) = -- t.m
  do
    su <- lookupStructUnion ide1 False True
    (offset, decl') <- refStruct su ide2
    adecl <- replaceByAlias decl'
    return (adecl, [offset])
accessPath (CHSRef (CHSDeref (CHSRoot ide1) _) ide2) = -- t->m
  do
    su <- lookupStructUnion ide1 True True
    (offset, decl') <- refStruct su ide2
    adecl <- replaceByAlias decl'
    return (adecl, [offset])
accessPath (CHSRef path ide) = -- a.m
  do
    -- selecting a member does not dereference: fold the member offset into
    -- the topmost offset of the sub-path
    (decl, offset:offsets) <- accessPath path
    assertPrimDeclr ide decl
    su <- structFromDecl (posOf ide) decl
    (addOffset, decl') <- refStruct su ide
    adecl <- replaceByAlias decl'
    return (adecl, offset `addBitSize` addOffset : offsets)
  where
    -- the accessed declarator must be a plain variable (not itself a
    -- pointer/function declarator) for member selection to make sense
    assertPrimDeclr ide (CDecl _ [declr] _) =
      case declr of
        (Just (CVarDeclr _ _), _, _) -> nop
        _ -> structExpectedErr ide
accessPath (CHSDeref path pos) = -- *a
  do
    -- dereferencing pushes a fresh zero offset onto the index path
    (decl, offsets) <- accessPath path
    decl' <- derefOrErr decl
    adecl <- replaceByAlias decl'
    return (adecl, BitSize 0 0 : offsets)
  where
    -- strip one level of pointer indirection from the declarator (one
    -- qualifier list entry), or fail if there is no pointer to strip
    derefOrErr (CDecl specs [declr] at) =
      case declr of
        (Just (CPtrDeclr [_] declr at), oinit, oexpr) ->
          return $ CDecl specs [(Just declr, oinit, oexpr)] at
        (Just (CPtrDeclr (_:quals) declr at), oinit, oexpr) ->
          return $
            CDecl specs [(Just (CPtrDeclr quals declr at), oinit, oexpr)] at
        _ ->
          ptrExpectedErr pos
-- replaces a declaration by its alias, if any
--
-- * the alias inherits any field size specification that the original
-- declaration may have
--
-- * declaration must have exactly one declarator
--
-- Replace a declaration by its alias, if any; the alias inherits the
-- original's field size specification.  Precondition: the declaration has
-- exactly one declarator (the pattern is deliberately partial), and an
-- alias found by `checkForAlias' likewise has exactly one declarator.
replaceByAlias :: CDecl -> GB CDecl
replaceByAlias cdecl@(CDecl _ [(_, _, size)] at) =
  do
    ocdecl <- checkForAlias cdecl
    case ocdecl of
      Nothing -> return cdecl
      Just (CDecl specs [(declr, init, _)] at) -> -- form of an alias
        -- keep the original bit-field size, discard the alias's
        return $ CDecl specs [(declr, init, size)] at
-- given a structure declaration and member name, compute the offset of the
-- member in the structure and the declaration of the referenced member
--
-- Given a structure declaration and member name, compute the bit offset of
-- the member in the structure and the declaration of the referenced member.
refStruct :: CStructUnion -> Ident -> GB (BitSize, CDecl)
refStruct su ide =
  do
    -- get the list of fields and check for our selector
    --
    let (fields, tag) = structMembers su
        (pre, post) = span (not . flip declNamed ide) fields
    when (null post) $
      unknownFieldErr (posOf su) ide
    --
    -- get sizes of preceding fields and the result type (`pre' are all
    -- declarators preceding `ide' and the first declarator in `post' defines
    -- `ide')
    --
    let decl = head post
    -- union members all live at offset 0
    offset <- case tag of
                CStructTag -> offsetInStruct pre decl tag
                CUnionTag -> return $ BitSize 0 0
    return (offset, decl)
-- does the given declarator define the given name?
--
-- does the given declarator define the given name?
--
declNamed :: CDecl -> Ident -> Bool
declNamed (CDecl _ declrs _) ide =
  case declrs of
    [(Nothing , _, _)] -> False
    [(Just declr, _, _)] -> declr `declrNamed` ide
    [] ->
      interr "GenBind.declNamed: Abstract declarator in structure!"
    _ ->
      interr "GenBind.declNamed: More than one declarator!"
-- Haskell code for writing to or reading from a struct
--
-- Haskell code for writing to or reading from a struct: builds a lambda
-- that walks the (reversed) offset path, dereferencing at each step, and
-- finally peeks/pokes the member — with bit twiddling for bitfields.
setGet :: Position -> CHSAccess -> [BitSize] -> ExtType -> GB String
setGet pos access offsets ty =
  do
    let pre = case access of
                CHSSet -> "(\\ptr val -> do {"
                CHSGet -> "(\\ptr -> do {"
    body <- setGetBody (reverse offsets)
    return $ pre ++ body ++ "})"
  where
    -- the last element of the path does the actual member access
    -- (precondition: `offsets' is non-empty, as produced by `accessPath')
    setGetBody [BitSize offset bitOffset] =
      do
        -- an aliased type is itself a pointer, so peek/poke a `Ptr'
        let ty' = case ty of
                    t@(DefinedET _ _) -> PtrET t
                    t -> t
        let tyTag = showExtType ty'
        bf <- checkType ty'
        case bf of
          Nothing -> return $ case access of -- not a bitfield
                       CHSGet -> peekOp offset tyTag
                       CHSSet -> pokeOp offset tyTag "val"
--FIXME: must take `bitfieldDirection' into account
          Just (_, bs) -> return $ case access of -- a bitfield
                            CHSGet -> "val <- " ++ peekOp offset tyTag
                                      ++ extractBitfield
                            CHSSet -> "org <- " ++ peekOp offset tyTag
                                      ++ insertBitfield
                                      ++ pokeOp offset tyTag "val'"
            where
              -- we have to be careful here to ensure proper sign extension;
              -- in particular, shifting right followed by anding a mask is
              -- *not* sufficient; instead, we exploit in the following that
              -- `shiftR' performs sign extension
              --
              extractBitfield = "; return $ (val `shiftL` ("
                                ++ bitsPerField ++ " - "
                                ++ show (bs + bitOffset) ++ ")) `shiftR` ("
                                ++ bitsPerField ++ " - " ++ show bs
                                ++ ")"
              bitsPerField = show $ size CIntPT * 8
              --
              -- clear the target bit range in `org', then or the shifted
              -- value in; the mask is built by shifting/rotating all-ones
              insertBitfield = "; let {val' = (org .&. " ++ middleMask
                               ++ ") .|. (val `shiftL` "
                               ++ show bitOffset ++ ")}; "
              middleMask = "fromIntegral (((maxBound::CUInt) `shiftL` "
                           ++ show bs ++ ") `rotateL` "
                           ++ show bitOffset ++ ")"
    -- an intermediate step with byte offset only: follow the pointer
    setGetBody (BitSize offset 0 : offsets) =
      do
        code <- setGetBody offsets
        return $ "ptr <- peekByteOff ptr " ++ show offset ++ "; " ++ code
    -- a bit offset on an intermediate step cannot be dereferenced
    setGetBody (BitSize _ _ : _ ) =
      derefBitfieldErr pos
    --
    -- check that the type can be marshalled and compute extra operations for
    -- bitfields
    --
    checkType (IOET _ ) = interr "GenBind.setGet: Illegal \
                                 \type!"
    checkType (UnitET ) = voidFieldErr pos
    checkType (PrimET (CUFieldPT bs)) = return $ Just (False, bs)
    checkType (PrimET (CSFieldPT bs)) = return $ Just (True , bs)
    checkType _ = return Nothing
    --
    peekOp off tyTag = "peekByteOff ptr " ++ show off ++ " ::IO " ++ tyTag
    pokeOp off tyTag var = "pokeByteOff ptr " ++ show off ++ " (" ++ var
                           ++ "::" ++ tyTag ++ ")"
-- generate the type definition for a pointer hook and enter the required type
-- mapping into the `ptrmap'
--
-- generate the type definition for a pointer hook and enter the required type
-- mapping into the `ptrmap'
--
pointerDef :: Bool -- explicit `*' in pointer hook
           -> Ident -- full C name
           -> String -- Haskell name
           -> CHSPtrType -- kind of the pointer
           -> Bool -- explicit newtype tag
           -> String -- Haskell type expression of pointer argument
           -> Bool -- do we have a pointer to a function?
           -> GB String
pointerDef isStar cNameFull hsName ptrKind isNewtype hsType isFun =
  do
    keepOld <- getSwitch oldFFI
    -- under the legacy FFI every pointer is untyped; a newtype hides the
    -- argument type behind the abstract Haskell name
    let ptrArg
          | keepOld = "()" -- legacy FFI interface
          | isNewtype = hsName -- abstract type
          | otherwise = hsType -- concrete type
        ptrCon = case ptrKind of
                   CHSPtr | isFun -> "FunPtr"
                   _ -> show ptrKind
        ptrType = ptrCon ++ " (" ++ ptrArg ++ ")"
    -- record the mapping so later hooks can resolve this pointer alias
    (isStar, cNameFull) `ptrMapsTo` (isFun,
                                     ptrKind,
                                     if isNewtype then Just hsName else Nothing,
                                     ptrArg)
    return $
      if isNewtype
      then "newtype " ++ hsName ++ " = " ++ hsName ++ " (" ++ ptrType ++ ")"
      else "type " ++ hsName ++ " = " ++ ptrType
-- generate the class and instance definitions for a class hook
--
-- * the pointer type must not be a stable pointer
--
-- * the first super class (if present) must be the direct superclass
--
-- * all Haskell objects in the superclass list must be pointer objects
--
-- Generate the class and instance definitions for a class hook: the class
-- itself, an identity instance for the pointer type, and one cast-based
-- instance per superclass.
classDef :: Position -- for error messages
         -> String -- class name
         -> String -- pointer type name
         -> CHSPtrType -- type of the pointer
         -> Bool -- is a newtype?
         -> [(String, String, HsObject)] -- superclasses
         -> GB String
classDef pos className typeName ptrType isNewtype superClasses =
  do
    let
      -- method names are the pointer type name with the first letter
      -- lower-cased
      toMethodName = case typeName of
                       "" -> interr "GenBind.classDef: \
                                    \Illegal identifier!"
                       c:cs -> toLower c : cs
      fromMethodName = "from" ++ typeName
      -- the direct superclass (head of the list, if any) becomes a
      -- constraint on the generated class
      classDefContext = case superClasses of
                          [] -> ""
                          (superName, _, _):_ -> superName ++ " p => "
      -- NB: this local binding intentionally shadows the top-level `classDef'
      classDef =
        "class " ++ classDefContext ++ className ++ " p where\n"
        ++ " " ++ toMethodName ++ " :: p -> " ++ typeName ++ "\n"
        ++ " " ++ fromMethodName ++ " :: " ++ typeName ++ " -> p\n"
      instDef =
        "instance " ++ className ++ " " ++ typeName ++ " where\n"
        ++ " " ++ toMethodName ++ " = id\n"
        ++ " " ++ fromMethodName ++ " = id\n"
    instDefs <- castInstDefs superClasses
    return $ classDef ++ instDefs ++ instDef
  where
    -- one instance per superclass, converting via the pointer cast function;
    -- precondition (checked by the hook expansion): every superclass object
    -- is a `Pointer'
    castInstDefs [] = return ""
    castInstDefs ((superName, ptrName, Pointer ptrType' isNewtype'):classes) =
      do
        -- all classes in a hierarchy must wrap the same kind of pointer
        unless (ptrType == ptrType') $
          pointerTypeMismatchErr pos className superName
        let toMethodName = case ptrName of
                             "" -> interr "GenBind.classDef: \
                                          \Illegal identifier - 2!"
                             c:cs -> toLower c : cs
            fromMethodName = "from" ++ ptrName
            castFun = "cast" ++ show ptrType
            -- newtype wrappers must be unwrapped/rewrapped around the cast
            typeConstr = if isNewtype then typeName ++ " " else ""
            superConstr = if isNewtype' then ptrName ++ " " else ""
            instDef =
              "instance " ++ superName ++ " " ++ typeName ++ " where\n"
              ++ " " ++ toMethodName ++ " (" ++ typeConstr ++ "p) = "
              ++ superConstr ++ "(" ++ castFun ++ " p)\n"
              ++ " " ++ fromMethodName ++ " (" ++ superConstr ++ "p) = "
              ++ typeConstr ++ "(" ++ castFun ++ " p)\n"
        instDefs <- castInstDefs classes
        return $ instDef ++ instDefs
-- C code computations
-- -------------------
-- the result of a constant expression
--
-- outcome of evaluating a constant C expression (see `evalConstCExpr'):
-- either an integral or a floating-point value
data ConstResult = IntResult Integer
                 | FloatResult Float
-- types that may occur in foreign declarations, ie, Haskell land types
--
-- * we represent C functions with no arguments (ie, the ANSI C `void'
-- argument) by `FunET UnitET res' rather than just `res' internally,
-- although the latter representation is finally emitted into the binding
-- file; this is because we need to know which types are functions (in
-- particular, to distinguish between `Ptr a' and `FunPtr a')
--
-- * aliased types (`DefinedET') are represented by a string plus their C
-- declaration; the latter is for functions interpreting the following
-- structure; an aliased type is always a pointer type that is contained in
-- the pointer map (and got there either from a .chi or from a pointer hook
-- in the same module)
--
-- * the representation for pointers does not distinguish between normal,
-- function, foreign, and stable pointers; function pointers are identified
-- by their argument and foreign and stable pointers are only used
-- indirectly, by referring to type names introduced by a `pointer' hook
--
-- external (Haskell-land) types; see the discussion in the preceding
-- comment for the representation invariants
data ExtType = FunET ExtType ExtType -- function
             | IOET ExtType -- operation with side effect
             | PtrET ExtType -- typed pointer
             | DefinedET CDecl HsPtrRep -- aliased type
             | PrimET CPrimType -- basic C type
             | UnitET -- void
-- equality on external types
--
-- * `DefinedET's are compared by their Haskell representation only; the
--   embedded C declaration is deliberately ignored
--
-- * the catch-all case makes the instance total: two values built from
--   different constructors compare unequal (the original code omitted it,
--   so such a comparison raised a pattern-match failure at runtime)
instance Eq ExtType where
  (FunET t1 t2 ) == (FunET t1' t2' ) = t1 == t1' && t2 == t2'
  (IOET t ) == (IOET t' ) = t == t'
  (PtrET t ) == (PtrET t' ) = t == t'
  (DefinedET _ rep ) == (DefinedET _ rep' ) = rep == rep'
  (PrimET t ) == (PrimET t' ) = t == t'
  UnitET == UnitET = True
  _ == _ = False
-- composite C type
--
-- composite C type: either fully translatable to Haskell or a struct/union
-- that has no direct Haskell counterpart
data CompType = ExtType ExtType -- external type
              | SUType CStructUnion -- structure or union
-- check whether an external type denotes a function type
--
-- check whether an external type denotes a function type
--
isFunExtType :: ExtType -> Bool
isFunExtType ty =
  case ty of
    FunET _ _ -> True
    IOET _ -> True
    -- an aliased type records in its `HsPtrRep' whether it wraps a function
    DefinedET _ (isFun, _, _, _) -> isFun
    _ -> False
-- pretty print an external type
--
-- * a previous version of this function attempted to not print unnecessary
-- brackets; this however doesn't work consistently due to `DefinedET'; so,
-- we give up on the idea (preferring simplicity)
--
-- pretty print an external type
--
-- * every compound form is parenthesised unconditionally; omitting
--   redundant brackets does not work reliably because of `DefinedET'
--
showExtType :: ExtType -> String
showExtType extTy =
  case extTy of
    -- a nullary C function: drop the internal `void' argument
    FunET UnitET res -> showExtType res
    FunET arg res -> "(" ++ showExtType arg ++ " -> "
                     ++ showExtType res ++ ")"
    IOET t -> "(IO " ++ showExtType t ++ ")"
    PtrET t -> let ptrCon = if isFunExtType t
                            then "FunPtr" else "Ptr"
               in
               "(" ++ ptrCon ++ " " ++ showExtType t
               ++ ")"
    DefinedET _ (_,_,_,str) -> str
    PrimET CPtrPT -> "(Ptr ())"
    PrimET CFunPtrPT -> "(FunPtr ())"
    PrimET CCharPT -> "CChar"
    PrimET CUCharPT -> "CUChar"
    PrimET CSCharPT -> "CSChar"
    PrimET CIntPT -> "CInt"
    PrimET CShortPT -> "CShort"
    PrimET CLongPT -> "CLong"
    PrimET CLLongPT -> "CLLong"
    PrimET CUIntPT -> "CUInt"
    PrimET CUShortPT -> "CUShort"
    PrimET CULongPT -> "CULong"
    PrimET CULLongPT -> "CULLong"
    PrimET CFloatPT -> "CFloat"
    PrimET CDoublePT -> "CDouble"
    PrimET CLDoublePT -> "CLDouble"
    PrimET (CSFieldPT bs) -> "CInt{-:" ++ show bs ++ "-}"
    PrimET (CUFieldPT bs) -> "CUInt{-:" ++ show bs ++ "-}"
    UnitET -> "()"
-- compute the type of the C function declared by the given C object
--
-- * the identifier specifies in which of the declarators we are interested
--
-- * if the third argument is `True', the function result should not be
-- wrapped into an `IO' type
--
-- * the caller has to guarantee that the object does indeed refer to a
-- function
--
-- Compute the external type of the C function in the given declaration and,
-- per argument, whether it is a specially wrapped Haskell pointer.
extractFunType :: Position -> CDecl -> Bool ->
                  GB ([Maybe HsPtrRep], ExtType)
extractFunType pos cdecl isPure =
  do
    -- remove all declarators except that of the function we are processing;
    -- then, extract the functions arguments and result type (also check that
    -- the function is not variadic); finally, compute the external type for
    -- the result
    --
    let (args, resultDecl, variadic) = funResultAndArgs cdecl
    when variadic $
      variadicErr pos cpos
    preResultType <- liftM (snd . expandSpecialPtrs) $
                     extractSimpleType pos resultDecl
    --
    -- we can now add the `IO' monad if this is no pure function
    --
    let resultType = if isPure
                     then preResultType
                     else IOET preResultType
    --
    -- compute function arguments and create a function type (a function
    -- prototype with `void' as its single argument declares a nullary
    -- function)
    --
    (foreignSyn, argTypes) <- liftM (unzip . map expandSpecialPtrs) $
                              mapM (extractSimpleType pos) args
    return (foreignSyn, foldr FunET resultType argTypes)
  where
    cpos = posOf cdecl
    -- provide info on Haskell wrappers around C pointers
    expandSpecialPtrs :: ExtType -> (Maybe HsPtrRep, ExtType)
    -- no special treatment for a simple type synonym
    expandSpecialPtrs all@(DefinedET cdecl (_, CHSPtr, Nothing, _)) =
      (Nothing, PtrET all)
    -- all other Haskell pointer wrappings (newtypes, foreign and stable
    -- pointers) require special calling conventions
    expandSpecialPtrs all@(DefinedET cdecl hsPtrRep) =
      (Just hsPtrRep, PtrET all)
    -- non-pointer arguments are passed normally
    expandSpecialPtrs all = (Nothing, all)
-- compute a non-struct/union type from the given declaration
--
-- * the declaration may have at most one declarator
--
-- * C functions are represented as `Ptr (FunEt ...)' or `Addr' if in
-- compatibility mode (ie, `--old-ffi=yes')
--
-- compute a non-struct/union type from the given declaration
--
-- * the declaration may have at most one declarator
--
-- * a struct/union type is an error here (reported at the given position)
--
extractSimpleType :: Position -> CDecl -> GB ExtType
extractSimpleType pos cdecl =
  do
    traceGenBind "Entering `extractSimpleType'...\n"
    compTy <- extractCompType cdecl
    case compTy of
      ExtType et -> return et
      SUType _ -> illegalStructUnionErr (posOf cdecl) pos
-- compute a Haskell type for a type referenced in a C pointer type
--
-- * the declaration may have at most one declarator
--
-- * struct/union types are mapped to `()'
--
-- * NB: this is by definition not a result type
--
-- compute a Haskell type for a type referenced in a C pointer type
--
-- * the declaration may have at most one declarator
--
-- * struct/union types are mapped to `()' (we only ever see them behind a
--   pointer, never as a value)
--
extractPtrType :: CDecl -> GB ExtType
extractPtrType cdecl =
  do
    compTy <- extractCompType cdecl
    case compTy of
      SUType _ -> return UnitET
      ExtType et -> return et
-- compute a Haskell type from the given C declaration, where C functions are
-- represented by function pointers
--
-- * the declaration may have at most one declarator
--
-- * all C pointers (including functions) are represented as `Addr' if in
-- compatibility mode (--old-ffi)
--
-- * typedef'ed types are chased
--
-- * takes the pointer map into account
--
-- * IMPORTANT NOTE: `sizeAlignOf' relies on `DefinedET' only being produced
-- for pointer types; if this ever changes, we need to
-- handle `DefinedET's differently. The problem is that
-- entries in the pointer map currently prevent
-- `extractCompType' from looking further "into" the
-- definition of that pointer.
--
-- Compute a Haskell type from the given C declaration, dispatching on
-- whether the (single) declarator is a pointer, a function, or neither;
-- typedef aliases are chased and the pointer map is consulted.
extractCompType :: CDecl -> GB CompType
extractCompType cdecl@(CDecl specs declrs ats) =
  if length declrs > 1
  then interr "GenBind.extractCompType: Too many declarators!"
  else case declrs of
    [(Just declr, _, size)] | isPtrDeclr declr -> ptrType declr
                            | isFunDeclr declr -> funType
                            | otherwise -> aliasOrSpecType size
    [] -> aliasOrSpecType Nothing
  where
    -- handle explicit pointer types
    --
    ptrType declr = do
      tracePtrType
      let declrs' = dropPtrDeclr declr -- remove indirection
          cdecl' = CDecl specs [(Just declrs', Nothing, Nothing)] ats
          oalias = checkForOneAliasName cdecl' -- is only an alias remaining?
      oHsRepr <- case oalias of
                   Nothing -> return $ Nothing
                   Just ide -> queryPtr (True, ide)
      case oHsRepr of
        Just repr -> ptrAlias repr -- got an alias
        Nothing -> do -- no alias => recurs
          ct <- extractCompType cdecl'
          returnX $ case ct of
                      ExtType et -> PtrET et
                      -- a pointer to a struct/union is an untyped pointer
                      SUType _ -> PtrET UnitET
    --
    -- handle explicit function types
    --
    -- FIXME: we currently regard any functions as being impure (ie, being IO
    -- functions); is this ever going to be a problem?
    --
    funType = do
      traceFunType
      (_, et) <- extractFunType (posOf cdecl) cdecl False
      returnX et
    --
    -- handle all types, which are not obviously pointers or functions
    --
    aliasOrSpecType :: Maybe CExpr -> GB CompType
    aliasOrSpecType size = do
      traceAliasOrSpecType size
      case checkForOneAliasName cdecl of
        Nothing -> specType (posOf cdecl) specs size
        Just ide -> do -- this is a typedef alias
          traceAlias ide
          oHsRepr <- queryPtr (False, ide) -- check for pointer hook alias
          case oHsRepr of
            Nothing -> do -- skip current alias (only one)
              cdecl' <- getDeclOf ide
              let CDecl specs [(declr, init, _)] at =
                    ide `simplifyDecl` cdecl'
                  sdecl = CDecl specs [(declr, init, size)] at
                  -- propagate `size' down (slightly kludgy)
              extractCompType sdecl
            Just repr -> ptrAlias repr -- found a pointer hook alias
    --
    -- compute the result for a pointer alias
    --
    ptrAlias (isFun, ptrTy, wrapped, tyArg) =
      returnX $ DefinedET cdecl (isFun, ptrTy, wrapped, tyArg)
    --
    -- wrap an `ExtType' into a `CompType' and convert parametrised pointers
    -- to `Addr' if needed (legacy FFI mode)
    --
    returnX retval@(PtrET et) = do
      keepOld <- getSwitch oldFFI
      if keepOld
        then return $ ExtType (PrimET CPtrPT)
        else return $ ExtType retval
    returnX retval = return $ ExtType retval
    --
    tracePtrType = traceGenBind $ "extractCompType: explicit pointer type\n"
    traceFunType = traceGenBind $ "extractCompType: explicit function type\n"
    traceAliasOrSpecType Nothing = traceGenBind $
      "extractCompType: checking for alias\n"
    traceAliasOrSpecType (Just _) = traceGenBind $
      "extractCompType: checking for alias of bitfield\n"
    traceAlias ide = traceGenBind $
      "extractCompType: found an alias called `" ++ identToLexeme ide ++ "'\n"
-- C to Haskell type mapping described in the DOCU section
--
-- C to Haskell type mapping described in the DOCU section; the specifier
-- lists are matched as multisets (order-insensitively) by `specType's
-- `matches' predicate, so eg `int signed' finds the `[signed, int]' entry.
-- The attribute fields of the specifiers are never inspected, hence
-- `undefined' is fine there.
typeMap :: [([CTypeSpec], ExtType)]
typeMap = [([void] , UnitET ),
           ([char] , PrimET CCharPT ),
           ([unsigned, char] , PrimET CUCharPT ),
           ([signed, char] , PrimET CSCharPT ),
           ([signed] , PrimET CIntPT ),
           ([int] , PrimET CIntPT ),
           ([signed, int] , PrimET CIntPT ),
           ([short] , PrimET CShortPT ),
           ([short, int] , PrimET CShortPT ),
           ([signed, short] , PrimET CShortPT ),
           ([signed, short, int] , PrimET CShortPT ),
           ([long] , PrimET CLongPT ),
           ([long, int] , PrimET CLongPT ),
           ([signed, long] , PrimET CLongPT ),
           ([signed, long, int] , PrimET CLongPT ),
           ([long, long] , PrimET CLLongPT ),
           ([long, long, int] , PrimET CLLongPT ),
           ([signed, long, long] , PrimET CLLongPT ),
           ([signed, long, long, int] , PrimET CLLongPT ),
           ([unsigned] , PrimET CUIntPT ),
           ([unsigned, int] , PrimET CUIntPT ),
           ([unsigned, short] , PrimET CUShortPT ),
           ([unsigned, short, int] , PrimET CUShortPT ),
           ([unsigned, long] , PrimET CULongPT ),
           ([unsigned, long, int] , PrimET CULongPT ),
           ([unsigned, long, long] , PrimET CULLongPT ),
           ([unsigned, long, long, int] , PrimET CULLongPT ),
           ([float] , PrimET CFloatPT ),
           ([double] , PrimET CDoublePT ),
           ([long, double] , PrimET CLDoublePT),
           ([enum] , PrimET CIntPT )]
  where
    void = CVoidType undefined
    char = CCharType undefined
    short = CShortType undefined
    int = CIntType undefined
    long = CLongType undefined
    float = CFloatType undefined
    double = CDoubleType undefined
    signed = CSignedType undefined
    unsigned = CUnsigType undefined
    enum = CEnumType undefined undefined
-- | Compute the complex (external) type determined by a list of type
-- specifiers.
--
-- * may not be called for a specifier that defines a typedef alias
--
specType :: Position -> [CDeclSpec] -> Maybe CExpr -> GB CompType
specType cpos specs osize =
  let tspecs = [ts | CTypeSpec ts <- specs]
  in case lookupTSpec tspecs typeMap of
       Just et | isUnsupportedType et -> unsupportedTypeSpecErr cpos
               | isNothing osize      -> return $ ExtType et          -- not a bitfield
               | otherwise            -> bitfieldSpec tspecs et osize -- bitfield
       Nothing ->
         case tspecs of
           [CSUType cu _]  -> return $ SUType cu                -- struct or union
           [CEnumType _ _] -> return $ ExtType (PrimET CIntPT)  -- enum
           [CTypeDef _ _]  -> interr "GenBind.specType: Illegal typedef alias!"
           _               -> illegalTypeSpecErr cpos
  where
    lookupTSpec = lookupBy matches
    --
    -- an unsupported primitive type reports size 0 (can't be a bitfield yet)
    isUnsupportedType (PrimET et) = size et == 0
    isUnsupportedType _           = False
    --
    -- check whether two type specifier lists denote the same type; handles
    -- types like `long long' correctly, as `deleteBy' removes only the first
    -- occurrence of the given element
    --
    matches :: [CTypeSpec] -> [CTypeSpec] -> Bool
    []           `matches` []    = True
    []           `matches` (_:_) = False
    (spec:specs) `matches` specs'
      | any (eqSpec spec) specs' = specs `matches` deleteBy eqSpec spec specs'
      | otherwise                = False
    --
    -- specifier equality, ignoring the attribute payloads
    eqSpec (CVoidType   _) (CVoidType   _) = True
    eqSpec (CCharType   _) (CCharType   _) = True
    eqSpec (CShortType  _) (CShortType  _) = True
    eqSpec (CIntType    _) (CIntType    _) = True
    eqSpec (CLongType   _) (CLongType   _) = True
    eqSpec (CFloatType  _) (CFloatType  _) = True
    eqSpec (CDoubleType _) (CDoubleType _) = True
    eqSpec (CSignedType _) (CSignedType _) = True
    eqSpec (CUnsigType  _) (CUnsigType  _) = True
    eqSpec (CSUType   _ _) (CSUType   _ _) = True
    eqSpec (CEnumType _ _) (CEnumType _ _) = True
    eqSpec (CTypeDef  _ _) (CTypeDef  _ _) = True
    eqSpec _               _               = False
    --
    -- turn a bitfield-annotated specifier list into a field type; the size
    -- expression is folded to a constant first
    bitfieldSpec :: [CTypeSpec] -> ExtType -> Maybe CExpr -> GB CompType
    bitfieldSpec tspecs et (Just sizeExpr) =  -- never called with `Nothing'
      do
        let pos = posOf sizeExpr
        sizeResult <- evalConstCExpr sizeExpr
        case sizeResult of
          FloatResult _   -> illegalConstExprErr pos "a float result"
          IntResult size' -> do
            let size = fromInteger size'
            case et of
              PrimET CUIntPT -> returnCT $ CUFieldPT size
              PrimET CIntPT
                | [signed]      `matches` tspecs
                  || [signed, int] `matches` tspecs -> returnCT $ CSFieldPT size
                | [int]         `matches` tspecs    ->
                  -- plain `int' bitfields: signedness is target-dependent
                  returnCT $ if bitfieldIntSigned then CSFieldPT size
                                                  else CUFieldPT size
              _ -> illegalFieldSizeErr pos
      where
        returnCT = return . ExtType . PrimET
        --
        int    = CIntType    undefined
        signed = CSignedType undefined
-- offset and size computations
-- ----------------------------
-- precise size representation
--
-- * this is a pair of a number of octets and a number of bits
--
-- * if the number of bits is nonzero, the octet component is aligned by the
--   alignment constraint for `CIntPT' (important for accessing bitfields with
--   more than 8 bits)
--
data BitSize = BitSize Int Int
             deriving (Eq, Show)
-- ordering relation compares in terms of required storage units
--
-- NOTE(review): only `<' and `<=' are defined; the default `compare' is
-- derived from `<=' plus structural `=='.  Two values with equal padded
-- storage but different octet components are neither `<' nor `==', so
-- `compare' would report GT for both orders -- confirm such pairs cannot
-- reach `maximum' in `sizeAlignOfStruct'.
--
instance Ord BitSize where
  bs1@(BitSize o1 b1) <  bs2@(BitSize o2 b2) =
    padBits bs1 < padBits bs2 || (o1 == o2 && b1 < b2)
  bs1                 <= bs2                 = bs1 < bs2 || bs1 == bs2
  -- the <= instance is needed for Ord's compare functions, which is used in
  -- the defaults for all other members
-- | Add two bit size values; the bit components carry over into octets in
-- units of one bitfield storage unit (the size of `int').
--
addBitSize :: BitSize -> BitSize -> BitSize
addBitSize (BitSize o1 b1) (BitSize o2 b2) = BitSize (o1 + o2 + overflow) rest
  where
    bitsPerBitfield  = size CIntPT * 8
    (overflow, rest) = (b1 + b2) `divMod` bitsPerBitfield
-- | Pad any storage unit that is partially used by a bitfield: a nonzero
-- bit component costs one extra `int'-sized storage unit.
--
padBits :: BitSize -> Int
padBits (BitSize o 0) = o
padBits (BitSize o _) = o + size CIntPT
-- | Compute the offset of the declarator in the second argument when it is
-- preceded by the declarators in the first argument.
--
offsetInStruct :: [CDecl] -> CDecl -> CStructTag -> GB BitSize
offsetInStruct []    _    _   = return $ BitSize 0 0
offsetInStruct decls decl tag =
  do
    (offset, _) <- sizeAlignOfStruct decls tag
    (_, align)  <- sizeAlignOf decl
    -- the new member starts at the old end, rounded up to its alignment
    return $ alignOffset offset align
-- | Compute the size and alignment (no padding at the end) of a set of
-- declarators from a struct.
--
-- For a union the size is the maximum of the member sizes.
--
-- NOTE(review): the struct case recurses with `init'/`last', which is
-- quadratic in the number of members; harmless for typical structs.
--
sizeAlignOfStruct :: [CDecl] -> CStructTag -> GB (BitSize, Int)
sizeAlignOfStruct []    _          = return (BitSize 0 0, 1)
sizeAlignOfStruct decls CStructTag =
  do
    (offset, preAlign) <- sizeAlignOfStruct (init decls) CStructTag
    (size, align)      <- sizeAlignOf (last decls)
    let sizeOfStruct  = alignOffset offset align `addBitSize` size
        -- a non-positive alignment marks a bitfield member
        align'        = if align > 0 then align else bitfieldAlignment
        alignOfStruct = preAlign `max` align'
    return (sizeOfStruct, alignOfStruct)
sizeAlignOfStruct decls CUnionTag  =
  do
    (sizes, aligns) <- mapAndUnzipM sizeAlignOf decls
    let aligns' = [if align > 0 then align else bitfieldAlignment
                  | align <- aligns]
    return (maximum sizes, maximum aligns')
-- | Size and alignment of the declarators forming a struct, including any
-- end-of-struct padding needed to make the struct ``tile in an array''
-- (K&R A7.4.8).
--
sizeAlignOfStructPad :: [CDecl] -> CStructTag -> GB (BitSize, Int)
sizeAlignOfStructPad decls tag =
  do
    (rawSize, align) <- sizeAlignOfStruct decls tag
    let paddedSize = alignOffset rawSize align
    return (paddedSize, align)
-- | Compute the size and alignment constraint of a given C declaration.
--
-- * we make use of the assertion that `extractCompType' can only return a
--   `DefinedET' when the declaration is a pointer declaration
--
sizeAlignOf :: CDecl -> GB (BitSize, Int)
sizeAlignOf (CDecl specs [(Just declr, _, size)] ats) | isArrDeclr declr =
  interr $ "sizeAlignOf: calculating size of constant array not supported."
sizeAlignOf cdecl =
  do
    ct <- extractCompType cdecl
    case ct of
      ExtType (FunET _ _ )     -> return (bitSize CFunPtrPT,
                                          alignment CFunPtrPT)
      ExtType (IOET _ )        -> interr "GenBind.sizeof: Illegal IO type!"
      ExtType (PtrET t )
        | isFunExtType t       -> return (bitSize CFunPtrPT,
                                          alignment CFunPtrPT)
        | otherwise            -> return (bitSize CPtrPT, alignment CPtrPT)
      ExtType (DefinedET _ _ ) -> return (bitSize CPtrPT, alignment CPtrPT)
      -- FIXME: The defined type could be a function pointer!!!
      ExtType (PrimET pt )     -> return (bitSize pt, alignment pt)
      ExtType UnitET           -> voidFieldErr (posOf cdecl)
      SUType su                ->
        do
          let (fields, tag) = structMembers su
          -- when the struct/union is only declared (no members) but named,
          -- chase the tag to find the real definition
          fields' <- let ide = structName su
                     in
                       if (not . null $ fields) || isNothing ide
                       then return fields
                       else do                              -- get the real...
                         tag <- findTag (fromJust ide)      -- ...definition
                         case tag of
                           Just (StructUnionCT su) -> return
                                                        (fst . structMembers $ su)
                           _                       -> return fields
          sizeAlignOfStructPad fields' tag
  where
    -- a negative size reported by `size' encodes a number of bits
    bitSize et | sz < 0    = BitSize 0 (-sz)
               | otherwise = BitSize sz 0
      where
        sz = size et
-- | Apply the given alignment constraint at the given offset.
--
-- * if the alignment constraint is negative or zero, it is the alignment
--   constraint for a bitfield
--
alignOffset :: BitSize -> Int -> BitSize
alignOffset offset@(BitSize octetOffset bitOffset) align
  | align > 0 && bitOffset /= 0 =         -- close bitfield first
    alignOffset (BitSize (octetOffset + (bitOffset + 7) `div` 8) 0) align
  | align > 0 && bitOffset == 0 =         -- no bitfields involved
    BitSize (((octetOffset - 1) `div` align + 1) * align) 0
  | bitOffset == 0                        -- start a bitfield
    || overflowingBitfield      =         -- .. or overflowing bitfield
    alignOffset offset bitfieldAlignment
  | otherwise                   =         -- stays in current bitfield
    offset
  where
    bitsPerBitfield     = size CIntPT * 8
    overflowingBitfield = bitOffset - align >= bitsPerBitfield
                          -- note, `align' is negative
-- constant folding
-- ----------------
-- | Evaluate a constant expression.
--
-- FIXME: this is a bit too simplistic, as the range of expression allowed as
--        constant expression varies depending on the context in which the
--        constant expression occurs
--
evalConstCExpr :: CExpr -> GB ConstResult
evalConstCExpr (CComma _ at) =
  illegalConstExprErr (posOf at) "a comma expression"
evalConstCExpr (CAssign _ _ _ at) =
  illegalConstExprErr (posOf at) "an assignment"
evalConstCExpr (CCond b (Just t) e _) =
  do
    bv <- evalConstCExpr b
    case bv of
      IntResult bvi -> if bvi /= 0 then evalConstCExpr t else evalConstCExpr e
      FloatResult _ -> illegalConstExprErr (posOf b) "a float result"
evalConstCExpr (CBinary op lhs rhs at) =
  do
    lhsVal <- evalConstCExpr lhs
    rhsVal <- evalConstCExpr rhs
    -- bring both operands to a common numeric kind before applying the op
    let (lhsVal', rhsVal') = usualArithConv lhsVal rhsVal
    applyBin (posOf at) op lhsVal' rhsVal'
evalConstCExpr (CCast _ _ _) =
  todo "GenBind.evalConstCExpr: Casts are not implemented yet."
evalConstCExpr (CUnary op arg at) =
  do
    argVal <- evalConstCExpr arg
    applyUnary (posOf at) op argVal
evalConstCExpr (CSizeofExpr _ _) =
  todo "GenBind.evalConstCExpr: sizeof not implemented yet."
evalConstCExpr (CSizeofType decl _) =
  do
    (size, _) <- sizeAlignOf decl
    -- sizeof counts whole (padded) storage units
    return $ IntResult (fromIntegral . padBits $ size)
evalConstCExpr (CAlignofExpr _ _) =
  todo "GenBind.evalConstCExpr: alignof (GNU C extension) not implemented yet."
evalConstCExpr (CAlignofType decl _) =
  do
    (_, align) <- sizeAlignOf decl
    return $ IntResult (fromIntegral align)
evalConstCExpr (CIndex _ _ at) =
  illegalConstExprErr (posOf at) "array indexing"
evalConstCExpr (CCall _ _ at) =
  illegalConstExprErr (posOf at) "function call"
evalConstCExpr (CMember _ _ _ at) =
  illegalConstExprErr (posOf at) "a . or -> operator"
evalConstCExpr (CVar ide at) =
  do
    (cobj, _) <- findValueObj ide False
    case cobj of
      EnumCO ide (CEnum _ enumrs _) -> liftM IntResult $
                                         enumTagValue ide enumrs 0
      _                             ->
        todo $ "GenBind.evalConstCExpr: variable names not implemented yet " ++
               show (posOf at)
  where
    -- FIXME: this is not very nice; instead, CTrav should have some support
    --        for determining enum tag values (but then, constant folding needs
    --        to be moved to CTrav, too)
    --
    -- Compute the tag value for `ide' defined in the given enumerator list
    --
    enumTagValue _ [] _ =
      interr "GenBind.enumTagValue: enumerator not in declaration"
    enumTagValue ide ((ide', oexpr):enumrs) val =
      do
        -- an explicit initialiser resets the running tag value
        val' <- case oexpr of
          Nothing  -> return val
          Just exp ->
            do
              val' <- evalConstCExpr exp
              case val' of
                IntResult val' -> return val'
                FloatResult _  ->
                  illegalConstExprErr (posOf exp) "a float result"
        if ide == ide'
          then -- found the right enumerator
            return val'
          else -- continue down the enumerator list
            enumTagValue ide enumrs (val' + 1)
evalConstCExpr (CConst c _) =
  evalCConst c
-- | Evaluate a constant literal; character constants fold to their code
-- point, float literals are not supported yet.
evalCConst :: CConst -> GB ConstResult
evalCConst (CIntConst   i _ ) = return $ IntResult i
evalCConst (CCharConst  c _ ) = return $ IntResult (toInteger (fromEnum c))
evalCConst (CFloatConst s _ ) =
  todo "GenBind.evalCConst: Float conversion from literal misses."
evalCConst (CStrConst   s at) =
  illegalConstExprErr (posOf at) "a string constant"
-- | Apply C's usual arithmetic conversions: if either operand is a float
-- result, promote the other to float; otherwise leave both unchanged.
usualArithConv :: ConstResult -> ConstResult -> (ConstResult, ConstResult)
usualArithConv l r =
  case (l, r) of
    (FloatResult _, _) -> (l, toFloat r)
    (_, FloatResult _) -> (toFloat l, r)
    _                  -> (l, r)
-- | Promote an integral constant result to a float result; float results
-- pass through unchanged.
toFloat :: ConstResult -> ConstResult
toFloat r = case r of
  FloatResult _ -> r
  IntResult n   -> FloatResult (fromIntegral n)
-- | Apply a binary operator to two constant operands.
--
-- The operands must already have been through 'usualArithConv', so both are
-- either integral or floating results; mixed combinations are internal
-- errors.
applyBin :: Position
         -> CBinaryOp
         -> ConstResult
         -> ConstResult
         -> GB ConstResult
applyBin _    CMulOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x * y)
applyBin _    CMulOp (FloatResult x)
                     (FloatResult y) = return $ FloatResult (x * y)
applyBin _    CDivOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x `div` y)
applyBin _    CDivOp (FloatResult x)
                     (FloatResult y) = return $ FloatResult (x / y)
applyBin _    CRmdOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x `mod` y)
applyBin cpos CRmdOp (FloatResult _)
                     (FloatResult _) =
  illegalConstExprErr cpos "a % operator applied to a float"
applyBin _    CAddOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x + y)
applyBin _    CAddOp (FloatResult x)
                     (FloatResult y) = return $ FloatResult (x + y)
applyBin _    CSubOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x - y)
applyBin _    CSubOp (FloatResult x)
                     (FloatResult y) = return $ FloatResult (x - y)
-- shifts are realised as multiplication/division by powers of two;
-- NOTE(review): a negative right operand makes 2^y throw -- confirm the
-- front end cannot deliver one here
applyBin _    CShlOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x * 2^y)
applyBin cpos CShlOp (FloatResult _)
                     (FloatResult _) =
  illegalConstExprErr cpos "a << operator applied to a float"
applyBin _    CShrOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x `div` 2^y)
applyBin cpos CShrOp (FloatResult _)
                     (FloatResult _) =
  illegalConstExprErr cpos "a >> operator applied to a float"
applyBin _    CAndOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x .&. y)
applyBin _    COrOp  (IntResult x)
                     (IntResult y)   = return $ IntResult (x .|. y)
applyBin _    CXorOp (IntResult x)
                     (IntResult y)   = return $ IntResult (x `xor` y)
applyBin _    _      (IntResult _)
                     (IntResult _)   =
  todo "GenBind.applyBin: Not yet implemented operator in constant expression."
applyBin _    _      (FloatResult _)
                     (FloatResult _) =
  todo "GenBind.applyBin: Not yet implemented operator in constant expression."
applyBin _    _      _ _             =
  interr "GenBind.applyBinOp: Illegal combination!"
-- | Apply a unary operator to a constant result; operators with side
-- effects and address operators are rejected outright.
applyUnary :: Position -> CUnaryOp -> ConstResult -> GB ConstResult
applyUnary cpos CPreIncOp  _               =
  illegalConstExprErr cpos "a ++ operator"
applyUnary cpos CPreDecOp  _               =
  illegalConstExprErr cpos "a -- operator"
applyUnary cpos CPostIncOp _               =
  illegalConstExprErr cpos "a ++ operator"
applyUnary cpos CPostDecOp _               =
  illegalConstExprErr cpos "a -- operator"
applyUnary cpos CAdrOp     _               =
  illegalConstExprErr cpos "a & operator"
applyUnary cpos CIndOp     _               =
  illegalConstExprErr cpos "a * operator"
applyUnary cpos CPlusOp    arg             = return arg
applyUnary cpos CMinOp     (IntResult x)   = return (IntResult (-x))
applyUnary cpos CMinOp     (FloatResult x) = return (FloatResult (-x))
applyUnary cpos CCompOp    (IntResult x)   = return (IntResult (complement x))
-- logical negation on integers, C style: zero maps to 1, nonzero to 0
applyUnary cpos CNegOp     (IntResult x)   =
  let r = toInteger . fromEnum $ (x == 0)
  in return (IntResult r)
applyUnary cpos CNegOp     (FloatResult _) =
  illegalConstExprErr cpos "! applied to a float"
-- auxilliary functions
-- --------------------
-- | Create an identifier without position information.
--
noPosIdent :: String -> Ident
noPosIdent = onlyPosIdent nopos
-- | Print a trace message, gated on the GenBind trace switch.
--
traceGenBind :: String -> GB ()
traceGenBind = putTraceStr traceGenBindSW
-- | Generic association-list lookup: return the value of the first pair
-- whose key satisfies the given equality predicate (predicate receives the
-- search key first).
lookupBy :: (a -> a -> Bool) -> a -> [(a, b)] -> Maybe b
lookupBy eq key = go
  where
    go []              = Nothing
    go ((k, v) : rest)
      | key `eq` k     = Just v
      | otherwise      = go rest
-- | Map a monadic operation over a `Maybe', discarding the result; a
-- `Nothing' performs no effect at all.
--
mapMaybeM_ :: Monad m => (a -> m b) -> Maybe a -> m ()
mapMaybeM_ f = maybe (return ()) (\x -> f x >> return ())
-- error messages
-- --------------
-- | Accessed member does not exist in the structure.
unknownFieldErr :: Position -> Ident -> GB a
unknownFieldErr cpos ide =
  raiseErrorCTExc (posOf ide)
    ["Unknown member name!",
     "The structure has no member called `" ++ identToLexeme ide
     ++ "'.  The structure is defined at",
     show cpos ++ "."]

-- | Structure/union used where automatic marshalling is required.
illegalStructUnionErr :: Position -> Position -> GB a
illegalStructUnionErr cpos pos =
  raiseErrorCTExc pos
    ["Illegal structure or union type!",
     "There is not automatic support for marshaling of structures and",
     "unions; the offending type is declared at "
     ++ show cpos ++ "."]

-- | Specifier list does not form a valid ANSI C type.
illegalTypeSpecErr :: Position -> GB a
illegalTypeSpecErr cpos =
  raiseErrorCTExc cpos
    ["Illegal type!",
     "The type specifiers of this declaration do not form a legal ANSI C(89) \
     \type."
    ]

-- | Type is legal C but not representable on this target.
unsupportedTypeSpecErr :: Position -> GB a
unsupportedTypeSpecErr cpos =
  raiseErrorCTExc cpos
    ["Unsupported type!",
     "The type specifier of this declaration is not supported by your C \
     \compiler."
    ]

-- | Variadic functions cannot be bound through the FFI.
variadicErr :: Position -> Position -> GB a
variadicErr pos cpos =
  raiseErrorCTExc pos
    ["Variadic function!",
     "Calling variadic functions is not supported by the FFI; the function",
     "is defined at " ++ show cpos ++ "."]

-- | Expression form not permitted in an ANSI C constant expression.
illegalConstExprErr :: Position -> String -> GB a
illegalConstExprErr cpos hint =
  raiseErrorCTExc cpos ["Illegal constant expression!",
                        "Encountered " ++ hint ++ " in a constant expression,",
                        "which ANSI C89 does not permit."]

-- | A struct field of type void was accessed.
voidFieldErr :: Position -> GB a
voidFieldErr cpos =
  raiseErrorCTExc cpos ["Void field in struct!",
                        "Attempt to access a structure field of type void."]

-- | Member access on something that is not a struct or union.
structExpectedErr :: Ident -> GB a
structExpectedErr ide =
  raiseErrorCTExc (posOf ide)
    ["Expected a structure or union!",
     "Attempt to access member `" ++ identToLexeme ide ++ "' in something not",
     "a structure or union."]

-- | Dereference or pointer hook applied to a non-pointer object.
ptrExpectedErr :: Position -> GB a
ptrExpectedErr pos =
  raiseErrorCTExc pos
    ["Expected a pointer object!",
     "Attempt to dereference a non-pointer object or to use it in a `pointer' \
     \hook."]

-- | Stable pointers are not permitted in class hooks.
illegalStablePtrErr :: Position -> GB a
illegalStablePtrErr pos =
  raiseErrorCTExc pos
    ["Illegal use of a stable pointer!",
     "Class hooks cannot be used for stable pointers."]

-- | Class hook pointer kind disagrees with its superclass.
pointerTypeMismatchErr :: Position -> String -> String -> GB a
pointerTypeMismatchErr pos className superName =
  raiseErrorCTExc pos
    ["Pointer type mismatch!",
     "The pointer of the class hook for `" ++ className
     ++ "' is of a different kind",
     "than that of the class hook for `" ++ superName ++ "'; this is illegal",
     "as the latter is defined to be an (indirect) superclass of the former."]

-- | Bitfield size annotation on a type other than (un)signed int.
illegalFieldSizeErr :: Position -> GB a
illegalFieldSizeErr cpos =
  raiseErrorCTExc cpos
    ["Illegal field size!",
     "Only signed and unsigned `int' types may have a size annotation."]

-- | Bitfields cannot be dereferenced.
derefBitfieldErr :: Position -> GB a
derefBitfieldErr pos =
  raiseErrorCTExc pos
    ["Illegal dereferencing of a bit field!",
     "Bit fields cannot be dereferenced."]

-- | Result marshalling must not contain an \"in\" marshaller.
resMarshIllegalInErr :: Position -> GB a
resMarshIllegalInErr pos =
  raiseErrorCTExc pos
    ["Malformed result marshalling!",
     "There may not be an \"in\" marshaller for the result."]

-- | Result marshalling must not use two C values.
resMarshIllegalTwoCValErr :: Position -> GB a
resMarshIllegalTwoCValErr pos =
  raiseErrorCTExc pos
    ["Malformed result marshalling!",
     "Two C values (i.e., the `&' symbol) are not allowed for the result."]

-- | Number of marshallers disagrees with the function arity.
marshArgMismatchErr :: Position -> String -> GB a
marshArgMismatchErr pos reason =
  raiseErrorCTExc pos
    ["Function arity mismatch!",
     reason]

-- | No default marshaller exists for this Haskell/C type combination.
noDftMarshErr :: Position -> String -> String -> [ExtType] -> GB a
noDftMarshErr pos inOut hsTy cTys =
  raiseErrorCTExc pos
    ["Missing " ++ inOut ++ " marshaller!",
     "There is no default marshaller for this combination of Haskell and \
     \C type:",
     "Haskell type: " ++ hsTy,
     "C type      : " ++ concat (intersperse " " (map showExtType cTys))]
| k0001/gtk2hs | tools/c2hs/gen/GenBind.hs | gpl-3.0 | 89,277 | 1 | 33 | 29,489 | 19,739 | 10,311 | 9,428 | 1,405 | 24 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Wall #-}
-- TODO: Complex Numbers
module Language.Fortran.Model.Op.Core.Match where
import Control.Monad ((>=>))
import Data.Typeable
import Control.Lens
import Data.Singletons
import Data.Singletons.Prelude.List
import Data.Vinyl hiding ((:~:), Field)
import Language.Fortran.Model.Op.Core.Core
import Language.Fortran.Model.Singletons
import Language.Fortran.Model.Types
import Language.Fortran.Model.Types.Match
import Language.Fortran.Model.Util
-- | Evidence that a type is a numeric primitive, packaging the precision
-- and basic-type singletons together with the primitive representation.
data MatchNumType a where
  MatchNumType :: Sing p -> Sing k -> NumericBasicType k -> Prim p k a -> MatchNumType (PrimS a)
-- | Checks if the given type is numeric, and if so returns a proof of that
-- fact.  Only integer and real primitives count as numeric here.
matchNumType :: D a -> Maybe (MatchNumType a)
matchNumType = matchPrimD >=> \case
  MatchPrimD (MatchPrim sp SBTInt) p  -> Just (MatchNumType sp SBTInt NBTInt p)
  MatchPrimD (MatchPrim sp SBTReal) p -> Just (MatchNumType sp SBTReal NBTReal p)
  _                                   -> Nothing
-- | Evidence that a binary numeric operation is possible on two primitive
-- types; carries the result primitive, which takes the maximum precision
-- and basic type of the two operands.
data MatchNumR a b where
  MatchNumR
    :: NumericBasicType k1 -> NumericBasicType k2
    -> Prim p1 k1 a -> Prim p2 k2 b
    -> Prim (PrecMax p1 p2) (BasicTypeMax k1 k2) c
    -> MatchNumR (PrimS a) (PrimS b)
-- | Checks if it is possible to perform a binary numeric operation on
-- arguments with the given respective types.  If so, returns the type that
-- would result plus some more information about the types.
matchNumR :: D a -> D b -> Maybe (MatchNumR a b)
matchNumR = matchingWith2 matchNumType matchNumType $ \case
  (MatchNumType sp1 sk1 nk1 prim1, MatchNumType sp2 sk2 nk2 prim2) ->
    -- result primitive is the least upper bound of the operand types
    makePrim (sPrecMax sp1 sp2) (sBasicTypeMax sk1 sk2) <$$> \case
      MakePrim prim3 -> MatchNumR nk1 nk2 prim1 prim2 prim3
-- | The least upper bound of two primitive types: maximum precision and
-- maximum basic type, if such a primitive exists.
primCeil :: Prim p1 k1 a -> Prim p2 k2 b -> Maybe (MakePrim (PrecMax p1 p2) (BasicTypeMax k1 k2))
primCeil prim1 prim2 = case (matchPrim prim1, matchPrim prim2) of
  (MatchPrim p1 k1, MatchPrim p2 k2) -> makePrim (sPrecMax p1 p2) (sBasicTypeMax k1 k2)
-- | Evidence that values of two primitive types can be compared with each
-- other (for equality or relationally).
data MatchCompareR a b where
  MatchCompareR :: ComparableBasicTypes k1 k2 -> Prim p1 k1 a -> Prim p2 k2 b -> MatchCompareR (PrimS a) (PrimS b)
-- | Checks if it is possible to perform a binary comparison (equality or
-- relational) operation on arguments with the given respective types.  If
-- so, returns proof of that fact.  Numeric pairs are tried first; failing
-- that, matching logical or character primitives are accepted.
matchCompareR :: D a -> D b -> Maybe (MatchCompareR a b)
matchCompareR =
  (matchingWithBoth matchNumR $ Just . \case
      MatchNumR nk1 nk2 p1 p2 _ -> MatchCompareR (CBTNum nk1 nk2) p1 p2
  ) `altf2`
  (matchingWith2 matchPrimD matchPrimD $ \case
      (MatchPrimD (MatchPrim _ SBTLogical) p1, MatchPrimD (MatchPrim _ SBTLogical) p2) ->
        Just (MatchCompareR CBTBool p1 p2)
      (MatchPrimD (MatchPrim _ SBTChar) p1, MatchPrimD (MatchPrim _ SBTChar) p2) ->
        Just (MatchCompareR CBTChar p1 p2)
      _ -> Nothing
  )
--------------------------------------------------------------------------------
-- Matching on operator result types
--------------------------------------------------------------------------------
-- | An operator specification paired with the type of the operator's
-- result.
data MatchOpSpec ok args where
  MatchOpSpec :: OpSpec ok args result -> D result -> MatchOpSpec ok args
-- | Checks if it is possible to apply the given operator to the given
-- arguments, and if so returns a proof of that fact, packaged with
-- information about the result of applying the operator.
--
-- Dispatch is on arity first (shape of the argument record), then on the
-- operator itself.
matchOpSpec :: Op (Length args) ok -> Rec D args -> Maybe (MatchOpSpec ok args)
matchOpSpec operator argTypes =
  case argTypes of
    -- nullary operators
    RNil -> case operator of
      OpLit -> Nothing

    -- unary operators
    d1 :& RNil -> case operator of
      OpNeg -> argsNumeric <$$> \case
        MatchNumType _ _ nk p :& RNil -> MatchOpSpec (OSNum1 nk p p) d1
      OpPos -> argsNumeric <$$> \case
        MatchNumType _ _ nk p :& RNil -> MatchOpSpec (OSNum1 nk p p) d1
      OpNot -> argsPrim >>= \case
        MatchPrimD (MatchPrim _ SBTLogical) p :& RNil -> Just $ MatchOpSpec (OSLogical1 p PBool8) (DPrim PBool8)
        _ -> Nothing

      -- In the deref case, we don't have access to a particular field to
      -- dereference, so there's nothing we can return.
      OpDeref -> Nothing

    -- binary operators
    d1 :& d2 :& RNil -> case operator of
      OpAdd -> matchNumR d1 d2 <$$> \case
        MatchNumR nk1 nk2 p1 p2 p3 -> MatchOpSpec (OSNum2 nk1 nk2 p1 p2 p3) (DPrim p3)
      OpSub -> matchNumR d1 d2 <$$> \case
        MatchNumR nk1 nk2 p1 p2 p3 -> MatchOpSpec (OSNum2 nk1 nk2 p1 p2 p3) (DPrim p3)
      OpMul -> matchNumR d1 d2 <$$> \case
        MatchNumR nk1 nk2 p1 p2 p3 -> MatchOpSpec (OSNum2 nk1 nk2 p1 p2 p3) (DPrim p3)
      OpDiv -> matchNumR d1 d2 <$$> \case
        MatchNumR nk1 nk2 p1 p2 p3 -> MatchOpSpec (OSNum2 nk1 nk2 p1 p2 p3) (DPrim p3)

      OpAnd -> argsPrim >>= \case
        MatchPrimD (MatchPrim _ SBTLogical) p1 :& MatchPrimD (MatchPrim _ SBTLogical) p2 :& RNil ->
          Just $ MatchOpSpec (OSLogical2 p1 p2 PBool8) (DPrim PBool8)
        _ -> Nothing
      OpOr -> argsPrim >>= \case
        MatchPrimD (MatchPrim _ SBTLogical) p1 :& MatchPrimD (MatchPrim _ SBTLogical) p2 :& RNil ->
          Just $ MatchOpSpec (OSLogical2 p1 p2 PBool8) (DPrim PBool8)
        _ -> Nothing
      OpEquiv -> argsPrim >>= \case
        MatchPrimD (MatchPrim _ SBTLogical) p1 :& MatchPrimD (MatchPrim _ SBTLogical) p2 :& RNil ->
          Just $ MatchOpSpec (OSLogical2 p1 p2 PBool8) (DPrim PBool8)
        _ -> Nothing
      OpNotEquiv -> argsPrim >>= \case
        MatchPrimD (MatchPrim _ SBTLogical) p1 :& MatchPrimD (MatchPrim _ SBTLogical) p2 :& RNil ->
          Just $ MatchOpSpec (OSLogical2 p1 p2 PBool8) (DPrim PBool8)
        _ -> Nothing

      OpEq -> matchCompareR d1 d2 <$$> \case
        MatchCompareR cmp p1 p2 -> MatchOpSpec (OSEq cmp p1 p2 PBool8) (DPrim PBool8)
      OpNE -> matchCompareR d1 d2 <$$> \case
        MatchCompareR cmp p1 p2 -> MatchOpSpec (OSEq cmp p1 p2 PBool8) (DPrim PBool8)
      OpLT -> matchCompareR d1 d2 <$$> \case
        MatchCompareR cmp p1 p2 -> MatchOpSpec (OSRel cmp p1 p2 PBool8) (DPrim PBool8)
      OpLE -> matchCompareR d1 d2 <$$> \case
        MatchCompareR cmp p1 p2 -> MatchOpSpec (OSRel cmp p1 p2 PBool8) (DPrim PBool8)
      OpGT -> matchCompareR d1 d2 <$$> \case
        MatchCompareR cmp p1 p2 -> MatchOpSpec (OSRel cmp p1 p2 PBool8) (DPrim PBool8)
      OpGE -> matchCompareR d1 d2 <$$> \case
        MatchCompareR cmp p1 p2 -> MatchOpSpec (OSRel cmp p1 p2 PBool8) (DPrim PBool8)

      -- array lookup: the index primitive must match the array's index type
      OpLookup -> with (d1, d2) $ traverseOf _2 matchPrimD >=> \case
        (DArray (Index pi1) av, MatchPrimD _ pi2) -> case eqPrim pi1 pi2 of
          Just Refl -> Just $ MatchOpSpec (OSLookup d1) (dArrValue av)
          _         -> Nothing
        _ -> Nothing

    _ -> Nothing
  where
    argsNumeric = rtraverse matchNumType argTypes
    argsPrim    = rtraverse matchPrimD argTypes
| dorchard/camfort | src/Language/Fortran/Model/Op/Core/Match.hs | apache-2.0 | 7,394 | 0 | 21 | 1,902 | 2,213 | 1,108 | 1,105 | 120 | 28 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE FlexibleContexts #-}
module Pontarius.E2E.Message where
import Control.Monad
import Control.Monad.Except
import Control.Monad.State.Strict
import qualified Crypto.Random as CRandom
import qualified Data.ByteString as BS
import Data.Word (Word8)
import Pontarius.E2E.Monad
import Pontarius.E2E.Types
import Pontarius.E2E.Helpers
import Pontarius.E2E.Serialize
-- | Verify and decrypt an incoming data message, ratcheting our own and
-- the peer's DH keys forward as dictated by the key ids in the message.
-- Fails unless the session is in the encrypted state and the MAC checks
-- out.
decryptDataMessage :: CRandom.CPRG g => DataMessage -> E2E g BS.ByteString
decryptDataMessage msg = do
    s <- get
    unless (isEncrypted $ msgState s) . throwError
        $ WrongState "decryptDataMessage"
    MK{ recvEncKey
      , recvMacKey } <- makeMessageKeys (senderKeyID msg) (recipientKeyID msg)
    check <- parameter paramCheckMac
    -- the MAC covers the serialised message (with the MAC field excluded)
    protocolGuard MACFailure "message" $ check recvMacKey (encodeMessageBytes msg)
                                               (messageMAC msg)
    -- accept only our current key id or the immediately following one
    case () of
        () | recipientKeyID msg == ourKeyID s     -> return ()
           | recipientKeyID msg == ourKeyID s + 1 -> shiftKeys
           | otherwise -> throwError $ ProtocolError WrongKeyID ""
    pl <- decCtr recvEncKey (ctrHi msg) (messageEnc msg)
    shiftTheirKeys (nextDHy msg) (senderKeyID msg)
    return pl
  where
    isEncrypted MsgStateEncrypted{} = True
    isEncrypted _                   = False
    -- rotate our DH key pair: current becomes previous, next becomes
    -- current, and a fresh pair is generated for next
    shiftKeys = do
        newDH <- makeDHKeyPair
        s <- get
        put s{ ourPreviousKey = ourCurrentKey s
             , ourCurrentKey  = nextDH s
             , nextDH         = newDH
             , ourKeyID       = ourKeyID s + 1
             }
    -- adopt the peer's announced next DH key once their key id advances
    shiftTheirKeys newKey keyID = do
        s <- get
        when (keyID == theirKeyID s) $
            put s{ theirPreviousKey = theirCurrentKey s
                 , theirCurrentKey  = Just newKey
                 , theirKeyID       = theirKeyID s + 1
                 }
-- | Derive the four message keys (send/receive encryption and MAC) for
-- the combination of the peer key with id @tKeyID@ and our key with id
-- @oKeyID@, from the DH shared secret of those two keys.
makeMessageKeys :: Integer
                -> Integer
                -> E2E g MessageKeys
makeMessageKeys tKeyID oKeyID = do
    s <- get
    -- select the peer DH public key matching the requested key id
    tck <- case ( tKeyID == theirKeyID s - 1
                , tKeyID == theirKeyID s
                , theirPreviousKey s
                , theirCurrentKey s
                ) of
               (True, _   , Just tpk , _       ) -> return tpk
               (True, _   , Nothing  , _       ) -> throwError NoPeerDHKey
               (_   , True, _        , Just tck) -> return tck
               (_   , True, _        , Nothing ) -> throwError NoPeerDHKey
               _ -> throwError $ ProtocolError WrongKeyID ""
    -- select our DH key pair matching the requested key id
    ok <- case ( oKeyID == ourKeyID s
               , oKeyID == ourKeyID s + 1
               ) of
              (True, _) -> return $ ourCurrentKey s
              (_, True) -> return $ nextDH s
              _ -> throwError $ ProtocolError WrongKeyID ""
    sharedSecret <- makeDHSharedSecret (priv ok) tck
    let secBytes = encodeInteger sharedSecret
        -- key-derivation bytes depend on which side has the "higher" key,
        -- so both peers derive mirrored send/receive keys
        (sendByte, recvByte) = if tck <= pub ok
                               then (0x01, 0x02) :: (Word8, Word8)
                               else (0x02, 0x01)
    let h1 b = hash (BS.singleton b `BS.append` secBytes)
    -- TODO: Check against yabasta
    sendEncKey <- h1 sendByte
    sendMacKey <- hash sendEncKey
    recvEncKey <- h1 recvByte
    recvMacKey <- hash recvEncKey
    return MK{ sendEncKey
             , sendMacKey
             , recvEncKey
             , recvMacKey
             }
-- | Encrypt a payload under the current message keys and wrap it into a
-- MAC'ed data message, bumping the session counter.  Fails unless the
-- session is in the encrypted state.
encryptDataMessage :: BS.ByteString -> E2E g DataMessage
encryptDataMessage payload = do
    s <- get
    unless (isEncrypted $ msgState s) $ throwError (WrongState "encryptDataMessage")
    mk <- makeMessageKeys (theirKeyID s) (ourKeyID s)
    pl <- encCtr (sendEncKey mk) (encodeInteger $ counter s) payload
    -- build the message with an empty MAC first; the MAC is computed over
    -- the serialised message and patched in afterwards
    let msg = DM { senderKeyID    = ourKeyID s
                 , recipientKeyID = theirKeyID s
                 , nextDHy        = pub $ nextDH s
                 , ctrHi          = encodeInteger $ counter s
                 , messageEnc     = pl
                 , messageMAC     = BS.empty
                 }
    messageMAC <- mac (sendMacKey mk) (encodeMessageBytes msg)
    put s{counter = counter s + 1}
    return $ msg{messageMAC = messageMAC}
  where
    isEncrypted MsgStateEncrypted{} = True
    isEncrypted _                   = False
| Philonous/pontarius-xmpp-e2e | source/Pontarius/E2E/Message.hs | apache-2.0 | 4,327 | 0 | 15 | 1,595 | 1,218 | 612 | 606 | 95 | 8 |
{-# LANGUAGE DeriveGeneric #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Dan Doel 2014
-- License : BSD2
-- Maintainer: Dan Doel <[email protected]>
-- Stability : experimental
-- Portability: non-portable
--------------------------------------------------------------------
module Ermine.Unification.Class
( ClassCheck(ClassCheck)
, instantiateClass
) where
import Bound
import Bound.Scope
import Bound.Var
import Control.Applicative
import Control.Lens
import Data.Map as Map
import Data.Text
import Data.Traversable
import Data.Void
import Ermine.Syntax.Class
import Ermine.Syntax.Global
import Ermine.Syntax.Hint
import Ermine.Syntax.Kind as Kind
import Ermine.Syntax.Type as Type
import Ermine.Syntax.Term as Term
import Ermine.Unification.Meta
import GHC.Generics
data ClassCheck s = ClassCheck
{ _cctparams :: [(Hint, KindM s)]
, _cccxt :: [Scope Int (Type (KindM s)) Text]
, _ccsigs :: Map Global (Type (KindM s) (Var Int Text))
, _ccdefs :: Map Global (Bodies (Annot Void Text) Void)
}
deriving (Eq, Show, Generic)
instantiateClass :: Class () Text -> M s (Schema (MetaK s), ClassCheck s)
instantiateClass cls = do
clazz@(Class ks ts cxt sigs defs) <- kindVars (\_ -> newShallowMeta 0 False Nothing) cls
mks <- for ks $ newMeta False
tks <- for ts $ \(h, _) -> (,) h . pure <$> newShallowMeta 0 False Nothing
return $ ( schema clazz
, ClassCheck
tks
(hoistScope (over kindVars $ pure . unvar (mks!!) id) <$> cxt)
(over kindVars (pure . unvar (mks!!) id) <$> sigs)
defs
)
| PipocaQuemada/ermine | src/Ermine/Unification/Class.hs | bsd-2-clause | 1,742 | 0 | 16 | 423 | 497 | 277 | 220 | 41 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Formalize.Html
( formHtml
, pdfHtml
) where
import Control.Monad.IO.Class (MonadIO)
import Data.Text.Lazy as LT (Text)
import Formalize.Types
import System.FilePath
import Text.Hastache
import Text.Hastache.Context
-- | Render the HTML for the view containing the main form.
formHtml :: FormData -> IO LT.Text
formHtml fd = mustache "form.mustache" (mkGenericContext fd)
-- | Render the HTML that serves as input to the PDF renderer.
pdfHtml :: FormData -> IO LT.Text
pdfHtml fd = mustache "pdf.mustache" (mkGenericContext fd)
-- | Location of the mustache view templates, relative to the app root.
viewFolder :: FilePath
viewFolder = "web/view"
-- | Render the named mustache template from 'viewFolder' with the
-- supplied context.
mustache :: MonadIO m => FilePath -> MuContext m -> m LT.Text
mustache file context =
    hastacheFile defaultConfig (viewFolder </> file) context
| Lepovirta/Crystallize | src/Formalize/Html.hs | bsd-3-clause | 844 | 0 | 9 | 196 | 179 | 101 | 78 | 19 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
module Mimir.Types where
import Control.Lens.Lens (Lens')
import Control.Lens.TH
import Control.Monad.Except (ExceptT)
import Control.Monad.Reader (ReaderT)
import Network.HTTP.Nano (HttpCfg, HttpError)
-- | Lens access to an exchange value of type @e@ inside an environment @r@.
class HasExchange e r where
    exchange :: Lens' r e

-- | An exchange together with the monad its operations run in; the
-- injectivity annotation lets the exchange be recovered from the monad.
class Exchange e where
    type ExchangeM e = (m :: * -> *) | m -> e

-- | Exchanges that can report a ticker.
class Exchange e => TickerP e where
    type TickerT e :: *
    ticker :: ExchangeM e (TickerT e)

-- | Exchanges that support spot trading: balances plus order management.
class Exchange e => SpotP e where
    type SpotBalancesT e :: *
    type SpotOrderT e :: *
    type SpotOrderIDT e :: *
    spotBalances :: ExchangeM e (SpotBalancesT e)
    currentSpotOrders :: ExchangeM e [SpotOrderT e]
    placeSpotOrder :: SpotOrderT e -> ExchangeM e (SpotOrderIDT e)
    cancelSpotOrder :: SpotOrderIDT e -> ExchangeM e ()

-- | Trading monad: reader over the context, short-circuiting on
-- 'TradeError', on top of IO.
type TradeM e = ReaderT (Ctx e) (ExceptT TradeError IO)

-- | Runtime context: HTTP configuration plus the exchange in use.
data Ctx e = Ctx {
    _ctxHttpCfg :: HttpCfg,
    _ctxExchange :: e
}

-- | Errors raised while trading.
data TradeError
    = THttpError HttpError   -- transport-level failure
    | TLogicError String     -- violated expectation in trading logic
    deriving Show
---
--- Standard data types
---
data Ticker = Ticker {
_tiTimeUTCMS :: Int,
_tiAsk :: Double,
_tiBid :: Double,
_tiLast :: Double
} deriving (Eq, Show)
type CandleInterval = Int
data Candle = Candle {
_caTimeUTC :: Int,
_caOpen :: Double,
_caClose :: Double,
_caHigh :: Double,
_caLow :: Double,
_caVolume :: Double
} deriving (Eq, Show)
data OrderBook = OrderBook {
_obBids :: [OrderBookEntry],
_obAsks :: [OrderBookEntry]
} deriving (Eq, Show)
data OrderBookEntry = OrderBookEntry {
_oeVolume :: Double,
_oePrice :: Double
} deriving (Eq, Show)
data Trade = Trade {
_trTimeUTCMS :: Int,
_trUnitPrice :: Double,
_trVolume :: Double,
_trType :: OrderType
} deriving (Eq, Show)
data Order = Order {
_oType :: OrderType,
_oID :: Int,
_oTimeUTCMS :: Int,
_oVolume :: Double,
_oUnitPrice :: Double
} deriving (Eq, Show)
data OrderType
= LIMIT_BUY
| LIMIT_SELL
| MARKET_BUY
| MARKET_SELL
deriving (Eq, Read, Show)
data OrderResponse = OrderResponse String deriving (Eq, Show)
data Balances = Balances {
_bCurrency :: Double,
_bCommodity :: Double
} deriving (Eq, Show)
makeLenses ''Ctx
makeClassyPrisms ''TradeError
makeLenses ''Ticker
makeLenses ''Candle
makeLenses ''OrderBook
makeLenses ''OrderBookEntry
makeLenses ''Trade
makeLenses ''Order
makeLenses ''Balances
| ralphmorton/Mimir | src/Mimir/Types.hs | bsd-3-clause | 2,550 | 1 | 10 | 560 | 762 | 431 | 331 | 90 | 0 |
{-# language ViewPatterns, ScopedTypeVariables, DeriveDataTypeable #-}
module Sorts.Robots.PathRobots.Path where
import Safe
import Data.Abelian
import Data.Typeable
import Data.Data
import Data.Accessor
import Physics.Chipmunk hiding (start, end)
import Graphics.Qt hiding (scale)
import Base
import Utils hiding (distance)
import Sorts.Robots.PathRobots.Configuration
import Editor.Scene.Rendering
import Editor.Scene.Rendering.Helpers
-- | Describes the path of a platform.
-- A platform path can be thought of as a cycle of nodes
-- (that make up a cycle of segments).
data Path
    = Path {
        segments :: [Segment],
            -- ^ cycle of segments; the head is the currently active one
            -- (the list is rotated by 'updateSegment')
        distanceToGuidePoint :: CpFloat,
            -- ^ distance along the path from (lastNode path) to the guide point
        pathLength :: CpFloat
            -- ^ sum of all segment lengths
      }
    | SingleNode {
        node :: Vector,
        onState :: Maybe Path -- saves the path if the platform can be switched on
      }
  deriving (Show, Typeable)

-- | Builds a 'Path' from a list of node positions.
-- A single node yields 'SingleNode'; an inactive platform is wrapped in a
-- 'SingleNode' holding the real path.  Calls 'error' on an empty list.
mkPath :: Bool -> [Vector] -> Path
mkPath _ [] = error "empty paths are not allowed"
mkPath _ [n] = SingleNode n Nothing
mkPath active list =
    (deleteConsecutiveTwins >>>
     adjacentCyclic >>>
     map (\ (a, b) -> segment a b) >>>
     (\ segments -> Path segments 0 (sumLength segments)) >>>
     wrap) list
  where
    -- deletes consecutive points in the path that are identical.
    deleteConsecutiveTwins :: Eq a => [a] -> [a]
    deleteConsecutiveTwins = mergeAdjacentCyclicPairs $
        \ a b -> if a == b then Just a else Nothing

    -- sums up all the segment's lengths
    sumLength :: [Segment] -> CpFloat
    sumLength = sum . map segmentLength

    -- wraps the path in a SingleNode when platform is initially switched off
    wrap = if active then id else SingleNode (head list) . Just

-- | returns the currently active segment
-- NOTE(review): partial -- matches only 'Path' with a non-empty segment
-- list; callers must not pass a 'SingleNode'.
currentSegment :: Path -> Segment
currentSegment Path{segments = (a : _)} = a

-- | A directed path segment with its cached length.
data Segment = Segment {
    start :: Vector,
    end :: Vector,
    segmentLength :: CpFloat
  } deriving (Show, Typeable)

-- | Smart constructor: computes and caches the segment length.
segment :: Vector -> Vector -> Segment
segment start end = Segment start end (len (end -~ start))

-- | Direction vector from start to end of a segment.
segmentToVector :: Segment -> Vector
segmentToVector segment = end segment -~ start segment

-- | returns the next path node
-- (partial: requires a 'Path' with at least one segment)
nextNode :: Path -> Vector
nextNode (segments -> (a : _)) = end a

-- | returns the last path node visited (partial, like 'nextNode')
lastNode :: Path -> Vector
lastNode (segments -> (a : _)) = start a

-- | Advance the path by one simulation step: move the guide point, then
-- switch to the next segment if the guide passed the current one.
updatePath :: Path -> Path
updatePath =
    updateGuide >>>
    updateSegment

-- * guide point

-- The guide point is a point that moves on the path with a
-- constant velocity. It is guiding the movement of the platform.
-- It is described as the distance from (lastNode path) to the guide point
-- on the path.

-- | returns the guide point
-- (assumes a non-empty segment list; walks the cyclic segment sequence
-- until the remaining distance fits into one segment)
guidePoint :: [Segment] -> CpFloat -> Vector
guidePoint segments distanceToGuidePoint =
    inner (cycle segments) distanceToGuidePoint
  where
    inner (a : r) d =
        if d < segmentLength a then
            start a +~ scale (normalize $ segmentToVector a) d
          else
            inner r (d - segmentLength a)

-- | updates the guide with the configuration value for the platform speed
-- (note: the local binding 'segment' shadows the top-level constructor
-- function of the same name)
updateGuide :: Path -> Path
updateGuide p@SingleNode{} = p
updateGuide (Path segments@(segment : _) distance pathLength) =
    Path segments newDistance pathLength
  where
    tmpNewDistance = distance + updateStepQuantum * platformStandardVelocity
    -- keep the distance within one lap past the current segment,
    -- so 'updateSegment' can detect the segment switch
    newDistance =
        if tmpNewDistance > segmentLength segment then
            foldToRange
                (segmentLength segment,
                 segmentLength segment + pathLength)
                tmpNewDistance
          else
            tmpNewDistance

-- * segment switching

-- | The platform has an active segment at any time,
-- between (lastNode platform) and (nextNode platform).
-- This operation switches to the next segment if needed.
-- If a switch takes place, an impulse is applied to
-- smoothen behaviour at path nodes.
updateSegment :: Path -> Path
updateSegment p@SingleNode{} = p
updateSegment path@(Path (a : r) dtg pathLength) =
    if dtg >= segmentLength a
        -- rotate the segment cycle and carry the excess distance over
        then Path (r +: a) (dtg - segmentLength a) pathLength
        else path

-- * force

-- | Applies a force to the path robot.
-- The force is composed of an antiGravity and a path force
-- that will let the platform follow its path.
applyPathRobotForce :: Chipmunk -> Path -> IO ()
applyPathRobotForce chip path = do
    antiGravity <- getAntiGravity chip
    motion <- getPathForce chip path
    let force = antiGravity +~ motion
    applyOnlyForce (body chip) force zero
    return ()

-- | calculates the force that lets the platform hover
getAntiGravity :: Chipmunk -> IO Vector
getAntiGravity chip = do
    m <- getMass chip
    return (Vector 0 (- gravity * m))

-- | calculates the force that moves the platform to the next path node
getPathForce :: Chipmunk -> Path -> IO Vector
getPathForce chip path = do
    m <- getMass chip
    p <- getPosition chip
    v <- get $ velocity $ body chip
    return $ mkPathForce path m p v

-- | (pure) calculation of the path force.
-- (the 'do' in the second equation is redundant: the body is a single
-- pure expression)
mkPathForce :: Path -> Mass -> Vector -> Vector -> Vector
mkPathForce (SingleNode aim _) m p v =
    springForce singleNodeSpringConfiguration m p v aim
mkPathForce (Path segments distanceToGuidePoint _) m p v = do
    -- the force will always have the same length (or 0)
    springForce pathSpringConfiguration m p v (guidePoint segments distanceToGuidePoint)

-- * spring simulation

-- | Simulates the attachment of the platforms to a spring.
-- The returned force pulls the body towards @aim@ and adds a drag term
-- (constant + velocity-proportional) so the oscillation dies down.
springForce :: SpringConfiguration -> Mass -> Vector -> Vector -> Vector -> Vector
springForce conf mass position velocity aim =
    force +~ drag
  where
    direction = normalizeIfNotZero (aim -~ position)
    force = scale direction forceLen
    forceLen = mass * toAimLen * springFactor
    toAimLen = len (aim -~ position)
    -- the acceleration should increase with lenToAim
    -- till the springConstantAccelerationDistance is reached
    springFactor =
        springAcceleration conf / fromKachel 1
    -- drag to let the swinging stop
    drag = scale dragDirection dragLen
    dragLen = constantDrag +~ dynamicDrag
    constantDrag = frictionFactor conf * mass
    dynamicDrag = dragFactor conf * mass
        * len velocity / platformStandardVelocity
    dragDirection = normalizeIfNotZero (negateAbelian velocity)
-- * object edit mode

-- | Editor-mode hooks: how to create an initial OEM state for this robot
-- and how to restore one from its pickled string form.
oemMethods :: Size Double -> OEMMethods
oemMethods size = OEMMethods
    (OEMState . initialState size)
    (fmap OEMState . unpickle size)

-- | Editor state for editing a path robot's path.
data OEMPath = OEMPath {
    oemRobotSize :: Size Double,
    oemStepSize :: Int,          -- cursor movement step (doubled/halved with W/S)
    oemCursor_ :: EditorPosition, -- current cursor position
    pathPositions :: OEMPathPositions,
    oemActive :: Bool            -- initial on/off state of the platform
  }
    deriving (Show, Typeable, Data)

-- | Accessor for the cursor field.
oemCursor :: Accessor OEMPath EditorPosition
oemCursor = accessor oemCursor_ (\ a r -> r{oemCursor_ = a})

instance IsOEMState OEMPath where
    oemEnterMode _ = id
    oemUpdate _ = updateOEMPath
    oemNormalize _ = id
    oemRender ptr app config = renderOEMState app config ptr
    -- pickled form is the 'show' of a 'PickleType' triple
    oemPickle (OEMPath _ _ cursor path active) =
        show ((cursor, getPathList path, active) :: PickleType)
    oemHelp = const oemHelpText

type PickleType = (EditorPosition, [EditorPosition], Bool)
-- last component saves, if the path robot is activated or not.
-- This means different things in different robots, though.

-- | Restore editor state from its pickled form; fails (Nothing) on
-- unparsable strings or an empty node list.
unpickle :: Size Double -> String -> Maybe OEMPath
unpickle size (readMay -> Just ((cursor, (start : path), active) :: PickleType)) =
    Just $ OEMPath size (fromKachel 1) cursor (OEMPathPositions start path) active
unpickle _ _ = Nothing

-- | use the position of the object as first node in Path
initialState :: Size Double -> EditorPosition -> OEMPath
initialState size p = OEMPath size (fromKachel 1) p (OEMPathPositions p []) True

-- | Path nodes: the fixed start position plus the appended nodes.
data OEMPathPositions =
    OEMPathPositions {
        startPosition :: EditorPosition,
        positions :: [EditorPosition]
      }
    deriving (Show, Typeable, Data)

-- | All nodes of the path, start node first.
getPathList :: OEMPathPositions -> [EditorPosition]
getPathList (OEMPathPositions start path) = start : path

-- | Adds a point to the path.
addPathPoint :: EditorPosition -> OEMPathPositions -> OEMPathPositions
addPathPoint point (OEMPathPositions start path) =
    OEMPathPositions start (path +: point)
-- | removes the last added point at the given position, if it exists
-- (the start node is never removed).
removePathPoint :: EditorPosition -> OEMPathPositions -> OEMPathPositions
removePathPoint point (OEMPathPositions start path) =
    OEMPathPositions start (reverse (dropFirst point (reverse path)))
  where
    -- drops the first occurrence of the given element
    dropFirst :: Eq a => a -> [a] -> [a]
    dropFirst _ [] = []
    dropFirst needle (x : xs)
        | needle == x = xs
        | otherwise = x : dropFirst needle xs
-- * oem logic

-- | Keyboard handling for path editing: arrows move the cursor, the
-- editor A/B buttons add/remove nodes, W/S change the step size,
-- Space toggles the initial on/off state.
updateOEMPath :: Button -> OEMPath -> OEMUpdateMonad OEMPath
updateOEMPath (KeyboardButton key _ _) oem@(OEMPath size cursorStep cursor path active) =
    case key of
        LeftArrow -> return $ oemCursor ^: (-~ EditorPosition cursorStepF 0) $ oem
        RightArrow -> return $ oemCursor ^: (+~ EditorPosition cursorStepF 0) $ oem
        UpArrow -> return $ oemCursor ^: (-~ EditorPosition 0 cursorStepF) $ oem
        DownArrow -> return $ oemCursor ^: (+~ EditorPosition 0 cursorStepF) $ oem
        -- append new path node
        k | isEditorA k -> return $ OEMPath size cursorStep cursor (addPathPoint cursor path) active
        -- delete path node
        k | isEditorB k -> return $ OEMPath size cursorStep cursor (removePathPoint cursor path) active
        W -> return $ oem{oemStepSize = cursorStep * 2}
        S -> return $ oem{oemStepSize = max 1 (cursorStep `div` 2)}
        Space -> return $ oem{oemActive = not active}
        _ -> oemNothing
  where
    cursorStepF :: Double = fromIntegral cursorStep
updateOEMPath _ _ = oemNothing

-- | Render the editor view: scene, cursor box and cursor step size.
-- (pathPositions and oemActive are matched but not used here;
-- presumably the path itself is drawn elsewhere via 'renderOEMPath')
renderOEMState :: Sort sort a => Application -> Configuration -> Ptr QPainter
    -> EditorScene sort -> OEMPath -> IO ()
renderOEMState app config ptr scene
    (OEMPath robotSize stepSize cursor pathPositions oemActive) = do
        offset <- transformation ptr cursor robotSize
        renderScene offset
        renderCursor offset
        let stepSizeF = fromIntegral stepSize
        renderCursorStepSize app config ptr $ EditorPosition stepSizeF stepSizeF
  where
    renderScene offset =
        renderObjectScene ptr offset scene
    renderCursor offset =
        drawColoredBox ptr (epToPosition robotSize cursor +~ offset) robotSize 4 yellow

-- | Draw the (cyclic) path: green lines between adjacent nodes plus a
-- translucent yellow box on each node.
renderOEMPath :: Size Double -> Ptr QPainter -> Offset Double -> [EditorPosition]
    -> IO ()
renderOEMPath size ptr offset paths = do
    setPenColor ptr green 4
    mapM_ (renderLine size ptr) (adjacentCyclic paths)
    mapM_ (drawPathNode size ptr) paths

-- | Draw one path edge between the centers of two nodes.
renderLine :: Size Double -> Ptr QPainter -> (EditorPosition, EditorPosition) -> IO ()
renderLine size ptr (a, b) =
    drawLine ptr (epToCenterPosition size a) (epToCenterPosition size b)

-- | Draw one path node as a translucent yellow rectangle.
drawPathNode :: Size Double -> Ptr QPainter -> EditorPosition -> IO ()
drawPathNode size ptr n =
    fillRect ptr (epToPosition size n)
        size
        (alpha ^: (* 0.4) $ yellow)

-- * oem help text

oemHelpText :: String =
    "Arrow keys: move cursor\n" ++
    "Ctrl: add new path node\n" ++
    "Shift: remove existing node from path\n" ++
    "Space: change initial state of platform (on / off)\n" ++
    "W, S: change cursor step size"
| geocurnoff/nikki | src/Sorts/Robots/PathRobots/Path.hs | lgpl-3.0 | 11,279 | 0 | 13 | 2,511 | 2,938 | 1,533 | 1,405 | 212 | 10 |
module MediaWiki.API.Query.AllUsers.Import where
import MediaWiki.API.Types
import MediaWiki.API.Utils
import MediaWiki.API.Query.AllUsers
import Text.XML.Light.Types
import Control.Monad
import Data.Maybe
-- | Parse the raw XML of an @allusers@ API response.
stringXml :: String -> Either (String,[{-Error msg-}String]) AllUsersResponse
stringXml s = parseDoc xml s

-- | Decode the @\<api>\<query>\<allusers>@ element tree into a response.
xml :: Element -> Maybe AllUsersResponse
xml e = do
    guard (elName e == nsName "api")
    let es1 = children e
    p <- pNode "query" es1
    let es = children p
    -- one result per <u> child; malformed children are dropped
    ps <- fmap (mapMaybe xmlUser) (fmap children $ pNode "allusers" es)
    -- continuation marker for paged results, if present
    let cont = pNode "query-continue" es1 >>= xmlContinue "allusers" "aufrom"
    return emptyAllUsersResponse{auUsers=ps,auContinue=cont}
-- | Decode a single @\<u>@ element into (user name, edit count, groups).
-- Yields Nothing for elements that are not @\<u>@.
xmlUser :: Element -> Maybe (UserName,Maybe Int, Maybe String)
xmlUser e = do
  guard (elName e == nsName "u")
  -- (the previously computed "ns" attribute was never used and has been removed)
  let nm = fromMaybe "" $ pAttr "name" e
  -- edit count: present only when the attribute exists and parses as an Int
  let ec = pAttr "editcount" e >>= \ x -> case reads x of { ((v,_):_) -> Just v; _ -> Nothing}
  let grps = pAttr "groups" e
  return (nm,ec,grps)
| HyperGainZ/neobot | mediawiki/MediaWiki/API/Query/AllUsers/Import.hs | bsd-3-clause | 1,042 | 0 | 17 | 204 | 419 | 210 | 209 | 26 | 2 |
{-# LANGUAGE QuasiQuotes #-}
module Main where
import Control.Monad
import System.Environment
import Language.C
import Language.C.System.GCC
import Text.Printf
import Text.PrettyPrint.HughesPJ
--import Here (here)
-- | Generate a C program exercising nested if/else chains and print it
-- (a pretty-printer round-trip test for language-c).
main = do
    -- this is not the prettiest, but easiest solution
    let depth = 2
    putStrLn "#include <stdio.h>"
    print $ pretty $ parseCExtDecl $ show $
        text "int main(int argc, char**argv)" $+$
        (braces $
            stat_embed depth (stat1 depth) $+$
            stat_embed depth (stat2 depth) $+$
            text "return(0);")

-- | Parse a C statement from a string; calls 'error' on parse failure.
parseCStat :: String -> CStat
parseCStat s = either (error.show) id $ execParser_ statementP (inputStreamFromString s) (initPos "<stdin>")

-- | Parse a C external declaration from a string; calls 'error' on parse failure.
parseCExtDecl :: String -> CExtDecl
parseCExtDecl s = either (error.show) id $ execParser_ extDeclP (inputStreamFromString s) (initPos "<stdin>")

-- | Wrap a statement in a block that declares @k@ guard variables,
-- iterates each over {0,1}, runs the statement and prints @r@.
stat_embed :: Int -> CStat -> Doc
stat_embed k stat = braces $ nest 2 $
    decls $+$
    text "int r = 0;" $+$
    iteropen $+$
    (nest 2 stmt) $+$
    (nest 2 $ text "printf(\"%d\\n\",r);") $+$
    iterclose
  where
    stmt = pretty stat
    -- declarations for the guard variables g_1 .. g_k
    decls = vcat $ map (\n -> text "int" <+> text(guardName n) <> semi) [1..k]
    -- one nested for-loop per guard variable
    iteropen = vcat $ map (\n -> let gn = guardName n in text (printf "for(%s=0;%s<=1;%s++){" gn gn gn)) [1..k]
    iterclose = vcat $ replicate k (char '}')

-- | Name of the n-th guard variable.
guardName n = "g_"++show n

-- | C statement assigning the literal @k@ to @r@.
setR :: Int -> CStat
setR k = parseCStat $ printf "r = %d;" k
-- | Nested if chain of the given depth: the innermost test chooses
-- between r=1 and r=2; outer levels carry no else branch.
stat1 :: Int -> CStatement NodeInfo
stat1 depth = go depth
  where
    go depth =
        case depth of
            n | n <= 1 -> CIf (guard n) (setR 1) (Just$ setR 2) u
              | otherwise -> CIf (guard n) (go (n-1)) Nothing u
    -- (removed an unused local helper "cexpr" that was dead code)
    vexpr s = CVar (internalIdent s) u
    guard n = vexpr (guardName n)
    u = undefNode

-- | Like 'stat1', but the else branch (r=2) hangs off the outermost if.
stat2 :: Int -> CStatement NodeInfo
stat2 depth = CIf (guard depth) (go (depth-1)) (Just$ setR 2) u
  where
    go n | n == 0 = setR 1
         | otherwise = CIf (guard n) (go (n-1)) Nothing u
    -- (removed an unused local helper "cexpr" that was dead code)
    vexpr s = CVar (internalIdent s) u
    guard n = vexpr (guardName n)
    u = undefNode
| llelf/language-c | test/harness/bug31_pp_if_else/Test.hs | bsd-3-clause | 2,083 | 0 | 15 | 532 | 817 | 407 | 410 | 54 | 1 |
{-# LANGUAGE DeriveDataTypeable, TypeFamilies, TemplateHaskell #-}
module Distribution.Server.Features.PackageCandidates.State where
import Distribution.Server.Features.PackageCandidates.Types
import Distribution.Server.Framework.MemSize
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Distribution.Package
import Data.Acid (Query, Update, makeAcidic)
import Data.SafeCopy (deriveSafeCopy, base)
import Data.Typeable
import Control.Monad.Reader
import qualified Control.Monad.State as State
import Data.Monoid
---------------------------------- Index of candidate tarballs and metadata
-- boilerplate code based on PackagesState

-- | Acid-state value holding the index of candidate package uploads.
data CandidatePackages = CandidatePackages {
    candidateList :: !(PackageIndex.PackageIndex CandPkgInfo)
  } deriving (Typeable, Show, Eq)

deriveSafeCopy 0 'base ''CandidatePackages

instance MemSize CandidatePackages where
    memSize (CandidatePackages a) = memSize1 a

-- | The initial (empty) index.
initialCandidatePackages :: CandidatePackages
initialCandidatePackages = CandidatePackages {
    candidateList = mempty
  }

-- | Replace all existing candidate versions of this package with the
-- single given one.
replaceCandidate :: CandPkgInfo -> Update CandidatePackages ()
replaceCandidate pkg = State.modify $ \candidates -> candidates { candidateList = replaceVersions (candidateList candidates) }
  where replaceVersions = PackageIndex.insert pkg . PackageIndex.deletePackageName (packageName pkg)

-- | Insert one candidate version, keeping any existing versions.
addCandidate :: CandPkgInfo -> Update CandidatePackages ()
addCandidate pkg = State.modify $ \candidates -> candidates { candidateList = addVersion (candidateList candidates) }
  where addVersion = PackageIndex.insert pkg

-- | Delete a single candidate version.
deleteCandidate :: PackageId -> Update CandidatePackages ()
deleteCandidate pkg = State.modify $ \candidates -> candidates { candidateList = deleteVersion (candidateList candidates) }
  where deleteVersion = PackageIndex.deletePackageId pkg

-- | Delete all candidate versions of a package.
deleteCandidates :: PackageName -> Update CandidatePackages ()
deleteCandidates pkg = State.modify $ \candidates -> candidates { candidateList = deleteVersions (candidateList candidates) }
  where deleteVersions = PackageIndex.deletePackageName pkg

-- |Replace all existing packages and reports
replaceCandidatePackages :: CandidatePackages -> Update CandidatePackages ()
replaceCandidatePackages = State.put

-- | Read the whole index.
getCandidatePackages :: Query CandidatePackages CandidatePackages
getCandidatePackages = ask

-- Generate acid-state event types for the transactions above.
makeAcidic ''CandidatePackages ['getCandidatePackages
                               ,'replaceCandidatePackages
                               ,'replaceCandidate
                               ,'addCandidate
                               ,'deleteCandidate
                               ,'deleteCandidates
                               ]
| mpickering/hackage-server | Distribution/Server/Features/PackageCandidates/State.hs | bsd-3-clause | 2,683 | 0 | 12 | 476 | 535 | 299 | 236 | 45 | 1 |
{- $Id: AFRPTestsCOC.hs,v 1.2 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* A F R P *
* *
* Module: AFRPTestsCOC *
* Purpose: Test cases for collection-oriented combinators *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module AFRPTestsCOC (coc_tr, coc_trs) where
import FRP.Yampa
import AFRPTestsCommon
------------------------------------------------------------------------------
-- Test cases for collection-oriented combinators
------------------------------------------------------------------------------

-- Input signal: 0.0, 0.5, 1.0, ... sampled every 0.1 s.
coc_inp1 = deltaEncode 0.1 [0.0, 0.5 ..]

-- Run three signal functions in parallel (broadcasting the input):
-- a constant, the identity, and the integral of the input.
coc_t0 :: [[Double]]
coc_t0 = take 20 $ embed (parB [constant 1.0, identity, integral]) coc_inp1

-- Expected result for coc_t0.
coc_t0r =
    [[1.0, 0.0, 0.00],
     [1.0, 0.5, 0.00],
     [1.0, 1.0, 0.05],
     [1.0, 1.5, 0.15],
     [1.0, 2.0, 0.30],
     [1.0, 2.5, 0.50],
     [1.0, 3.0, 0.75],
     [1.0, 3.5, 1.05],
     [1.0, 4.0, 1.40],
     [1.0, 4.5, 1.80],
     [1.0, 5.0, 2.25],
     [1.0, 5.5, 2.75],
     [1.0, 6.0, 3.30],
     [1.0, 6.5, 3.90],
     [1.0, 7.0, 4.55],
     [1.0, 7.5, 5.25],
     [1.0, 8.0, 6.00],
     [1.0, 8.5, 6.80],
     [1.0, 9.0, 7.65],
     [1.0, 9.5, 8.55]]

-- Individual test results (approximate equality).
coc_trs =
    [ coc_t0 ~= coc_t0r
    ]

-- Overall result for this group.
coc_tr = and coc_trs
| meimisaki/Yampa | tests/AFRPTestsCOC.hs | bsd-3-clause | 1,770 | 0 | 10 | 658 | 352 | 226 | 126 | 30 | 1 |
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
module Thrift
( module Thrift.Transport
, module Thrift.Protocol
, AppExnType(..)
, AppExn(..)
, readAppExn
, writeAppExn
, ThriftException(..)
) where
import Control.Monad ( when )
import Control.Exception
import Data.Typeable ( Typeable )
import Thrift.Transport
import Thrift.Protocol
-- | Generic failure thrown by Thrift internals (no further detail carried).
data ThriftException = ThriftException
    deriving ( Show, Typeable )
instance Exception ThriftException
-- | Application-level exception categories defined by the Thrift
-- protocol.  The 'Enum' instance fixes the on-wire integer codes
-- (0..5) used by @TApplicationException@.
data AppExnType
    = AE_UNKNOWN
    | AE_UNKNOWN_METHOD
    | AE_INVALID_MESSAGE_TYPE
    | AE_WRONG_METHOD_NAME
    | AE_BAD_SEQUENCE_ID
    | AE_MISSING_RESULT
    deriving ( Eq, Show, Typeable )

instance Enum AppExnType where
    toEnum 0 = AE_UNKNOWN
    toEnum 1 = AE_UNKNOWN_METHOD
    toEnum 2 = AE_INVALID_MESSAGE_TYPE
    toEnum 3 = AE_WRONG_METHOD_NAME
    toEnum 4 = AE_BAD_SEQUENCE_ID
    toEnum 5 = AE_MISSING_RESULT
    -- previously an unknown wire code crashed with an opaque
    -- pattern-match failure; fail with a diagnostic message instead
    toEnum t = error ("AppExnType.toEnum: invalid value: " ++ show t)

    fromEnum AE_UNKNOWN = 0
    fromEnum AE_UNKNOWN_METHOD = 1
    fromEnum AE_INVALID_MESSAGE_TYPE = 2
    fromEnum AE_WRONG_METHOD_NAME = 3
    fromEnum AE_BAD_SEQUENCE_ID = 4
    fromEnum AE_MISSING_RESULT = 5
-- | A @TApplicationException@: a typed category plus a human-readable message.
data AppExn = AppExn { ae_type :: AppExnType, ae_message :: String }
    deriving ( Show, Typeable )
instance Exception AppExn
-- | Serialize an 'AppExn' as a @TApplicationException@ struct:
-- field 1 (message, written only when non-empty) and field 2 (type code).
writeAppExn :: (Protocol p, Transport t) => p t -> AppExn -> IO ()
writeAppExn pt ae = do
    writeStructBegin pt "TApplicationException"
    when (ae_message ae /= "") $ do
        writeFieldBegin pt ("message", T_STRING , 1)
        writeString pt (ae_message ae)
        writeFieldEnd pt
    writeFieldBegin pt ("type", T_I32, 2);
    writeI32 pt (fromEnum (ae_type ae))
    writeFieldEnd pt
    writeFieldStop pt
    writeStructEnd pt
-- | Read a @TApplicationException@ struct from the wire.
-- NOTE(review): the accumulator starts with undefined fields, so a struct
-- missing field 1 or 2 yields an 'AppExn' with undefined components.
readAppExn :: (Protocol p, Transport t) => p t -> IO AppExn
readAppExn pt = do
    readStructBegin pt
    rec <- readAppExnFields pt (AppExn {ae_type = undefined, ae_message = undefined})
    readStructEnd pt
    return rec
-- | Field-reading loop for 'readAppExn': fold each field into the
-- accumulator until a T_STOP marker.  Fields with unexpected ids or
-- types are skipped.
readAppExnFields :: (Protocol p, Transport t) => p t -> AppExn -> IO AppExn
readAppExnFields pt rec = do
    (_name, ft, fid) <- readFieldBegin pt
    if ft == T_STOP
        then return rec
        else do
            rec' <- case fid of
                1 | ft == T_STRING ->
                        do s <- readString pt
                           return rec{ae_message = s}
                  | otherwise ->
                        do skip pt ft
                           return rec
                2 | ft == T_I32 ->
                        do i <- readI32 pt
                           return rec{ae_type = toEnum i}
                  | otherwise ->
                        do skip pt ft
                           return rec
                _ -> do skip pt ft
                        return rec
            -- pair every readFieldBegin with a readFieldEnd; the original
            -- code only did so in the default branch
            readFieldEnd pt
            readAppExnFields pt rec'
| ajayanandgit/mbunit-v3 | tools/Thrift/src/lib/hs/src/Thrift.hs | apache-2.0 | 3,468 | 1 | 18 | 1,000 | 761 | 393 | 368 | 76 | 6 |
{-# LANGUAGE CPP #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
#ifndef MIN_VERSION_parallel
#define MIN_VERSION_parallel(x,y,z) (defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL > 700)
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Parallel.Strategies.Lens
-- Copyright : (C) 2012-2015 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : portable
--
-- A 'Lens' or 'Traversal' can be used to take the role of 'Traversable' in
-- @Control.Parallel.Strategies@, enabling those combinators to work with
-- monomorphic containers.
----------------------------------------------------------------------------
module Control.Parallel.Strategies.Lens
( evalOf
, parOf
, after
, throughout
) where
import Control.Lens
import Control.Parallel.Strategies
-- | Evaluate the targets of a 'Lens' or 'Traversal' into a data structure
-- according to the given 'Strategy'.
--
-- @
-- 'evalTraversable' = 'evalOf' 'traverse' = 'traverse'
-- 'evalOf' = 'id'
-- @
--
-- @
-- 'evalOf' :: 'Lens'' s a -> 'Strategy' a -> 'Strategy' s
-- 'evalOf' :: 'Traversal'' s a -> 'Strategy' a -> 'Strategy' s
-- 'evalOf' :: ((a -> 'Eval' a) -> s -> 'Eval' s) -> 'Strategy' a -> 'Strategy' s
-- @
evalOf :: LensLike' Eval s a -> Strategy a -> Strategy s
evalOf l = l
{-# INLINE evalOf #-}

-- | Evaluate the targets of a 'Lens' or 'Traversal' according into a
-- data structure according to a given 'Strategy' in parallel.
--
-- @'parTraversable' = 'parOf' 'traverse'@
--
-- @
-- 'parOf' :: 'Lens'' s a -> 'Strategy' a -> 'Strategy' s
-- 'parOf' :: 'Traversal'' s a -> 'Strategy' a -> 'Strategy' s
-- 'parOf' :: ((a -> 'Eval' a) -> s -> 'Eval' s) -> 'Strategy' a -> 'Strategy' s
-- @
parOf :: LensLike' Eval s a -> Strategy a -> Strategy s
#if MIN_VERSION_parallel(3,2,0)
-- parallel >= 3.2 provides 'rparWith' for sparking a strategy
parOf l s = l (rparWith s)
#else
-- older parallel: compose 'rpar' with the strategy by hand
parOf l s = l (rpar `dot` s)
#endif
{-# INLINE parOf #-}

-- | Transform a 'Lens', 'Fold', 'Getter', 'Setter' or 'Traversal' to
-- first evaluate its argument according to a given 'Strategy' /before/ proceeding.
--
-- @
-- 'after' 'rdeepseq' 'traverse' :: 'Traversable' t => 'Strategy' a -> 'Strategy' [a]
-- @
after :: Strategy s -> LensLike f s t a b -> LensLike f s t a b
after s l f = l f $| s
{-# INLINE after #-}

-- | Transform a 'Lens', 'Fold', 'Getter', 'Setter' or 'Traversal' to
-- evaluate its argument according to a given 'Strategy' /in parallel with/ evaluating.
--
-- @
-- 'throughout' 'rdeepseq' 'traverse' :: 'Traversable' t => 'Strategy' a -> 'Strategy' [a]
-- @
throughout :: Strategy s -> LensLike f s t a b -> LensLike f s t a b
throughout s l f = l f $|| s
{-# INLINE throughout #-}
| rpglover64/lens | src/Control/Parallel/Strategies/Lens.hs | bsd-3-clause | 2,786 | 0 | 7 | 495 | 288 | 175 | 113 | 20 | 1 |
module Package06e where
import HsTypes
import UniqFM
| urbanslug/ghc | testsuite/tests/package/package06e.hs | bsd-3-clause | 53 | 0 | 3 | 7 | 10 | 7 | 3 | 3 | 0 |
-- !!! Infix record constructor.
module ShouldCompile where

-- | A record type whose data constructor is the infix operator @(:<-:)@.
data Rec = (:<-:) { a :: Int, b :: Float }
| urbanslug/ghc | testsuite/tests/parser/should_compile/read010.hs | bsd-3-clause | 104 | 5 | 5 | 21 | 31 | 19 | 12 | 2 | 0 |
{-# OPTIONS_GHC -funbox-strict-fields #-}
import Data.List
-- A four-component vector with strict fields; with the file's
-- -funbox-strict-fields option these floats are unboxed (test T1852).
data Vec4 = Vec4 !Float !Float !Float !Float

main :: IO ()
main = print traceList

-- Flattens the RGBA tuple of each sample point into one list;
-- for the single input point (0,0) this yields [1,2,3,4].
traceList = concatMap (\(x,y) -> let (r,g,b,a) = getPixel (x,y) in [r,g,b,a])
            [(0,0)]
  where
    getPixel (x,y) = (red,green,blue,alpha)
      where
        -- seq forces x before the strict-field constructor is built
        Vec4 fr fg fb fa = seq x (Vec4 1 2 3 4)
        red = round fr
        green = round fg
        blue = round fb
        alpha = round fa
| sdiehl/ghc | testsuite/tests/codeGen/should_run/T1852.hs | bsd-3-clause | 448 | 0 | 13 | 134 | 216 | 116 | 100 | 21 | 1 |
{-# Language TypeFamilies #-}
module SparseMap
(SparseMap(..)
, trueList
, falseList
, fromList
, index
, insert
, constant) where
import Ersatz
import ChooseBit
import Prelude hiding ((&&), (||), not)
import Control.Applicative
import Data.Monoid ((<>))
import Data.Map (Map)
import qualified Data.Map as Map
-- | A total map: explicit values for finitely many keys plus a default
-- value for every other key.
data SparseMap k v = SparseMap !(Map k v) v
    deriving (Read, Show)

-- Pointwise symbolic choice between two maps.
instance (Ord k, ChooseBit v) => ChooseBit (SparseMap k v) where
    chooseBit x y b = (\l r -> chooseBit l r b) <$> x <*> y

instance Functor (SparseMap k) where
    fmap f (SparseMap m x) = SparseMap (f <$> m) (f x)

-- The default value is visited after the explicit entries.
instance Foldable (SparseMap k) where
    foldMap f (SparseMap m x) = foldMap f m <> f x

instance Traversable (SparseMap k) where
    traverse f (SparseMap m x) = SparseMap <$> traverse f m <*> f x

-- 'pure' is the constant map; '<*>' applies pointwise, using the
-- respective defaults for keys present on only one side.
instance Ord k => Applicative (SparseMap k) where
    pure = constant
    SparseMap fs f <*> SparseMap xs x = SparseMap merged (f x)
      where
        merged = Map.mergeWithKey
            (\_ g y -> Just (g y))  -- key in both: apply
            (($ x) <$>)             -- key only in fs: apply to default arg
            (f <$>)                 -- key only in xs: apply default function
            fs xs
-- | Build a sparse map from an association list together with a default
-- value used for every key absent from the list.
fromList :: Ord k => v -> [(k,v)] -> SparseMap k v
fromList def assocs = SparseMap (Map.fromList assocs) def
-- | Map that is 'true' exactly on the given keys and 'false' elsewhere.
trueList :: (Boolean v, Ord k) => [k] -> SparseMap k v
trueList xs = SparseMap (Map.fromList [ (x,true) | x <- xs ]) false

-- | Map that is 'false' exactly on the given keys and 'true' elsewhere.
falseList :: (Boolean v, Ord k) => [k] -> SparseMap k v
falseList xs = SparseMap (Map.fromList [ (x,false) | x <- xs ]) true
-- | Look up the value stored at a key, falling back to the map's default
-- when the key has no explicit entry.
index :: Ord k => k -> SparseMap k v -> v
index key (SparseMap entries def) =
    case Map.lookup key entries of
        Just v  -> v
        Nothing -> def
-- | Store an explicit value for a key; the default is left untouched.
insert :: Ord k => k -> v -> SparseMap k v -> SparseMap k v
insert key val (SparseMap entries def) =
    let entries' = Map.insert key val entries
    in SparseMap entries' def
-- | Combine all values of the map (explicit entries, then the default)
-- with a binary operator.
foldSparseMap :: (v -> v -> v) -> SparseMap k v -> v
foldSparseMap f (SparseMap m x) = foldr f x m

-- Two maps are symbolically equal when they agree pointwise.
instance (Equatable v, Ord k) => Equatable (SparseMap k v) where
    x === y = foldSparseMap (&&) (liftA2 (===) x y)

-- Pointwise boolean structure over the values.
instance (Ord k, Boolean v) => Boolean (SparseMap k v) where
    bool = constant . bool
    (&&) = liftA2 (&&)
    (||) = liftA2 (||)
    xor = liftA2 xor
    not = fmap not
    any f = foldr (\y ys -> f y || ys) false
    all f = foldr (\y ys -> f y && ys) true

-- | The constant map: every key maps to the given value.
constant :: v -> SparseMap k v
constant = SparseMap Map.empty

-- Encode/decode all values, preserving the map's shape.
instance Codec v => Codec (SparseMap k v) where
    type Decoded (SparseMap k v) = SparseMap k (Decoded v)
    encode = fmap encode
    decode sol = traverse (decode sol)
| glguy/5puzzle | src/SparseMap.hs | isc | 2,372 | 0 | 13 | 598 | 1,121 | 587 | 534 | 64 | 1 |
module Language.Go.Token where
import Language.Go.SrcLocation
-- | Lexical tokens of the Go language.  Every token records its source
-- span; identifier and literal tokens additionally keep the literal text.
data Token
    = IdentifierToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
    -- Literals
    | IntToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
    | FloatToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
    | ImaginaryToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
    | RuneToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
    | StringToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
    -- Keywords
    | BreakToken { tokenSpan :: !SrcSpan }
    | CaseToken { tokenSpan :: !SrcSpan }
    | ChanToken { tokenSpan :: !SrcSpan }
    | ConstToken { tokenSpan :: !SrcSpan }
    | ContinueToken { tokenSpan :: !SrcSpan }
    | DefaultToken { tokenSpan :: !SrcSpan }
    | DeferToken { tokenSpan :: !SrcSpan }
    | ElseToken { tokenSpan :: !SrcSpan }
    | FallthroughToken { tokenSpan :: !SrcSpan }
    | ForToken { tokenSpan :: !SrcSpan }
    | FuncToken { tokenSpan :: !SrcSpan }
    | GoToken { tokenSpan :: !SrcSpan }
    | GotoToken { tokenSpan :: !SrcSpan }
    | IfToken { tokenSpan :: !SrcSpan }
    | ImportToken { tokenSpan :: !SrcSpan }
    | InterfaceToken { tokenSpan :: !SrcSpan }
    | MapToken { tokenSpan :: !SrcSpan }
    | PackageToken { tokenSpan :: !SrcSpan }
    | RangeToken { tokenSpan :: !SrcSpan }
    | ReturnToken { tokenSpan :: !SrcSpan }
    | SelectToken { tokenSpan :: !SrcSpan }
    | StructToken { tokenSpan :: !SrcSpan }
    | SwitchToken { tokenSpan :: !SrcSpan }
    | TypeToken { tokenSpan :: !SrcSpan }
    | VarToken { tokenSpan :: !SrcSpan }
    -- Operators and delimiters
    | PlusToken { tokenSpan :: !SrcSpan }                     -- '+'
    | MinusToken { tokenSpan :: !SrcSpan }                    -- '-'
    | MultToken { tokenSpan :: !SrcSpan }                     -- '*'
    | DivToken { tokenSpan :: !SrcSpan }                      -- '/'
    | ModuloToken { tokenSpan :: !SrcSpan }                   -- '%'
    | BinaryAndToken { tokenSpan :: !SrcSpan }                -- '&'
    | BinaryOrToken { tokenSpan :: !SrcSpan }                 -- '|'
    | BinaryXorToken { tokenSpan :: !SrcSpan }                -- '^'
    | BinaryShiftLeftToken { tokenSpan :: !SrcSpan }          -- '<<'
    | BinaryShiftRightToken { tokenSpan :: !SrcSpan }         -- '>>'
    | BinaryAndNotToken { tokenSpan :: !SrcSpan }             -- '&^'
    | AndToken { tokenSpan :: !SrcSpan }                      -- '&&'
    | OrToken { tokenSpan :: !SrcSpan }                       -- '||'
    | ArrowToken { tokenSpan :: !SrcSpan }                    -- '<-'
    | IncToken { tokenSpan :: !SrcSpan }                      -- '++'
    | DecToken { tokenSpan :: !SrcSpan }                      -- '--'
    | EqualityToken { tokenSpan :: !SrcSpan }                 -- '=='
    | LessThanToken { tokenSpan :: !SrcSpan }                 -- '<'
    | GreaterThanToken { tokenSpan :: !SrcSpan }              -- '>'
    | AssignToken { tokenSpan :: !SrcSpan }                   -- '='
    | NotToken { tokenSpan :: !SrcSpan }                      -- '!'
    | NotEqualsToken { tokenSpan :: !SrcSpan }                -- '!='
    | LessThanEqualsToken { tokenSpan :: !SrcSpan }           -- '<='
    | GreaterThanEqualsToken { tokenSpan :: !SrcSpan }        -- '>='
    | DefineToken { tokenSpan :: !SrcSpan }                   -- ':='
    | EllipsisToken { tokenSpan :: !SrcSpan }                 -- '...'
    | LeftRoundBracketToken { tokenSpan :: !SrcSpan }         -- '('
    | RightRoundBracketToken { tokenSpan :: !SrcSpan }        -- ')'
    | LeftSquareBracketToken { tokenSpan :: !SrcSpan }        -- '['
    | RightSquareBracketToken { tokenSpan :: !SrcSpan }       -- ']'
    | LeftCurlyBracketToken { tokenSpan :: !SrcSpan }         -- '{'
    | RightCurlyBracketToken { tokenSpan :: !SrcSpan }        -- '}'
    | CommaToken { tokenSpan :: !SrcSpan }                    -- ','
    | DotToken { tokenSpan :: !SrcSpan }                      -- '.'
    | SemicolonToken { tokenSpan :: !SrcSpan }                -- ';'
    | ColonToken { tokenSpan :: !SrcSpan }                    -- ':'
    | PlusAssignToken { tokenSpan :: !SrcSpan }               -- '+='
    | MinusAssignToken { tokenSpan :: !SrcSpan }              -- '-='
    | MultAssignToken { tokenSpan :: !SrcSpan }               -- '*='
    | DivAssignToken { tokenSpan :: !SrcSpan }                -- '/='
    | ModuloAssignToken { tokenSpan :: !SrcSpan }             -- '%='
    | BinaryAndAssignToken { tokenSpan :: !SrcSpan }          -- '&='
    | BinaryOrAssignToken { tokenSpan :: !SrcSpan }           -- '|='
    | BinaryXorAssignToken { tokenSpan :: !SrcSpan }          -- '^='
    | BinaryShiftLeftAssignToken { tokenSpan :: !SrcSpan }    -- '<<='
    | BinaryShiftRightAssignToken { tokenSpan :: !SrcSpan }   -- '>>='
    | BinaryAndNotAssignToken { tokenSpan :: !SrcSpan }       -- '&^='
    deriving (Eq, Ord, Show)
| codeq/language-go | src/Language/Go/Token.hs | mit | 4,170 | 0 | 9 | 894 | 1,060 | 639 | 421 | 250 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE CPP #-}
module Network.HTTP.ReverseProxy.Rewrite
( ReverseProxyConfig (..)
, RewriteRule (..)
, RPEntry (..)
, simpleReverseProxy
)
where
import Control.Applicative
import Control.Exception (bracket)
import Data.Function (fix)
import Data.Monoid ((<>))
import qualified Data.Set as Set
import Data.Set (Set)
import qualified Data.Map as Map
import Data.Map ( Map )
import Data.Array ((!))
import Data.Aeson
import Control.Monad (unless)
import qualified Data.ByteString as S
import qualified Data.Text as T
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import qualified Data.CaseInsensitive as CI
import Blaze.ByteString.Builder (fromByteString)
import Keter.Types.Common
-- Configuration files
import Data.Default
-- Regular expression parsing, replacement, matching
import Data.Attoparsec.Text (string, takeWhile1, endOfInput, parseOnly, Parser)
import Text.Regex.TDFA (makeRegex, matchOnceText, MatchText)
import Text.Regex.TDFA.String (Regex)
import Data.Char (isDigit)
-- Reverse proxy apparatus
import qualified Network.Wai as Wai
import qualified Network.Wai.Internal as I
import Network.HTTP.Client.Conduit
import qualified Network.HTTP.Client as NHC
import Network.HTTP.Types
-- | A live reverse-proxy route: the parsed configuration paired with the
-- HTTP connection 'Manager' used to talk to the proxied upstream host.
data RPEntry = RPEntry
  { config :: ReverseProxyConfig
  , httpManager :: Manager
  }

-- Manual instance because 'Manager' has no 'Show'; only the config is rendered.
instance Show RPEntry where
  show x = "RPEntry { config = " ++ (show $ config x) ++ " }"
-- | Text captured by group @i@ of a regex match.  The array holds
-- (captured text, (offset, length)) pairs; only the text is wanted here.
getGroup :: MatchText String -> Int -> String
getGroup groups i = fst (groups ! i)
-- | Splice a replacement template into the context of a regex match.
-- Template language: @\\\\@ emits a literal backslash, a backslash
-- followed by digits emits the text of that capture group, and any other
-- text is copied verbatim.  If the template itself fails to parse, the
-- whole original input is returned unchanged.
rewrite :: (String, MatchText String, String) -> String -> String -> Text
rewrite (before, match, after) input replacement =
  case parseOnly parseSubstitute (T.pack replacement) of
    Left _ -> T.pack input
    Right result -> T.pack before <> result <> T.pack after
  where
    -- Recursive template parser; terminates at end of input.
    parseSubstitute :: Parser Text
    parseSubstitute =
        (endOfInput >> "")
        <|> do
          { _ <- string "\\\\"
          ; rest <- parseSubstitute
          ; return $ "\\" <> rest
          }
        <|> do
          { _ <- string "\\"
          -- 'read' is safe: takeWhile1 guarantees at least one digit.
          ; n <- (fmap (read . T.unpack) $ takeWhile1 isDigit) :: Parser Int
          ; rest <- parseSubstitute
          ; return $ T.pack (getGroup match n) <> rest
          }
        <|> do
          { text <- takeWhile1 (/= '\\')
          ; rest <- parseSubstitute
          ; return $ text <> rest
          }
-- | Apply the rewrite rule registered for a header's name, if there is one;
-- headers with no matching rule pass through untouched.
rewriteHeader :: Map HeaderName RewriteRule -> Header -> Header
rewriteHeader rules (name, value) =
  maybe (name, value)
        (\rule -> (name, regexRewrite rule value))
        (Map.lookup name rules)

-- | Rewrite every header in a list against the same rule map.
rewriteHeaders :: Map HeaderName RewriteRule -> [Header] -> [Header]
rewriteHeaders ruleMap = fmap (rewriteHeader ruleMap)
-- | Apply one rewrite rule to a raw header value: decode it as UTF-8,
-- match it against the rule's regex, and rewrite the first match using
-- the rule's replacement template.  A non-matching value is returned
-- unchanged.
regexRewrite :: RewriteRule -> S.ByteString -> S.ByteString
regexRewrite (RewriteRule _ regex' replacement) input =
  case matchOnceText regex strInput of
    Just match -> encodeUtf8 $ rewrite match strInput strReplacement
    Nothing -> input
  where
    strRegex = T.unpack regex'
    regex :: Regex
    regex = makeRegex strRegex
    -- NOTE(review): 'decodeUtf8' is partial; a header value that is not
    -- valid UTF-8 throws here — confirm upstream guarantees the encoding.
    strInput = T.unpack . decodeUtf8 $ input
    strReplacement = T.unpack replacement
-- | Drop the headers that must not be forwarded verbatim to the upstream:
-- framing headers (Transfer-Encoding, Content-Length) and Host, which the
-- proxied request sets itself.  Header names compare case-insensitively.
filterHeaders :: [Header] -> [Header]
filterHeaders = filter keep
  where
    keep (name, _) = name `notElem` stripped
    stripped = ["Transfer-Encoding", "Content-Length", "Host"]
-- | Index a set of rewrite rules by their header name (case-insensitive)
-- for lookup during header rewriting.  If two rules target the same
-- header, the one sorting later in the set wins in the map.
mkRuleMap :: Set RewriteRule -> Map HeaderName RewriteRule
mkRuleMap = Map.fromList . map (\k -> (CI.mk . encodeUtf8 $ ruleHeader k, k)) . Set.toList
-- | Translate an incoming WAI request into an outgoing http-client
-- request aimed at the configured upstream host.  Request headers are
-- rewritten per configuration, hop-by-hop headers are dropped, and both
-- redirect following and response decompression are disabled so the
-- upstream response is relayed as-is.  The CPP split handles the
-- http-client 0.5 API change (checkStatus/checkResponse, timeout type).
mkRequest :: ReverseProxyConfig -> Wai.Request -> Request
mkRequest rpConfig request =
#if MIN_VERSION_http_client(0, 5, 0)
     NHC.defaultRequest
        { NHC.checkResponse = \_ _ -> return ()
        -- 'reverseTimeout' is in microseconds when present.
        , NHC.responseTimeout = maybe NHC.responseTimeoutNone NHC.responseTimeoutMicro $ reverseTimeout rpConfig
#else
     def
        { NHC.checkStatus = \_ _ _ -> Nothing
        , NHC.responseTimeout = reverseTimeout rpConfig
#endif
        , method = Wai.requestMethod request
        , secure = reversedUseSSL rpConfig
        , host = encodeUtf8 $ reversedHost rpConfig
        , port = reversedPort rpConfig
        , path = Wai.rawPathInfo request
        , queryString = Wai.rawQueryString request
        , requestHeaders = filterHeaders $ rewriteHeaders reqRuleMap (Wai.requestHeaders request)
        -- Stream the client body through: chunked when the length is
        -- unknown, fixed-length otherwise.
        , requestBody =
            case Wai.requestBodyLength request of
                Wai.ChunkedBody -> RequestBodyStreamChunked ($ I.getRequestBodyChunk request)
                Wai.KnownLength n -> RequestBodyStream (fromIntegral n) ($ I.getRequestBodyChunk request)
        , decompress = const False
        , redirectCount = 0
        , cookieJar = Nothing
        , requestVersion = Wai.httpVersion request
        }
  where
    reqRuleMap = mkRuleMap $ rewriteRequestRules rpConfig
-- | A WAI application that forwards each request to the configured
-- upstream host, rewrites headers in both directions, and streams the
-- response body back to the client.
simpleReverseProxy :: Manager -> ReverseProxyConfig -> Wai.Application
simpleReverseProxy mgr rpConfig request sendResponse = bracket
    -- 'bracket' guarantees the upstream connection is released even if
    -- streaming the response to the client fails part-way.
    (NHC.responseOpen proxiedRequest mgr)
    responseClose
    $ \res -> sendResponse $ Wai.responseStream
        (responseStatus res)
        (rewriteHeaders respRuleMap $ responseHeaders res)
        (sendBody $ responseBody res)
  where
    proxiedRequest = mkRequest rpConfig request
    respRuleMap = mkRuleMap $ rewriteResponseRules rpConfig
    -- Pump chunks from the upstream body reader to the client until the
    -- reader yields an empty chunk (end of stream).
    sendBody body send _flush = fix $ \loop -> do
      bs <- body
      unless (S.null bs) $ do
        () <- send $ fromByteString bs
        loop
-- | Static configuration for one reverse-proxy route.
data ReverseProxyConfig = ReverseProxyConfig
    { reversedHost :: Text -- ^ Upstream host requests are forwarded to
    , reversedPort :: Int -- ^ Upstream port
    , reversedUseSSL :: Bool -- ^ Use TLS when talking to the upstream
    , reversingHost :: Text -- ^ Public host name this proxy answers for
    , reversingUseSSL :: !SSLConfig -- ^ TLS configuration on the listening side
    , reverseTimeout :: Maybe Int -- ^ Upstream response timeout in microseconds, if any
    , rewriteResponseRules :: Set RewriteRule -- ^ Header rewrites applied to responses
    , rewriteRequestRules :: Set RewriteRule -- ^ Header rewrites applied to requests
    } deriving (Eq, Ord, Show)
-- | Parse a proxy configuration from JSON/YAML.
--
-- Fix: @reversed-ssl@ previously used @.:@ combined with @.!=@, which
-- only applies the default when the key is present with an explicit
-- @null@ — a missing key failed the whole parse.  Using @.:?@ makes the
-- field genuinely optional (defaulting to 'False'), consistent with the
-- other defaulted fields below.  All previously accepted documents still
-- parse identically.
instance FromJSON ReverseProxyConfig where
    parseJSON (Object o) = ReverseProxyConfig
        <$> o .: "reversed-host"
        <*> o .: "reversed-port"
        <*> o .:? "reversed-ssl" .!= False
        <*> o .: "reversing-host"
        <*> o .:? "ssl" .!= SSLFalse
        <*> o .:? "timeout" .!= Nothing
        <*> o .:? "rewrite-response" .!= Set.empty
        <*> o .:? "rewrite-request" .!= Set.empty
    parseJSON _ = fail "Wanted an object"
-- | Serialise using the same field names the 'FromJSON' instance accepts.
instance ToJSON ReverseProxyConfig where
    toJSON ReverseProxyConfig {..} = object
        [ "reversed-host" .= reversedHost
        , "reversed-port" .= reversedPort
        , "reversed-ssl" .= reversedUseSSL
        , "reversing-host" .= reversingHost
        , "ssl" .= reversingUseSSL
        , "timeout" .= reverseTimeout
        , "rewrite-response" .= rewriteResponseRules
        , "rewrite-request" .= rewriteRequestRules
        ]

-- | Defaults mirror the parser's fallbacks: plain HTTP on port 80, no
-- timeout, and no rewrite rules.  Host names default to empty strings.
instance Default ReverseProxyConfig where
    def = ReverseProxyConfig
        { reversedHost = ""
        , reversedPort = 80
        , reversedUseSSL = False
        , reversingHost = ""
        , reversingUseSSL = SSLFalse
        , reverseTimeout = Nothing
        , rewriteResponseRules = Set.empty
        , rewriteRequestRules = Set.empty
        }
-- | One header-rewrite rule: which header to touch, the regex matched
-- against its value, and the replacement template (see 'rewrite' for the
-- template syntax).
data RewriteRule = RewriteRule
    { ruleHeader :: Text -- ^ Header name (matched case-insensitively)
    , ruleRegex :: Text -- ^ Regex applied to the header value
    , ruleReplacement :: Text -- ^ Replacement template; @\\n@ refers to group /n/
    } deriving (Eq, Ord, Show)

-- | JSON shape: @{header, from, to}@; all three fields are required.
instance FromJSON RewriteRule where
    parseJSON (Object o) = RewriteRule
        <$> o .: "header"
        <*> o .: "from"
        <*> o .: "to"
    parseJSON _ = fail "Wanted an object"

instance ToJSON RewriteRule where
    toJSON RewriteRule {..} = object
        [ "header" .= ruleHeader
        , "from" .= ruleRegex
        , "to" .= ruleReplacement
        ]
| snoyberg/keter | Network/HTTP/ReverseProxy/Rewrite.hs | mit | 7,638 | 0 | 26 | 1,870 | 2,022 | 1,106 | 916 | 184 | 4 |
{-# LANGUAGE OverloadedStrings #-}
-- import Text.HTML.TagSoup
-- import Text.HTML.TagSoup.Tree
import GHC.Int
import Data.Maybe
import Control.Applicative ( (<$>) )
import Network.HTTP.Conduit
import Data.String.Conversions (cs)
import Data.Aeson
import Data.Aeson.Types
import Data.ByteString.Lazy as BL
import Data.Text
-- | Tweets are identified by 64-bit integers.
type TweetId = Int64

-- | URL of a user's timeline fragment endpoint, optionally bounded above
-- by a tweet id (appended as the @max_id@ query parameter).
timelineURI :: String -> Maybe TweetId -> String
timelineURI tweep maxTweetId = base ++ maxParam
  where
    base = "https://twitter.com/i/profiles/show/" ++ tweep
           ++ "/timeline?include_entities=1"
    maxParam = maybe "" (\tid -> "&max_id=" ++ show tid) maxTweetId
-- | Fetch the first page of the @drboolean@ timeline and print the HTML
-- fragment embedded in the JSON payload (prints nothing useful if the
-- payload cannot be decoded — 'htmlPayload' falls back to "").
main :: IO ()
main = do
  body <- simpleHttp (timelineURI "drboolean" Nothing)
  let html = fromMaybe "" $ htmlPayload body
  Prelude.putStr $ cs html
-- | Extract the @items_html@ field from a decoded timeline JSON object,
-- or 'Nothing' when the payload is not a JSON object or lacks the field.
htmlPayload :: BL.ByteString -> Maybe Text
htmlPayload raw = do
  obj <- decode raw
  parseMaybe (.: "items_html") obj
| begriffs/twittective | Main.hs | mit | 855 | 0 | 11 | 136 | 248 | 132 | 116 | 24 | 1 |
-- |
module Language.Imperative.C where
import Language.Imperative
import qualified Language.C as C
-- Aliases specialising the generic 'Language.Imperative' AST to the
-- literal and type representations of the C AST from language-c.
type CType a = C.CDeclaration a
type CLit a = C.CConstant a
type CExpr e s a = Expr (CLit a) (CType a) e s a
type CStmt e s a = Statement (CLit a) (CType a) e s a
type CCase s a = Case (CLit a) s a
type CPat a = Pattern (CLit a) a

-- | Generic fixed point of a functor.
newtype Fix f = Fix (f (Fix f))

-- | Knot-tied C expressions and statements; the two are mutually
-- recursive because expressions may contain statements and vice versa.
newtype FCExpr a = FCExpr (CExpr (FCExpr a) (FCStmt a) a)
newtype FCStmt a = FCStmt (CStmt (FCExpr a) (FCStmt a) a)

-- Wrappers that tie the recursive knot at statement/expression level.
fixS s = FCStmt s
fixE e = FCExpr e
-- | Translate a C statement into the generic imperative AST.
-- NOTE(review): only 'C.CLabel' and 'C.CSwitch' are handled; any other
-- statement constructor is a runtime pattern-match failure.
fromC :: C.CStatement a -> FCStmt a
fromC (C.CLabel i stmt attrs a) =
  fixS $
  SBlock a [ fixS $ SLabel a (Label $ C.identToString i)
           , fromC stmt
           ]
fromC (C.CSwitch expr stmt a) =
  fixS $ SSwitch a (fromCE expr) (toCases stmt) Nothing

-- | Translate a C expression.  NOTE(review): only assignment is handled,
-- and the compound-assignment operator (+=, -=, …) is discarded.
fromCE :: C.CExpression a -> FCExpr a
fromCE (C.CAssign assignOp e1 e2 a) =
  fixE $
  EAssign a (fromCE e1) (fromCE e2) -- TODO use assignOp

-- | Collect the case arms of a switch body.  NOTE(review): expects a
-- chain of 'C.CCase' nodes; each arm is given an empty block (the arm's
-- own statements are dropped) and a non-case tail crashes the match.
toCases :: C.CStatement a -> [CCase (FCStmt a) a]
toCases (C.CCase expr next@(C.CCase{}) a) =
  (Case a (toPattern expr) (fixS $ SBlock a []))
  : toCases next

-- | A case guard must be a constant expression; it becomes a literal pattern.
toPattern :: C.CExpression a -> CPat a
toPattern (C.CConst c) = Pattern (C.annotation c) c
| sinelaw/imperative | src/Language/Imperative/C.hs | mit | 1,197 | 0 | 13 | 288 | 582 | 305 | 277 | 31 | 1 |
module Main (main) where
-- Imports from 'tasty'
import Test.Tasty (defaultMain, testGroup)
-- Imports from 'jupyter'
import Jupyter.Test.Client (clientTests)
import Jupyter.Test.Install (installTests)
import Jupyter.Test.Kernel (kernelTests)
import Jupyter.Test.ZeroMQ (zmqTests)
-- | Entry point: run every test group for the @jupyter@ package under tasty.
main :: IO ()
main = defaultMain allTests
  where
    allTests = testGroup "Tests" [installTests, zmqTests, kernelTests, clientTests]
| gibiansky/jupyter-haskell | tests/Test.hs | mit | 498 | 0 | 7 | 111 | 105 | 65 | 40 | 10 | 1 |
import Control.Arrow ((&&&))
import Data.List (transpose, sort, group)
-- | Elements of a list ordered by ascending frequency of occurrence
-- (ties broken by the elements' own ordering).
colFreq :: Ord a => [a] -> [a]
colFreq column = map snd (sort tallies)
  where
    tallies = [(length run, c) | run@(c : _) <- group (sort column)]

-- | Decode one character per column of the transposed input, picking it
-- from the column's frequency ordering with the supplied selector
-- ('head' = least frequent, 'last' = most frequent).
decode :: Ord a => ([a] -> a) -> [[a]] -> [a]
decode pick rows = [pick (colFreq column) | column <- transpose rows]
-- | Read the puzzle input and print both decodings: the selector 'last'
-- picks each column's most frequent letter, 'head' its least frequent.
main :: IO ()
main = do
  rows <- lines <$> readFile "../input.txt"
  print (decode last rows, decode head rows)
module Language.Pal.Parser
( expr
) where
import Control.Applicative
import Text.Parsec.Char hiding (string)
import Text.Parsec.Combinator
import Text.Parsec.String
import Text.Parsec ((<?>))
import Language.Pal.Types
-- | A parenthesised, whitespace-separated sequence of expressions.
list :: Parser LValue
list = do
  _ <- char '('
  elems <- expr `sepBy` whitespaces
  _ <- char ')'
  return (List elems)
-- | A single space, newline, or tab character.
whitespace :: Parser Char
whitespace = oneOf " \n\t"

-- | One or more whitespace characters.
whitespaces :: Parser String
whitespaces = many1 whitespace
-- | Any single expression.  Alternatives are tried in this order; 'atom'
-- is safe to try first because 'symbolChars' contains no digits, quotes,
-- or brackets, so it fails on those inputs without consuming anything.
expr :: Parser LValue
expr = Atom <$> atom
   <|> list
   <|> Number <$> number
   <|> String <$> string
   <|> Bool <$> bool
   <|> quoted
   <?> "expression"
-- | A symbol: one or more characters drawn from 'symbolChars'.
atom :: Parser LAtom
atom = many1 $ oneOf symbolChars

-- | Characters allowed in symbols.  Digits are absent, so a number is
-- never consumed as an atom.
symbolChars :: String
symbolChars = ['a'..'z'] ++ ['A'..'Z'] ++ "+-*/_!?<>"
-- | A nonnegative integer literal.  ('read' is safe here: the parsed
-- text is guaranteed to be one or more digits.)
number :: Parser LNumber
number = read <$> many1 digit

-- | A double-quoted string.  NOTE(review): no escape sequences — a
-- literal cannot contain the '"' character itself.
string :: Parser LString
string = char '"' *> many (noneOf "\"") <* char '"'

-- | Booleans are written @#t@ and @#f@.
bool :: Parser Bool
bool = char '#' *> ((char 't' *> pure True) <|> (char 'f' *> pure False))
-- | The quote abbreviation: @'e@ parses as @(quote e)@.
quoted :: Parser LValue
quoted = do
  _ <- char '\''
  e <- expr
  return (List [Atom "quote", e])
| samstokes/pal | Language/Pal/Parser.hs | mit | 1,103 | 0 | 14 | 234 | 402 | 215 | 187 | 35 | 1 |
-- Harshad or Niven numbers
-- http://www.codewars.com/kata/54a0689443ab7271a90000c6/
module Codewars.Kata.Harshad where
import Control.Arrow ((&&&))
import Data.Char (digitToInt)
import Data.Maybe (fromMaybe)
-- | A Harshad (Niven) number is divisible by the sum of its decimal digits.
isValid :: Integer -> Bool
isValid n = n `mod` digitSum n == 0
  where digitSum = fromIntegral . sum . map digitToInt . show

-- | The smallest Harshad number strictly greater than the argument.
getNext :: Integer -> Integer
getNext n = head [m | m <- [n + 1 ..], isValid m]

-- | The next @count@ Harshad numbers after the given start (0 if absent).
getSerie :: Int -> Maybe Integer -> [Integer]
getSerie count start = take count (tail (iterate getNext (fromMaybe 0 start)))
| gafiatulin/codewars | src/6 kyu/Harshad.hs | mit | 522 | 0 | 11 | 86 | 190 | 105 | 85 | -1 | -1 |
module Main where
import Control.Monad
import Control.Monad.Except
import Control.Monad.IO.Class (liftIO)
import Data.Char
import System.IO
import Language.Janus.AST
import Language.Janus.Interp
import Language.Janus.Parser
-- | Entry point: greet the user and start the read-eval-print loop
-- inside the Janus interpreter monad.
main :: IO ()
main = do
  putStrLn "Janus REPL"
  putStrLn "type :q to quit"
  escInterp prompt
-- | Run an interpreter action, discarding its result.  If the action
-- ends in an interpreter error the error is reported instead of crashing.
--
-- Fix: dropped the redundant @>> return ()@ — @putStrLn … >> print err@
-- is already @IO ()@.
escInterp :: InterpM a -> IO ()
escInterp m = do
  result <- runInterpM m
  case result of
    Left err -> putStrLn "ERROR" >> print err
    _        -> return ()
-- | The REPL loop: read a line, dispatch meta-commands (only @:q@ is
-- recognised), otherwise parse and evaluate it as a Janus statement and
-- print the result.  Parse and runtime errors are printed and the loop
-- continues.
prompt :: InterpM ()
prompt = do
  liftIO (putStr ">>> " >> hFlush stdout)
  line <- trim <$> liftIO getLine
  case line of
    "" -> prompt
    ":q" -> return ()
    (':':_) -> do
      liftIO (putStrLn "unknown meta command" >> hFlush stdout)
      prompt
    _ -> do
      result <- runLine line
      liftIO (print result >> hFlush stdout)
      prompt
  where
    -- Evaluate one statement; both parse errors and evaluation errors
    -- degrade to printing the error and yielding unit.
    runLine line = case parseStatement line of
      Left parseErr -> do
        liftIO (print parseErr >> hFlush stdout)
        return JUnit
      Right ast -> eval ast `catchError` \err -> do
        liftIO (print err >> hFlush stdout)
        return JUnit
    -- Strip leading and trailing whitespace; 'maybeStuff' buffers
    -- interior whitespace so it is only kept when more text follows.
    trim xs = dropSpaceTail "" $ dropWhile isSpace xs
      where
        dropSpaceTail maybeStuff "" = ""
        dropSpaceTail maybeStuff (x:xs)
          | isSpace x = dropSpaceTail (x:maybeStuff) xs
          | null maybeStuff = x : dropSpaceTail "" xs
          | otherwise = reverse maybeStuff ++ x : dropSpaceTail "" xs
| mkaput/janus | repl/Main.hs | mit | 1,531 | 0 | 17 | 474 | 529 | 248 | 281 | 47 | 5 |
{-# htermination (fromEnumRatio :: Ratio MyInt -> MyInt) #-}
import qualified Prelude
-- Machine-generated input for a Haskell termination analysis (see the
-- htermination pragma above): Peano-arithmetic re-implementations of
-- division, modulus and quotRem, used to define fromEnum on Ratio MyInt.
-- The unusual naming and trailing semicolons come from the generator;
-- edit the generator, not this file.

-- Base data universe: booleans, lists, tuples and numbers built from
-- unary naturals ('Nat') with an explicit sign ('MyInt').
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Tup2 a b = Tup2 a b ;
data Double = Double MyInt MyInt ;
data Float = Float MyInt MyInt ;
data Integer = Integer MyInt ;
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ratio a = CnPc a a;
truncateM0 xu (Tup2 m vv) = m;
fromIntMyInt :: MyInt -> MyInt
fromIntMyInt x = x;
properFractionQ1 xv xw (Tup2 q vw) = q;
-- 'stop'/'error' encode a non-terminating "undefined" for the analyser.
stop :: MyBool -> a;
stop MyFalse = stop MyFalse;
error :: a;
error = stop MyTrue;
-- Truncated subtraction on naturals: Zero when the subtrahend is larger.
primMinusNatS :: Nat -> Nat -> Nat;
primMinusNatS (Succ x) (Succ y) = primMinusNatS x y;
primMinusNatS Zero (Succ y) = Zero;
primMinusNatS x Zero = x;
primDivNatS0 x y MyTrue = Succ (primDivNatS (primMinusNatS x y) (Succ y));
primDivNatS0 x y MyFalse = Zero;
-- Greater-or-equal on naturals.
primGEqNatS :: Nat -> Nat -> MyBool;
primGEqNatS (Succ x) Zero = MyTrue;
primGEqNatS (Succ x) (Succ y) = primGEqNatS x y;
primGEqNatS Zero (Succ x) = MyFalse;
primGEqNatS Zero Zero = MyTrue;
-- Division by repeated subtraction; division by zero is 'error'.
primDivNatS :: Nat -> Nat -> Nat;
primDivNatS Zero Zero = error;
primDivNatS (Succ x) Zero = error;
primDivNatS (Succ x) (Succ y) = primDivNatS0 x y (primGEqNatS x y);
primDivNatS Zero (Succ x) = Zero;
-- Signed quotient, truncating toward zero (sign follows the operands).
primQuotInt :: MyInt -> MyInt -> MyInt;
primQuotInt (Pos x) (Pos (Succ y)) = Pos (primDivNatS x (Succ y));
primQuotInt (Pos x) (Neg (Succ y)) = Neg (primDivNatS x (Succ y));
primQuotInt (Neg x) (Pos (Succ y)) = Neg (primDivNatS x (Succ y));
primQuotInt (Neg x) (Neg (Succ y)) = Pos (primDivNatS x (Succ y));
primQuotInt ww wx = error;
primModNatS0 x y MyTrue = primModNatS (primMinusNatS x (Succ y)) (Succ (Succ y));
primModNatS0 x y MyFalse = Succ x;
-- Modulus by repeated subtraction; modulus by zero is 'error'.
primModNatS :: Nat -> Nat -> Nat;
primModNatS Zero Zero = error;
primModNatS Zero (Succ x) = Zero;
primModNatS (Succ x) Zero = error;
primModNatS (Succ x) (Succ Zero) = Zero;
primModNatS (Succ x) (Succ (Succ y)) = primModNatS0 x y (primGEqNatS x (Succ y));
-- Signed remainder; the sign follows the dividend.
primRemInt :: MyInt -> MyInt -> MyInt;
primRemInt (Pos x) (Pos (Succ y)) = Pos (primModNatS x (Succ y));
primRemInt (Pos x) (Neg (Succ y)) = Pos (primModNatS x (Succ y));
primRemInt (Neg x) (Pos (Succ y)) = Neg (primModNatS x (Succ y));
primRemInt (Neg x) (Neg (Succ y)) = Neg (primModNatS x (Succ y));
primRemInt vy vz = error;
primQrmInt :: MyInt -> MyInt -> Tup2 MyInt MyInt;
primQrmInt x y = Tup2 (primQuotInt x y) (primRemInt x y);
quotRemMyInt :: MyInt -> MyInt -> Tup2 MyInt MyInt
quotRemMyInt = primQrmInt;
-- properFraction on a ratio: quotient plus leftover fractional part.
properFractionVu30 xv xw = quotRemMyInt xv xw;
properFractionQ xv xw = properFractionQ1 xv xw (properFractionVu30 xv xw);
properFractionR0 xv xw (Tup2 vx r) = r;
properFractionR xv xw = properFractionR0 xv xw (properFractionVu30 xv xw);
properFractionRatio :: Ratio MyInt -> Tup2 MyInt (Ratio MyInt)
properFractionRatio (CnPc x y) = Tup2 (fromIntMyInt (properFractionQ x y)) (CnPc (properFractionR x y) y);
-- truncate = integral part of properFraction; fromEnum = truncate.
truncateVu6 xu = properFractionRatio xu;
truncateM xu = truncateM0 xu (truncateVu6 xu);
truncateRatio :: Ratio MyInt -> MyInt
truncateRatio x = truncateM x;
fromEnumRatio :: Ratio MyInt -> MyInt
fromEnumRatio = truncateRatio;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/fromEnum_2.hs | mit | 3,161 | 0 | 9 | 625 | 1,449 | 758 | 691 | 70 | 1 |
module Main where
import Interpreter
import PowAST
import qualified Parse as P
import qualified Data.Map as M
import Text.ParserCombinators.Parsec (parse)
-- | Entry point: start the REPL with empty variable and function tables.
main :: IO ()
main = runMoreCode M.empty M.empty
-- | One iteration of the read-eval-print loop: parse a line as an
-- expression, evaluate it wrapped in 'Write' so its value is printed,
-- thread the possibly-updated variable table into the next iteration,
-- and report parse errors without aborting the loop.
--
-- Fix: @putStrLn $ show e@ replaced with the equivalent idiom @print e@.
-- NOTE(review): without an explicit @hFlush stdout@ the "> " prompt may
-- not appear before 'getLine' when stdout is block-buffered — confirm.
runMoreCode :: SymTab -> FunTab -> IO ()
runMoreCode vtab ftab = do
  putStr "> "
  input <- getLine
  let ast = parse P.expr "" input
  case ast of
    Right ast' -> do
      (_, vtab') <- evalExpr vtab ftab (Write ast')
      putStr "\n"
      runMoreCode vtab' ftab
    Left e -> do
      print e
      putStr "\n"
      runMoreCode vtab ftab
| rloewe/petulant-octo-wallhack | Repl.hs | mit | 563 | 0 | 15 | 143 | 207 | 102 | 105 | 22 | 2 |
-- Project Euler 31: in how many ways can £2 (200p) be made from standard
-- UK coins?  'answer' counts the states enumerated by repeatedly
-- applying 'next' to the start state 'sums'.
answer = length $ combinations sums

-- Coin denominations in pence.  (Kept for reference only: the guards of
-- 'next' hard-code the denominations.)
coins = [200,100,50,20,10,5,2,1]

-- Start state: position k holds the total pence carried by coins!!k,
-- so initially a single 200p coin and nothing else.
sums = [200,0,0,0,0,0,0,0]

-- All states reachable from a state, including the state itself.
combinations :: [Int] -> [[Int]]
combinations ts
   | next ts == [] = [ts]
   | otherwise = ts : combinations(next ts)

-- Successor state: break value from the smallest occupied denomination
-- (scanning from 2p upwards) into smaller coins, carrying remainders so
-- that the enumeration is intended to visit each combination once.
-- Returns [] once only 1p coins remain, i.e. enumeration is finished.
next :: [Int] -> [Int]
next ts
   | ts!!6>0 = fst' 6 ++ [ (ts!!6)-2, (ts!!7)+2]
   | ts!!5>0&&mod2 = fst' 5 ++ [ts!!5- 5, sum(snd' 6)+ 5, 0]
   | ts!!5>0 = fst' 5 ++ [ts!!5- 5, sum(snd' 6)+ 4, 1]
   | ts!!4>0 = fst' 4 ++ [ts!!4- 10, sum (snd' 5)+ 10, 0,0]
   | ts!!3>0 = fst' 3 ++ [ts!!3- 20, sum (snd' 4)+ 20, 0,0,0]
   | ts!!2>0&&mod20 = fst' 2 ++ [ts!!2- 50, sum (snd' 3)+ 50, 0,0,0,0]
   | ts!!2>0 = fst' 2 ++ [ts!!2- 50, sum (snd' 3)+ 40, 10,0,0,0]
   | ts!!1>0 = fst' 1 ++ [ts!!1-100, sum (snd' 2)+100, 0,0,0,0,0]
   | ts!!0>0 = [0, 200, 0,0,0,0,0,0]
   | otherwise = [] where
   -- prefix/suffix of the state split at position n
   fst' n = fst $ splitAt n ts
   snd' n = snd $ splitAt n ts
   -- parity/divisibility carried value checks used by the guards above
   mod2 = (sum(snd' 6)+ 5) `mod` 2 == 0
   mod20 = (sum(snd' 3)+50) `mod` 20 == 0
-- down' :: [Int] -> [Int]
-- down' ts
-- | ts!!6>0 = fst' 6 ++ [(ts!!6)-2, (ts!!7)+2]
-- | take 2 (snd' 5)==[5,0] = fst' 5 ++ [0, sum (snd' 5), 0]
-- | ts!!5>0 = fst' 5 ++ [ts!!5-5, ts!!6+4, ts!!7+1]
-- | take 3 (snd' 4)==[10,0,0] = fst' 4 ++ [0, sum (snd' 4), 0, 0]
-- | ts!!4>0 = fst' 4 ++ [ts!!4-10, ts!!5+10] ++ snd' 6
-- | take 4 (snd' 3)==[20,0,0,0] = fst' 3 ++ [0, sum (snd' 3), 0, 0, 0]
-- | ts!!3>0 = fst' 3 ++ [ts!!3-20, ts!!4+20] ++ snd' 5
-- | take 5 (snd' 2)==[50,0,0,0,0] = fst' 2 ++ [0, sum (snd' 2), 0, 0, 0, 0]
-- | ts!!2>0 = fst' 2 ++ [ts!!2-50, ts!!3+40, ts!!4+10]++ snd' 5
-- | take 6 (snd' 1)==[100,0,0,0,0,0] = fst' 1 ++ [0, sum (snd' 1), 0, 0, 0, 0, 0]
-- | ts!!1>0 = fst' 1 ++ [(ts!!1)-100,(ts!!2)+100] ++ snd' 3
-- | ts!!0==200 = [0, 200, 0, 0, 0, 0, 0, 0]
-- | otherwise = [] where
-- fst' n = fst $ splitAt n ts
-- snd' n = snd $ splitAt n ts
-- downs :: [Int] -> [[Int]]
-- downs ts | next == [] = ts
-- | otherwise = ts : downs next where
-- next = down ts
--
-- down :: [Int] -> [Int]
-- down ts | ts!!6 > 0 = [zipWith (+) ts [ 0, 0, 0, 0, 0, 0,-1, 2] | i<-[1..(ts!!6)]] -- 2p
-- | ts!!5 > 0 = [zipWith (+) ts [ 0, 0, 0, 0, 0,-1, 2, 1] | i<-[1..(ts!!6)]] -- 5p
-- | ts!!4 > 0 = [zipWith (+) ts [ 0, 0, 0, 0,-1, 2, 0, 0] | i<-[1..(ts!!6)]] -- 10p
-- | ts!!3 > 0 = [zipWith (+) ts [ 0, 0, 0,-1, 2, 0, 0, 0] | i<-[1..(ts!!6)]] -- 20p
-- | ts!!2 > 0 = [zipWith (+) ts [ 0, 0,-1, 2, 1, 0, 0, 0] | i<-[1..(ts!!6)]] -- 50p
-- | ts!!1 > 0 = [zipWith (+) ts [ 0,-1, 2, 0, 0, 0, 0, 0] | i<-[1..(ts!!6)]] -- 100p
-- | ts!!0 > 0 = [zipWith (+) ts [-1, 2, 0, 0, 0, 0, 0, 0] | i<-[1..(ts!!6)]] -- 200p
-- | otherwise = []
| yuto-matsum/contest-util-hs | src/Euler/031.hs | mit | 3,090 | 0 | 13 | 1,123 | 815 | 445 | 370 | 23 | 1 |
module Options (
extractOptions,
isHelp,
isVerbose,
imgOptions,
eqInlineOptions,
fontSize,
outputFile,
packages,
showHelp
) where
import System.Console.GetOpt
import qualified Data.Text as T (pack, unpack, split)
-- | Command-line options recognised by texerupter.  Flags carrying a
-- 'String' take a required argument.
data Flag = Help | Verbose | Package String | FontSize String | Output String
          | ImgOpts String | EqInlineOpts String deriving (Show,Eq)

-- | GetOpt descriptor table mapping switches to 'Flag' values; also the
-- source of the usage text produced by 'showHelp'.
options :: [OptDescr Flag]
options =
  [ Option ['h'] ["help"] (NoArg Help) "Show this help and exit"
  , Option ['v'] ["verbose"] (NoArg Verbose) "Verbose and keep LaTeX files"
  , Option ['o'] ["output"] (ReqArg Output "FILE") "Output FILE"
  , Option ['f'] ["fontsize"] (ReqArg FontSize "FONTSIZE") "change the font size in LaTeX files"
  , Option ['p'] ["package"] (ReqArg Package "PACKAGES")
      "Comma separated list of packages to be included in the LaTeX"
  , Option [] ["img"] (ReqArg ImgOpts "STRING")
      "Attributes for <img>-tags generated from the <tex>-tags"
  , Option [] ["eq-inline"] (ReqArg EqInlineOpts "STRING")
      "Attributes for <img>-tags generated from the <$>-tags"
  ]
-- | Parse command-line arguments into recognised flags and positional
-- arguments; any unrecognised option aborts with the usage text.
extractOptions :: [String] -> IO ([Flag], [String])
extractOptions argv =
  case getOpt Permute options argv of
    (flags, positional, []) -> return (flags, positional)
    (_, _, errors)          -> ioError (userError (concat errors ++ showHelp))
-- | True when the help flag was given.
isHelp :: [Flag] -> Bool
isHelp = elem Help
-- | True when the verbose flag was given.
isVerbose :: [Flag] -> Bool
isVerbose = elem Verbose
-- | The first @--output@ argument among the flags, or the default name.
outputFile :: String -> [Flag] -> String
outputFile defaultname flags =
  case [f | Output f <- flags] of
    (f : _) -> f
    []      -> defaultname
-- | The first @--eq-inline@ argument among the flags, or the empty string.
eqInlineOptions :: [Flag] -> String
eqInlineOptions flags =
  case [s | EqInlineOpts s <- flags] of
    (s : _) -> s
    []      -> ""
-- | The first @--img@ argument among the flags, or the empty string.
imgOptions :: [Flag] -> String
imgOptions flags =
  case [s | ImgOpts s <- flags] of
    (s : _) -> s
    []      -> ""
-- | The first @--fontsize@ argument among the flags, or the default size.
fontSize :: String -> [Flag] -> String
fontSize sizedefault flags =
  case [s | FontSize s <- flags] of
    (s : _) -> s
    []      -> sizedefault
-- | Every package named by @--package@ flags, in order; each flag's
-- argument is a comma-separated list that is split apart here.
packages :: [Flag] -> [String]
packages flags = concatMap split [s | Package s <- flags]
  where split = map T.unpack . T.split (== ',') . T.pack
-- | Usage text listing every supported option.
showHelp :: String
showHelp = usageInfo header options
  where
    header = "USAGE: texerupter [-hv] [-o FILE] [-f FONTSIZE]\
             \ [-p PACKAGES] [--img=STRING] [--eq-inline=STRING] FILE \n"
| dino-r/TexErupter | src/Options.hs | mit | 2,863 | 4 | 14 | 720 | 860 | 469 | 391 | 69 | 2 |
import Network.HTTP.Server
import Network.HTTP.Server.Logger
import Network.HTTP.Server.HtmlForm as Form
import Network.URL as URL
import Text.JSON
import Text.JSON.String(runGetJSON)
import Text.XHtml
import Codec.Binary.UTF8.String
import Control.Exception(try,SomeException)
import System.FilePath
import Data.List(isPrefixOf)
-- | A demo HTTP server on port 8888.
--
-- GET  serves the requested path from the working directory (".html"
--      files as HTML, everything else as a script), with a generated
--      404 page when the file cannot be read.
-- POST echoes back URL-encoded and multipart form contents, answers
--      JSON bodies with a small JSON document, and rejects other
--      content types with 400.
main :: IO ()
main = serverWith defaultConfig { srvLog = stdLogger, srvPort = 8888 }
  $ \_ url request ->
    case rqMethod request of
      GET ->
        do let ext = takeExtension (url_path url)
           mb_txt <- try (readFile (url_path url))
           case mb_txt of
             Right a -> return $ if ext == ".html"
                                    then sendHTML OK (primHtml a)
                                    else sendScript OK a
             Left e -> return $ sendHTML NotFound $
                       thehtml $ concatHtml
                         [ thead noHtml
                         , body $ concatHtml
                             [ toHtml "I could not find "
                             , toHtml $ exportURL url { url_type = HostRelative }
                             , toHtml ", so I made this with XHTML combinators. "
                             , toHtml $ hotlink "example.html" (toHtml "Try this instead.")
                             ]
                         ]
               -- The binding below exists only to pin the exception type
               -- caught by 'try' above.
               where _hack :: SomeException
                     _hack = e -- to specify the type

      POST ->
        return $
        case findHeader HdrContentType request of
          Just ty
            | "application/x-www-form-urlencoded" `isPrefixOf` ty ->
              case URL.importParams txt of
                Just fields -> sendHTML OK $
                  toHtml "You posted a URL encoded form:" +++ br +++
                  toHtml (show fields) +++ br +++
                  hotlink "example.html" (toHtml "back")
                Nothing -> sendHTML BadRequest $
                  toHtml "Could not understand URL encoded form data"

            | "multipart/form-data" `isPrefixOf` ty ->
              case Form.fromRequest request of
                Just fields -> sendHTML OK $
                  toHtml "You posted a multipart form:" +++ br +++
                  toHtml (show (Form.toList fields)) +++ br +++
                  hotlink "example.html" (toHtml "back")
                Nothing -> sendHTML BadRequest $
                  toHtml "Could not understand multipart form data"

            | "application/json" `isPrefixOf` ty ->
              case runGetJSON readJSValue txt of
                Right val -> sendJSON OK $
                  JSObject $ toJSObject [("success", JSString $ toJSString "hello")]
                Left err -> sendJSON BadRequest $
                  JSObject $ toJSObject [("error", JSString $ toJSString err)]

          x -> sendHTML BadRequest $
            toHtml $ "I don't know how to deal with POSTed content" ++
                     " of type " ++ show x

        -- we assume UTF8 encoding
        where txt = decodeString (rqBody request)

      _ -> return $ sendHTML BadRequest $ toHtml "I don't understand"
-- | Build a plain-text response.  The body is UTF-8 encoded and the
-- Content-Length is computed from the encoded form, so it counts bytes.
--
-- Fix: the MIME type was previously attached with 'HdrContentEncoding'
-- (a second time), leaving the response without any Content-Type header.
-- NOTE(review): 'sendJSON' / 'sendHTML' / 'sendScript' also insert their
-- own Content-Type on top of this one; if 'insertHeader' appends rather
-- than replaces, consider 'replaceHeader' there — confirm against the
-- http-server API.
sendText :: StatusCode -> String -> Response String
sendText s v = insertHeader HdrContentLength (show (length txt))
             $ insertHeader HdrContentEncoding "UTF-8"
             $ insertHeader HdrContentType "text/plain"
             $ (respond s :: Response String) { rspBody = txt }
  where txt = encodeString v
-- | Respond with a rendered JSON value.
sendJSON :: StatusCode -> JSValue -> Response String
sendJSON status val =
  insertHeader HdrContentType "application/json" (sendText status (showJSValue val ""))

-- | Respond with rendered XHTML.
sendHTML :: StatusCode -> Html -> Response String
sendHTML status html =
  insertHeader HdrContentType "text/html" (sendText status (renderHtml html))

-- | Respond with a JavaScript source body.
sendScript :: StatusCode -> String -> Response String
sendScript status source =
  insertHeader HdrContentType "application/x-javascript" (sendText status source)
| GaloisInc/http-server | example/SimpleWeb.hs | mit | 3,778 | 0 | 26 | 1,292 | 955 | 476 | 479 | 78 | 9 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE FlexibleContexts #-}
{- |
Module : Control.Lens.SemiIso
Description : Semi-isomorphisms.
Copyright : (c) Paweł Nowak
License : MIT
Maintainer : Paweł Nowak <[email protected]>
Stability : experimental
Semi-isomorphisms were motivated by reversible parsing/pretty printing. For example
we can map a number 12 to a string "12" (and the other way around). But the isomorphism
is partial - we cannot map the string "forty-two" to a number.
Another example: when parsing a list of numbers like "12_53___42" we want to skip underscores
between numbers (and forget about them). During pretty printing we have to decide how many
underscores should we insert between numbers. Let's say we insert a single underscore. But now
@prettyPrint (parse "12_53___42") = "12_53_42"@ and not "12_53___42". We have to weaken
isomorphism laws to allow such semi-iso. Notice that
> parse (prettyPrint (parse "12_53___42")) = parse "12_53___42"
> prettyPrint (parse (prettyPrint [12, 53, 42])) = prettyPrint [12, 53, 42]
Our semi-isomorphisms will obey weakened laws:
> apply i >=> unapply i >=> apply i = apply i
> unapply i >=> apply i >=> unapply i = unapply i
When you see an "Either String a", the String is usually an error message.
Disclaimer: the name "semi-isomorphism" is fictitious and made up for this library.
Any resemblance to known mathematical objects of the same name is purely coincidental.
-}
module Control.Lens.SemiIso (
-- * Semi-isomorphism types.
SemiIso,
SemiIso',
ASemiIso,
ASemiIso',
-- * Patterns.
pattern SemiIso,
-- * Constructing semi-isos.
semiIso,
cloneSemiIso,
-- * Reified semi-isos.
ReifiedSemiIso'(..),
reifySemiIso,
-- * Consuming semi-isos.
apply,
unapply,
withSemiIso,
viewSemiIso,
-- * Common semi-isomorphisms and isomorphisms.
unit,
swapped,
associated,
constant,
exact,
bifiltered,
alwaysFailing,
-- * Semi-isos for numbers.
_Negative,
-- * Transforming semi-isos.
rev,
prod,
elimFirst,
elimSecond,
attempt,
attemptAp,
attemptUn,
attempt_,
attemptAp_,
attemptUn_,
-- * Bidirectional folds.
bifoldr,
bifoldr1,
bifoldl,
bifoldl1,
bifoldr_,
bifoldr1_,
bifoldl_,
bifoldl1_
) where
import Control.Arrow (Kleisli(..))
import Control.Category
import Control.Category.Structures
import Control.Lens.Internal.SemiIso
import Control.Lens.Iso
import Data.Foldable
import Data.Functor.Identity
import Data.Profunctor.Exposed
import Data.Traversable
import Prelude hiding (id, (.))
-- | A semi-isomorphism is a partial isomorphism with weakened laws.
--
-- Should satisfy laws:
--
-- > apply i >=> unapply i >=> apply i = apply i
-- > unapply i >=> apply i >=> unapply i = unapply i
--
-- Every 'Prism' is a 'SemiIso'.
-- Every 'Iso' is a 'Prism'.
--
-- Encoded profunctor-style: a 'SemiIso' transforms an "exposed"
-- profunctor, with failure carried by @Either String@ (the 'Exposed'
-- class comes from "Data.Profunctor.Exposed").
type SemiIso s t a b = forall p f. (Exposed (Either String) p, Traversable f)
                    => p a (f b) -> p s (f t)

-- | Non-polymorphic variant of 'SemiIso'.
type SemiIso' s a = SemiIso s s a a

-- | When you see this as an argument to a function, it expects a 'SemiIso'.
--
-- 'Retail' (from "Control.Lens.Internal.SemiIso") is the concrete
-- profunctor used to take a semi-iso apart; see 'withSemiIso'.
type ASemiIso s t a b = Retail a b a (Identity b) -> Retail a b s (Identity t)

-- | When you see this as an argument to a function, it expects a 'SemiIso''.
type ASemiIso' s a = ASemiIso s s a a

-- | A nice pattern synonym for SemiIso's. Gives you the two functions, just like
-- 'viewSemiIso' or 'fromSemiIso'.
--
-- Match-only (unidirectional) synonym: it destructures but cannot construct.
pattern SemiIso sa bt <- (viewSemiIso -> (sa, bt))

-- | A semi-iso stored in a container.
newtype ReifiedSemiIso' s a = ReifiedSemiIso' { runSemiIso :: SemiIso' s a }
-- | Reified semi-isos compose categorically.  Note the operand swap in
-- @(.)@: composing optics runs "backwards" relative to function
-- composition, so @f . g@ applies @g@ first in the 'apply' direction.
instance Category ReifiedSemiIso' where
    id = ReifiedSemiIso' id
    ReifiedSemiIso' f . ReifiedSemiIso' g = ReifiedSemiIso' (g . f)

-- | Product structure is borrowed from @Kleisli (Either String)@: each
-- semi-iso is unpacked into its two partial functions, lifted with the
-- corresponding 'Kleisli' combinator, and repacked with 'semiIso'.
instance Products ReifiedSemiIso' where
    -- TODO: pattern synonyms dont work here for some reason
    first (ReifiedSemiIso' ai) = withSemiIso ai $ \f g ->
        ReifiedSemiIso' $ cloneSemiIso $
            semiIso (runKleisli $ first $ Kleisli f)
                    (runKleisli $ first $ Kleisli g)

    second (ReifiedSemiIso' ai) = withSemiIso ai $ \f g ->
        ReifiedSemiIso' $ cloneSemiIso $
            semiIso (runKleisli $ second $ Kleisli f)
                    (runKleisli $ second $ Kleisli g)

    ReifiedSemiIso' ai *** ReifiedSemiIso' ai' = ReifiedSemiIso' $
        withSemiIso ai $ \f g -> withSemiIso ai' $ \f' g' ->
            semiIso (runKleisli $ Kleisli f *** Kleisli f')
                    (runKleisli $ Kleisli g *** Kleisli g')

-- | Coproduct structure, same Kleisli-borrowing technique as 'Products'.
instance Coproducts ReifiedSemiIso' where
    left (ReifiedSemiIso' ai) = withSemiIso ai $ \f g ->
        ReifiedSemiIso' $ cloneSemiIso $
            semiIso (runKleisli $ left $ Kleisli f)
                    (runKleisli $ left $ Kleisli g)

    right (ReifiedSemiIso' ai) = withSemiIso ai $ \f g ->
        ReifiedSemiIso' $ cloneSemiIso $
            semiIso (runKleisli $ right $ Kleisli f)
                    (runKleisli $ right $ Kleisli g)

    ReifiedSemiIso' ai +++ ReifiedSemiIso' ai' = ReifiedSemiIso' $
        withSemiIso ai $ \f g -> withSemiIso ai' $ \f' g' ->
            semiIso (runKleisli $ Kleisli f +++ Kleisli f')
                    (runKleisli $ Kleisli g +++ Kleisli g')

-- | Alternative-like structure: 'cempty' fails in both directions and
-- @/+/@ tries the left semi-iso, falling back to the right one.
instance CatPlus ReifiedSemiIso' where
    cempty = ReifiedSemiIso' $ alwaysFailing "cempty"

    ReifiedSemiIso' ai /+/ ReifiedSemiIso' ai' = ReifiedSemiIso' $
        withSemiIso ai $ \f g -> withSemiIso ai' $ \f' g' ->
            semiIso (runKleisli $ Kleisli f /+/ Kleisli f')
                    (runKleisli $ Kleisli g /+/ Kleisli g')
-- | Constructs a semi isomorphism from a pair of functions that can
-- fail with an error message.
--
-- @expose@ / @merge@ come from "Data.Profunctor.Exposed" and move the
-- @Either String@ layer out of / back into the profunctor, while 'dimap'
-- installs the two partial functions on either side.
semiIso :: (s -> Either String a) -> (b -> Either String t) -> SemiIso s t a b
semiIso sa bt = merge . dimap sa (sequenceA . fmap bt) . expose
-- | Clones a semi-iso, re-generalizing a monomorphic 'ASemiIso'.
cloneSemiIso :: ASemiIso s t a b -> SemiIso s t a b
cloneSemiIso sio = case viewSemiIso sio of
    (sa, bt) -> semiIso sa bt

-- | Applies the 'SemiIso', i.e. runs it in the forward direction.
apply :: ASemiIso s t a b -> s -> Either String a
apply = fst . viewSemiIso

-- | Applies the 'SemiIso' in the opposite direction.
unapply :: ASemiIso s t a b -> b -> Either String t
unapply = snd . viewSemiIso
-- | Extracts the two functions that characterize the 'SemiIso'.
--
-- Works by running the semi-iso at the concrete 'Retail' profunctor,
-- seeded with @Right@ arrows, and reading the two transformed arrows
-- back out ('rmap' strips the 'Identity'/'Traversable' wrapping).
withSemiIso :: ASemiIso s t a b
            -> ((s -> Either String a) -> (b -> Either String t) -> r)
            -> r
withSemiIso ai k = case ai (Retail Right (Right . Identity)) of
    Retail sa bt -> k sa (rmap (runIdentity . sequenceA) bt)

-- | Extracts the two functions that characterize the 'SemiIso'.
viewSemiIso :: ASemiIso s t a b -> (s -> Either String a, b -> Either String t)
viewSemiIso ai = withSemiIso ai (,)

-- | Reifies a semi-iso, wrapping it into the 'ReifiedSemiIso'' newtype.
reifySemiIso :: ASemiIso' s a -> ReifiedSemiIso' s a
reifySemiIso ai = ReifiedSemiIso' $ cloneSemiIso ai
-- | A trivial isomorphism between @a@ and @(a, ())@.
unit :: Iso' a (a, ())
unit = iso (\x -> (x, ())) fst

-- | Products are associative: regroup a right-nested pair as a
-- left-nested one, and back.
associated :: Iso' (a, (b, c)) ((a, b), c)
associated = iso groupLeft groupRight
  where
    groupLeft  (x, (y, z)) = ((x, y), z)
    groupRight ((x, y), z) = (x, (y, z))
-- | \-> Always returns the argument.
--
-- \<- Maps everything to a @()@.
--
-- Note that this isn't an @Iso'@ because
--
-- > unapply (constant x) >=> apply (constant x) /= id
--
-- But SemiIso laws do hold.
constant :: a -> SemiIso' () a
constant x = semiIso (const (Right x)) (const (Right ()))

-- | \-> Filters out all values not equal to the argument.
--
-- \<- Always returns the argument.
exact :: Eq a => a -> SemiIso' a ()
exact x = semiIso match emit
  where
    match y
        | x == y    = Right ()
        | otherwise = Left "exact: not equal"
    emit _ = Right x

-- | Like 'filtered' but checks the predicate in both ways.
bifiltered :: (a -> Bool) -> SemiIso' a a
bifiltered p = semiIso sieve sieve
  where
    sieve v = if p v
        then Right v
        else Left "bifiltered: predicate failed"

-- | A semi-iso that fails in both directions with the given message.
alwaysFailing :: String -> SemiIso s t a b
alwaysFailing msg = semiIso bail bail
  where
    bail _ = Left msg
-- | \-> Matches only negative numbers, turns it into a positive one.
--
-- \<- Matches only positive numbers, turns it into a negative one.
_Negative :: Real a => SemiIso' a a
_Negative = semiIso toPositive toNegative
  where
    toPositive x =
        if x < 0
            then Right (negate x)
            else Left "_Negative: apply expected a negative number"
    toNegative x =
        if x >= 0
            then Right (negate x)
            else Left "_Negative: unapply expected a positive number"

-- | Reverses a 'SemiIso': swaps the apply and unapply directions.
rev :: ASemiIso s t a b -> SemiIso b a t s
rev ai = case viewSemiIso ai of
    (sa, bt) -> semiIso bt sa
-- | A product of semi-isos: pairs up two semi-isos componentwise by
-- routing through the reified 'Products' instance.
prod :: ASemiIso' s a -> ASemiIso' t b -> SemiIso' (s, t) (a, b)
prod a b = runSemiIso (reifySemiIso a *** reifySemiIso b)

-- | Uses an @SemiIso' a ()@ to construct a @SemiIso' (a, b) b@,
-- i.e. eliminates the first pair element.
elimFirst :: ASemiIso' s () -> SemiIso' (s, t) t
elimFirst ai = swapped . elimSecond ai

-- | Uses an @SemiIso b ()@ to construct a @SemiIso (a, b) a@,
-- i.e. eliminates the second pair element.
--
-- The second component is collapsed to @()@ by @ai@ and then the
-- resulting @(t, ())@ is stripped with the reversed 'unit' iso.
elimSecond :: ASemiIso' s () -> SemiIso' (t, s) t
elimSecond ai = runSemiIso (id *** reifySemiIso ai) . rev unit
-- | Transforms the semi-iso so that applying it in both directions never fails,
-- but instead catches any errors and returns them as an @Either String a@.
attempt :: ASemiIso s t a b -> SemiIso s (Either String t) (Either String a) b
attempt ai = attemptAp (attemptUn ai)

-- | Transforms the semi-iso so that applying it in direction (->) never fails,
-- but instead catches any errors and returns them as an @Either String a@.
attemptAp :: ASemiIso s t a b -> SemiIso s t (Either String a) b
attemptAp ai = case viewSemiIso ai of
    (sa, bt) -> semiIso (Right . sa) bt

-- | Transforms the semi-iso so that applying it in direction (<-) never fails,
-- but instead catches any errors and returns them as an @Either String a@.
attemptUn :: ASemiIso s t a b -> SemiIso s (Either String t) a b
attemptUn ai = case viewSemiIso ai of
    (sa, bt) -> semiIso sa (Right . bt)
-- | Forgets the error message of an 'Either', keeping only the success.
discard :: Either a b -> Maybe b
discard (Left _)  = Nothing
discard (Right x) = Just x
-- | Transforms the semi-iso like 'attempt', but ignores the error message.
--
-- 'rmap' maps 'discard' over the output side, 'lmap' over the input side.
attempt_ :: ASemiIso s t a b -> SemiIso s (Maybe t) (Maybe a) b
attempt_ ai = rmap (fmap discard) . attempt ai . lmap discard

-- | Transforms the semi-iso like 'attemptAp', but ignores the error message.
--
-- Very useful when you want to bifold using a prism.
attemptAp_ :: ASemiIso s t a b -> SemiIso s t (Maybe a) b
attemptAp_ ai = attemptAp ai . lmap discard

-- | Transforms the semi-iso like 'attemptUn', but ignores the error message.
attemptUn_ :: ASemiIso s t a b -> SemiIso s (Maybe t) a b
attemptUn_ ai = rmap (fmap discard) . attemptUn ai
-- | Monadic counterpart of 'foldl1' (or non-empty list counterpart of 'foldlM').
-- An empty list 'fail's in the underlying monad.
foldlM1 :: Monad m => (a -> a -> m a) -> [a] -> m a
foldlM1 step list = case list of
    (y:ys) -> foldlM step y ys
    []     -> fail "foldlM1: empty list"

-- | Monadic counterpart of 'foldr1' (or non-empty list counterpart of 'foldrM').
-- An empty list 'fail's in the underlying monad.
foldrM1 :: Monad m => (a -> a -> m a) -> [a] -> m a
foldrM1 step = go
  where
    go [y]    = return y
    go (y:ys) = go ys >>= step y
    go []     = fail "foldrM1: empty list"
-- | Monadic counterpart of 'unfoldr'.  Returns the final seed together
-- with the produced elements, in production order.
unfoldrM :: Monad m => (a -> m (Maybe (b, a))) -> a -> m (a, [b])
unfoldrM step seed =
    step seed >>= \outcome -> case outcome of
        Nothing -> return (seed, [])
        Just (out, seed') -> do
            (finalSeed, outs) <- unfoldrM step seed'
            return (finalSeed, out : outs)

-- | A variant of 'unfoldrM' that always produces a non-empty list: the
-- final seed becomes the last element instead of being returned apart.
unfoldrM1 :: Monad m => (a -> m (Maybe (a, a))) -> a -> m [a]
unfoldrM1 step seed =
    step seed >>= \outcome -> case outcome of
        Nothing -> return [seed]
        Just (out, seed') -> do
            rest <- unfoldrM1 step seed'
            return (out : rest)
-- | Monadic counterpart of 'unfoldl'.  Elements are accumulated in
-- reverse, so the result list ends with the first element produced.
unfoldlM :: Monad m => (a -> m (Maybe (a, b))) -> a -> m (a, [b])
unfoldlM step = loop []
  where
    loop acc seed = step seed >>= maybe (return (seed, acc)) continue
      where
        continue (seed', out) = loop (out : acc) seed'

-- | A variant of 'unfoldlM' that always produces a non-empty list: the
-- final seed is prepended as the head of the result.
unfoldlM1 :: Monad m => (a -> m (Maybe (a, a))) -> a -> m [a]
unfoldlM1 step = loop []
  where
    loop acc seed = step seed >>= maybe (return (seed : acc)) continue
      where
        continue (seed', out) = loop (out : acc) seed'
-- | Constructs a bidirectional fold. Works with prisms.
--
-- \-> Right unfolds using the (->) part of the given semi-iso, until it fails.
--
-- \<- Right folds using the (<-) part of the given semi-iso.
--
-- ('attemptAp_' converts apply-direction failure into 'Nothing', which is
-- what the underlying @bifoldr_@ uses as its stop condition.)
bifoldr :: ASemiIso' a (b, a) -> SemiIso' a (a, [b])
bifoldr = bifoldr_ . attemptAp_

-- | Constructs a bidirectional fold. Works with prisms.
--
-- \-> Right unfolds using the (->) part of the given semi-iso, until it fails.
-- It should produce a non-empty list.
--
-- \<- Right folds a non-empty list using the (<-) part of the given semi-iso.
bifoldr1 :: ASemiIso' a (a, a) -> SemiIso' a [a]
bifoldr1 = bifoldr1_ . attemptAp_

-- | Constructs a bidirectional fold. Works with prisms.
--
-- \-> Left unfolds using the (->) part of the given semi-iso, until it fails.
--
-- \<- Left folds using the (<-) part of the given semi-iso.
bifoldl :: ASemiIso' a (a, b) -> SemiIso' a (a, [b])
bifoldl = bifoldl_ . attemptAp_

-- | Constructs a bidirectional fold. Works with prisms.
--
-- \-> Left unfolds using the (->) part of the given semi-iso, until it fails.
-- It should produce a non-empty list.
--
-- \<- Left folds a non-empty list using the (<-) part of the given semi-iso.
bifoldl1 :: ASemiIso' a (a, a) -> SemiIso' a [a]
bifoldl1 = bifoldl1_ . attemptAp_
-- | Constructs a bidirectional fold.
--
-- \-> Right unfolds using the (->) part of the given semi-iso.
--
-- \<- Right folds using the (<-) part of the given semi-iso.
bifoldr_ :: ASemiIso a a (Maybe (b, a)) (b, a) -> SemiIso' a (a, [b])
bifoldr_ ai = semiIso (uf ai) (f ai)
  where
    -- unapply ai :: (b, a) -> Either String a; curried it becomes a step
    -- function for 'foldrM', and 'uncurry' adapts it to the (a, [b]) input.
    f = uncurry . foldrM . curry . unapply
    -- apply ai :: a -> Either String (Maybe (b, a)), exactly an 'unfoldrM' step.
    uf = unfoldrM . apply

-- | Constructs a bidirectional fold.
--
-- \-> Right unfolds using the (->) part of the given semi-iso. It should
-- produce a non-empty list.
--
-- \<- Right folds a non-empty list using the (<-) part of the given semi-iso.
bifoldr1_ :: ASemiIso a a (Maybe (a, a)) (a, a) -> SemiIso' a [a]
bifoldr1_ ai = semiIso (uf ai) (f ai)
  where
    -- Same shapes as 'bifoldr_', but over the non-empty variants.
    f = foldrM1 . curry . unapply
    uf = unfoldrM1 . apply

-- | Constructs a bidirectional fold.
--
-- \-> Left unfolds using the (->) part of the given semi-iso.
--
-- \<- Left folds using the (<-) part of the given semi-iso.
bifoldl_ :: ASemiIso a a (Maybe (a, b)) (a, b) -> SemiIso' a (a, [b])
bifoldl_ ai = semiIso (uf ai) (f ai)
  where
    f = uncurry . foldlM . curry . unapply
    uf = unfoldlM . apply

-- | Constructs a bidirectional fold.
--
-- \-> Left unfolds using the (->) part of the given semi-iso. It should
-- produce a non-empty list.
--
-- \<- Left folds a non-empty list using the (<-) part of the given semi-iso.
bifoldl1_ :: ASemiIso a a (Maybe (a, a)) (a, a) -> SemiIso' a [a]
bifoldl1_ ai = semiIso (uf ai) (f ai)
  where
    f = foldlM1 . curry . unapply
    uf = unfoldlM1 . apply
| pawel-n/semi-iso | Control/Lens/SemiIso.hs | mit | 15,262 | 0 | 14 | 3,662 | 3,960 | 2,090 | 1,870 | 221 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
-- Copyright (C) 2012 John Millikin <[email protected]>
--
-- See license.txt for details
module OptionsTests.StringParsing
( suite_StringParsing
) where
import Control.Applicative
import Test.Chell
import Options
-- | Options record exercising string parsing: one plain option plus two
-- options whose defaults are ASCII and non-ASCII respectively.
data StringOptions = StringOptions
    { optString :: String
    , optString_defA :: String
    , optString_defU :: String
    }

instance Options StringOptions where
    defineOptions = pure StringOptions
        <*> simpleOption "string" "" ""
        -- String, ASCII default
        <*> simpleOption "string_defA" "a" ""
        -- String, Unicode default ("\12354" is U+3042, HIRAGANA LETTER A)
        <*> simpleOption "string_defU" "\12354" ""
suite_StringParsing :: Suite
suite_StringParsing = suite "string-parsing"
    [ test_Defaults
    , test_Ascii
    , test_UnicodeValid
    , test_UnicodeInvalid
    ]

-- | The declared defaults come back verbatim from 'defaultOptions'.
test_Defaults :: Test
test_Defaults = assertions "defaults" $ do
    let opts = defaultOptions
    $expect (equal (optString_defA opts) "a")
    $expect (equal (optString_defU opts) "\12354")

-- | A plain ASCII argument parses unchanged.
test_Ascii :: Test
test_Ascii = assertions "ascii" $ do
    let parsed = parseOptions ["--string=a"]
    let Just opts = parsedOptions parsed
    $expect (equal (optString opts) "a")

-- | A valid non-ASCII argument decodes to the expected Unicode string.
-- "\227\129\130" is the UTF-8 byte sequence for "\12354" (U+3042), used
-- when the library is built to take argv as raw UTF-8 bytes.
test_UnicodeValid :: Test
test_UnicodeValid = assertions "unicode-valid" $ do
#if defined(OPTIONS_ENCODING_UTF8)
    let parsed = parseOptions ["--string=\227\129\130"]
#else
    let parsed = parseOptions ["--string=\12354"]
#endif
    let Just opts = parsedOptions parsed
    $expect (equal (optString opts) "\12354")

-- | An invalid byte in argv is represented differently depending on the
-- GHC release (per the CPP below): as the surrogate escape "\56507"
-- (0xDCBB) on GHC >= 7.4, as "\61371" (0xEFBB) on 7.2, and as the raw
-- byte "\187" (0xBB) before that.
test_UnicodeInvalid :: Test
test_UnicodeInvalid = assertions "unicode-invalid" $ do
#if __GLASGOW_HASKELL__ >= 704
    let parsed = parseOptions ["--string=\56507"]
    let expectedString = "\56507"
#elif __GLASGOW_HASKELL__ >= 702
    let parsed = parseOptions ["--string=\61371"]
    let expectedString = "\61371"
#else
    let parsed = parseOptions ["--string=\187"]
    let expectedString = "\56507"
#endif
    let Just opts = parsedOptions parsed
    $expect (equal (optString opts) expectedString)
| jmillikin/haskell-options | tests/OptionsTests/StringParsing.hs | mit | 1,981 | 24 | 15 | 324 | 429 | 222 | 207 | 43 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module AEAD.XChaCha20Poly1305Properties (
testAEADXChaCha20
) where
import Util
import Crypto.Saltine.Core.AEAD.XChaCha20Poly1305
import Crypto.Saltine.Class (decode)
import Crypto.Saltine.Internal.AEAD.XChaCha20Poly1305 as Bytes
import qualified Data.ByteString as S
import Data.Maybe (fromJust)
import Test.Framework.Providers.QuickCheck2
import Test.Framework
import Test.QuickCheck (Property, (==>))
import Test.QuickCheck.Arbitrary
-- | Generates a random nonce of exactly the required byte count.
-- The 'fromJust' is assumed safe because the bytestring has the exact
-- length 'decode' expects — TODO confirm against saltine's decode.
instance Arbitrary Nonce where
    arbitrary =
        do bs <- S.pack <$> vector Bytes.aead_xchacha20poly1305_ietf_npubbytes
           pure $ fromJust (decode bs)

-- | Generates a random key of exactly the required byte count; same
-- 'fromJust' caveat as for 'Nonce'.
instance Arbitrary Key where
    arbitrary =
        do bs <- S.pack <$> vector Bytes.aead_xchacha20poly1305_ietf_keybytes
           pure $ fromJust (decode bs)
-- | Encryption followed by decryption recovers the plaintext.
rightInverseProp :: Key -> Nonce -> Message -> Message -> Bool
rightInverseProp key nonce (Message msg) (Message aad) =
    aeadOpen key nonce (aead key nonce msg aad) aad == Just msg

-- | Detached encryption followed by detached decryption recovers the
-- plaintext.
rightInverseDetachedProp :: Key -> Nonce -> Message -> Message -> Bool
rightInverseDetachedProp key nonce (Message msg) (Message aad) =
    case aeadDetached key nonce msg aad of
        (tag, ct) -> aeadOpenDetached key nonce tag ct aad == Just msg
-- | Ciphertext cannot be decrypted if the ciphertext is perturbed.
-- Preconditions: non-empty plaintext, and the perturbation actually
-- changed the ciphertext.
rightInverseFailureProp :: Key -> Nonce -> Message -> Message -> Perturb -> Property
rightInverseFailureProp k n (Message bs) (Message aad) p =
    S.length bs /= 0 ==>
    let ct = aead k n bs aad
        fakeCT = perturb ct p
    in fakeCT /= ct ==> Nothing == aeadOpen k n fakeCT aad

-- | Ciphertext cannot be decrypted if the aad is perturbed (here: replaced
-- by a different random AAD).
rightInverseAADFailureProp :: Key -> Nonce -> Message -> Message -> Message -> Property
rightInverseAADFailureProp k n (Message bs) (Message aad) (Message aad2) =
    aad /= aad2 ==> Nothing == aeadOpen k n (aead k n bs aad) aad2

-- | Ciphertext cannot be decrypted if the tag is perturbed (replaced by a
-- different random tag).
rightInverseTagFailureProp :: Key -> Nonce -> Message -> Message -> Message -> Property
rightInverseTagFailureProp k n (Message bs) (Message aad) (Message newTag) =
    let (tag,ct) = aeadDetached k n bs aad
    in newTag /= tag ==> Nothing == aeadOpenDetached k n newTag ct aad

-- | Ciphertext cannot be decrypted if the ciphertext is perturbed
-- (detached variant).  The length guard ensures the perturbation fits
-- inside the ciphertext.
rightInverseFailureDetachedProp :: Key -> Nonce -> Message -> Message -> Perturb -> Property
rightInverseFailureDetachedProp k n (Message bs) (Message aad) p@(Perturb pBytes) =
    let (tag,ct) = aeadDetached k n bs aad
    in S.length bs > length pBytes ==>
        Nothing == aeadOpenDetached k n tag (perturb ct p) aad
-- | Decryption with a different key must fail.
cannotDecryptKeyProp :: Key -> Key -> Nonce -> Message -> Message -> Property
cannotDecryptKeyProp k1 k2 n (Message msg) (Message aad) =
    k1 /= k2 ==> aeadOpen k2 n ct aad == Nothing
  where
    ct = aead k1 n msg aad

-- | Detached decryption with a different key must fail.
cannotDecryptKeyDetachedProp :: Key -> Key -> Nonce -> Message -> Message -> Property
cannotDecryptKeyDetachedProp k1 k2 n (Message msg) (Message aad) =
    k1 /= k2 ==> aeadOpenDetached k2 n tag ct aad == Nothing
  where
    (tag, ct) = aeadDetached k1 n msg aad

-- | Decryption with a different nonce must fail.
cannotDecryptNonceProp :: Key -> Nonce -> Nonce -> Message -> Message -> Property
cannotDecryptNonceProp k n1 n2 (Message msg) (Message aad) =
    n1 /= n2 ==> aeadOpen k n2 (aead k n1 msg aad) aad == Nothing

-- | Detached decryption with a different nonce must fail.
cannotDecryptNonceDetachedProp :: Key -> Nonce -> Nonce -> Message -> Message -> Property
cannotDecryptNonceDetachedProp k n1 n2 (Message msg) (Message aad) =
    n1 /= n2 ==> aeadOpenDetached k n2 tag ct aad == Nothing
  where
    (tag, ct) = aeadDetached k n1 msg aad
-- | Collects every XChaCha20-Poly1305 AEAD property into one test tree.
testAEADXChaCha20 :: Test
testAEADXChaCha20 = buildTest $ do
    return $ testGroup "...Internal.AEAD.XChaCha20Poly1305" [
        testProperty "Can decrypt ciphertext"
            $ rightInverseProp,
        testProperty "Can decrypt ciphertext (detached)"
            $ rightInverseDetachedProp,
        testGroup "Cannot decrypt ciphertext when..." [
            testProperty "... ciphertext is perturbed"
                $ rightInverseFailureProp,
            testProperty "... AAD is perturbed"
                $ rightInverseAADFailureProp,
            testProperty "... ciphertext is perturbed (detached)"
                $ rightInverseFailureDetachedProp,
            testProperty "... tag is perturbed (detached)"
                $ rightInverseTagFailureProp,
            testProperty "... using the wrong key"
                $ cannotDecryptKeyProp,
            testProperty "... using the wrong key (detached)"
                $ cannotDecryptKeyDetachedProp,
            testProperty "... using the wrong nonce"
                $ cannotDecryptNonceProp,
            -- Fixed label: the closing parenthesis was missing.
            testProperty "... using the wrong nonce (detached)"
                $ cannotDecryptNonceDetachedProp
            ]
        ]
| tel/saltine | tests/AEAD/XChaCha20Poly1305Properties.hs | mit | 4,999 | 0 | 14 | 1,062 | 1,333 | 680 | 653 | -1 | -1 |
import Stomp
import System
-- | Command-line STOMP sender: @sender DESTINATION MESSAGE@.
-- Connects to a broker on localhost:61613 and sends a single message.
main = do
    args <- getArgs
    case args of
        -- Extra arguments are ignored, matching the old behaviour.
        (dest : body : _) -> do
            client <- connect "localhost" 61613 []
            send client dest [] body
        -- Previously this crashed with a partial (!!) index error when
        -- arguments were missing; fail with a usage message instead.
        _ -> error "usage: sender DESTINATION MESSAGE"
| akisystems/stomp-hs | src/Sender.hs | mit | 144 | 0 | 9 | 38 | 63 | 31 | 32 | 6 | 1 |
module SecretSanta.ConstraintMap
(constraintMap) where
import Control.Arrow ((&&&))
import Data.Map hiding (delete,filter,null)
import Prelude hiding (lookup)
import SecretSanta.Types
-- | Takes the list of participants and the list of previous years'
-- arrangements, and produces a constraint mapping: each participant's
-- name mapped to the names of everyone they may validly draw.
constraintMap :: [Participant] -> [Arrangement] -> ConstraintMap
constraintMap xs ys = fromList constraints'
  where
    -- Pair every participant's name with their candidates' names.
    constraints' = fmap (name &&& fmap name . possibleRecipients) xs
    possibleRecipients z = filter (satisfiesConstraints z) xs
    -- A candidate passes when none of the conflict checks fire.
    -- @(\./) fmap preds@ applies every predicate in the list to the same
    -- candidate (swing combinator, defined below), then 'and' requires
    -- all of them to hold.
    satisfiesConstraints z
        = and . (\./) fmap
            [ not . grandparentOf z
            , not . sameFamily z
            , not . previousRecipient z
            ]
    -- Grandparents must not draw grandchildren.
    grandparentOf z a
        = generation z == Grandparent
        && generation a == Grandchild
    -- No gifts within the same family.
    sameFamily z a
        = family z == family a
    -- Reject pairs that occurred in any previous arrangement.
    previousRecipient z a
        = any (isInMap z a) ys
    -- An Arrangement is looked up association-list style, giver -> recipient
    -- (see SecretSanta.Types for the concrete representation).
    isInMap z a
        = (== Just True)
        . fmap (== name a)
        . lookup (name z)

-- | The "swing" combinator.  Specialised to lists:
-- @(\./) fmap fs x == fmap ($ x) fs@ — applies a list of functions to a
-- single shared argument.
(\./) :: (((a -> c1) -> c1) -> b -> c) -> b -> a -> c
(\./) = flip . (. flip id) -- swing combinator
{-- Should start using constraint functions like this
notGrandparentOf :: [Arrangement] -> Participant -> Participant -> Bool
notGrandparentOf _ x y = (generation x /= Grandparent)
|| (generation y /= Grandchild)
--}
| recursion-ninja/SecretSanta | Constraint/ConstraintMap.hs | gpl-3.0 | 1,449 | 0 | 11 | 429 | 363 | 196 | 167 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Types where
import Control.Lens
-- | Static problem configuration.  @_dividend@ is presumably the number
-- being factored (Project Euler problem 3) — TODO confirm against the solver.
data LPrimeConfig = LPrimeConfig
    { _dividend :: Integer
    }

-- | Solver state; currently carries no fields.
data LPrimeState = LPrimeState{}

-- Generate lenses ('dividend', ...) for the records above.
$(makeLenses ''LPrimeConfig)
$(makeLenses ''LPrimeState)
| ignuki/projecteuler | 3/Types.hs | gpl-3.0 | 229 | 3 | 9 | 35 | 60 | 34 | 26 | 8 | 0 |
module CPUDefs where
import Types
import Control.Applicative
-- | Abstract interface to the emulated CPU: register file, memory,
-- status flags, interrupt line and CPU/PPU message passing.
class (Monad m, Applicative m) => NesCPU m where
    -- * Get registers.
    getA, getX, getY, getStatus :: m Operand
    getPC :: m Address
    getSP :: m Word8

    -- * Set registers.
    setA, setX, setY, setStatus :: Operand -> m ()
    setPC :: Address -> m ()
    setSP :: Word8 -> m ()

    -- * Alter registers and return the new value.
    alterA, alterX, alterY :: (Operand -> Operand) -> m Operand
    alterStatus :: (Operand -> Operand) -> m () -- notice! returns (), unlike the others
    alterPC :: (Address -> Address) -> m Address
    alterSP :: (Word8 -> Word8) -> m Word8

    -- * Memory.
    readMemory :: Address -> m Operand
    writeMemory :: Address -> Operand -> m ()
    -- | Apply a function to a memory cell (presumably returning the new
    -- value, mirroring the alter* register operations — confirm in instances).
    alterMemory :: Address -> (Operand -> Operand) -> m Operand

    -- * Flags.  The letters presumably follow 6502 naming (carry, zero,
    -- interrupt-disable, decimal, break, V = overflow, N = negative;
    -- "Q" is nonstandard) — TODO confirm.
    getFlagC, getFlagZ, getFlagI, getFlagD, getFlagB, getFlagQ, getFlagV
        , getFlagN :: m Bool
    setFlagC, setFlagZ, setFlagI, setFlagD, setFlagB, setFlagQ, setFlagV
        , setFlagN :: Bool -> m ()

    -- * Interrupt handler.  'Nothing' clears any pending interrupt.
    setIRQ :: (Maybe IRQ) -> m ()
    getIRQ :: m (Maybe IRQ)

    -- * Enterprise pulling.
    handlePPUAction :: [Action] -> m () -- ^ Handle actions from PPU.
    getCPUAction :: m Action            -- ^ Receive the action from CPU.
    setCPUAction :: Action -> m ()      -- ^ Set the given action in CPU.

-- | The stack's starting position: 0x0100, the base of stack page one
-- in the 6502 convention.
baseSP :: Address
baseSP = 0x0100
| tobsan/yane | CPUDefs.hs | gpl-3.0 | 1,428 | 0 | 10 | 379 | 394 | 231 | 163 | 28 | 1 |
module Strace.Parser
(
-- Trace file parsing
parseStrace,
Line(..),
TraceEvent(..),
TimeStamp(..),
-- Action Parsing (e.g. open() exec())
Action(..),
parseAction,
) where
import Data.Either (partitionEithers)
import Data.Int (Int64)
import Data.Time.Clock (UTCTime)
import Data.Time.Format (readTime)
import System.Locale (defaultTimeLocale)
import Text.Parsec (ParseError)
import Text.Parsec.ByteString.Lazy (Parser)
import Text.Parsec.Char
import Text.Parsec.Combinator
import Text.Parsec.Prim
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.Map as Map
-- | One parsed line of strace output: the process id and timestamp
-- prefix (strict fields), plus the event payload.
data Line = Line { pid :: ! Int, time :: ! TimeStamp, rest :: TraceEvent }
    deriving Show

-- | The payload of a line: a complete event, a call cut off by a context
-- switch (trailing "<unfinished ...>"), or the resumed tail of such a
-- call (carrying the syscall name).
data TraceEvent = TraceEvent B.ByteString
                | Unfinished B.ByteString
                | Resumed String B.ByteString
    deriving Show

-- | Absolute timestamps; parsed from "%s%Q" (seconds since the epoch),
-- see 'parseTimeStamp'.
type TimeStamp = UTCTime
-- | Splits the trace into lines and parses each one, separating parse
-- failures from successfully parsed lines.
parseStrace :: B.ByteString -> ([ParseError],[Line])
parseStrace = partitionEithers . map (parse parseLine "") . B.lines

-- | One trace line: PID, whitespace, timestamp, whitespace, event payload.
parseLine :: Parser Line
parseLine = do
    linePid <- parseInt
    spaces
    stamp <- parseTimeStamp
    spaces
    event <- parseTraceEvent
    return (Line linePid stamp event)
-- | One or more decimal digits.
parseDigits :: Parser String
parseDigits = many1 digit

-- | Digits, a dot, digits — e.g. @"123.456"@ — returned verbatim.
parseDigitsDotDigits :: Parser String
parseDigitsDotDigits = do
    whole <- parseDigits
    _ <- char '.'
    frac <- parseDigits
    return (whole ++ "." ++ frac)

-- | A C-style identifier: letter or underscore, then alphanumerics or
-- underscores.
parseIdent :: Parser String
parseIdent = do
    leading <- letter <|> char '_'
    trailing <- many (alphaNum <|> char '_')
    return (leading : trailing)

-- | A non-negative decimal integer.
parseInt :: Parser Int
parseInt = fmap read parseDigits

-- | A "seconds.fraction" epoch timestamp, via the "%s%Q" time format.
parseTimeStamp :: Parser UTCTime
parseTimeStamp = fmap (readTime defaultTimeLocale "%s%Q") parseDigitsDotDigits
-- | Distinguishes a resumed-syscall continuation ("<... name resumed>")
-- from a plain event; plain events ending in "<unfinished ...>" are
-- marked 'Unfinished' with that suffix stripped.
parseTraceEvent :: Parser TraceEvent
parseTraceEvent = try parseResumed <|> parseTraceEvent'
  where
    parseResumed, parseTraceEvent' :: Parser TraceEvent
    -- "<... name resumed>rest" — tail of a syscall split across lines.
    parseResumed = do
        string "<... "
        id <- parseIdent
        string " resumed>"
        rest <- getInput
        return (Resumed id rest)
    -- Plain event: detect a trailing "<unfinished ...>" marker by length
    -- arithmetic on the lazy bytestring (Int64 lengths).
    parseTraceEvent' = do
        rest <- getInput
        let (pre,suf) = B.splitAt prefixLen rest
            prefixLen, restLen :: Int64
            prefixLen = restLen - unfinishedLen
            restLen = B.length rest
        return $ if unfinished == suf
            then Unfinished pre
            else TraceEvent rest
    unfinished :: B.ByteString
    unfinished = B.pack "<unfinished ...>"
    unfinishedLen :: Int64
    unfinishedLen = B.length unfinished

-- | Parsed event payloads.  NOTE(review): 'name', 'args' and 'retVal'
-- are partial record selectors — not every constructor has every field.
data Action = Signal { name :: String }
            | Unknown { name :: String, args :: B.ByteString }
            | SCexit_group { name :: String, retVal :: Int }
    deriving Show
-- | Parses an event payload into an 'Action'; a parse failure is folded
-- into an 'Unknown' action carrying the error text.
parseAction :: B.ByteString -> Action
parseAction input =
    either (Unknown "parseerror" . B.pack . show) id
           (parse parseAction' "" input)

-- | An action is either a signal delivery or a syscall.
parseAction' :: Parser Action
parseAction' = parseSignal <|> parseSyscall

-- | Signals look like @--- SIGNAME ...@; the trailing detail is dropped.
parseSignal :: Parser Action
parseSignal = do
    spaces
    _ <- string "---"
    spaces
    signame <- parseIdent
    spaces
    _ <- many anyChar
    return (Signal signame)

-- | Syscalls dispatch on the identifier to a specialised sub-parser.
parseSyscall :: Parser Action
parseSyscall = do
    spaces
    scName <- parseIdent
    findSyscallParser scName scName
-- | Looks up the specialised parser for a syscall name, falling back to
-- 'parseUnknownSyscall'.  (Parameter renamed from the original @id@,
-- which shadowed 'Prelude.id'.)
findSyscallParser :: String -> (String -> Parser Action)
findSyscallParser scName =
    Map.findWithDefault parseUnknownSyscall scName syscallParserMap

-- | Registry of syscall-specific parsers, keyed by syscall name.
syscallParserMap :: Map.Map String (String -> Parser Action)
syscallParserMap =
    Map.fromList
        [ ("exit_group", parseSC'exit_group)
        ]

-- | Fallback: keep the raw argument text of an unrecognised syscall.
parseUnknownSyscall :: String -> Parser Action
parseUnknownSyscall scName = do
    remainder <- getInput
    return (Unknown scName remainder)

-- | @exit_group(CODE) = ?@ — the exit status is the only argument.
parseSC'exit_group :: String -> Parser Action
parseSC'exit_group scName = do
    code <- parens parseInt
    spaces
    _ <- char '='
    spaces
    _ <- char '?'
    return (SCexit_group scName code)

-- | Runs a parser between literal parentheses.
parens :: Parser a -> Parser a
parens inner = do
    _ <- char '('
    result <- inner
    _ <- char ')'
    return result
| alanfalloon/percipio | src/Strace/Parser.hs | gpl-3.0 | 4,064 | 4 | 13 | 1,066 | 1,175 | 607 | 568 | 130 | 2 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Model.PermissionUtil
( maskRestrictedString
) where
import qualified Data.String as ST
-- | Masks a restricted string by replacing it with the empty string,
-- regardless of the input's content.
maskRestrictedString :: ST.IsString a => a -> a
maskRestrictedString _ = ST.fromString ""
| databrary/databrary | src/Model/PermissionUtil.hs | agpl-3.0 | 256 | 0 | 8 | 37 | 59 | 33 | 26 | 6 | 1 |
module NestedRoots.A338271Spec (main, spec) where
import Test.Hspec
import NestedRoots.A338271 (a338271)
main :: IO ()
main = hspec spec

-- | Checks the sequence implementation against its first published terms.
spec :: Spec
spec = describe "A338271" $
    it "correctly computes the first 20 elements" $
        map a338271 [1..20] `shouldBe` expectedValue where
            expectedValue = [1,0,0,2,0,2,0,2,2,4,2,6,2,8,4,14,6,20,8,28]
| peterokagey/haskellOEIS | test/NestedRoots/A338271Spec.hs | apache-2.0 | 352 | 0 | 8 | 57 | 154 | 92 | 62 | 10 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
import Data.Maybe (maybe)
import Control.Monad (liftM)
-- | A computation that reads a value from a shared environment @e@.
newtype Reader e a = Reader { runReader :: e -> a }

instance Functor (Reader e) where
    -- Post-compose the pure function onto the environment reader.
    fmap f (Reader r) = Reader (f . r)

instance Applicative (Reader e) where
    -- A pure value ignores the environment.
    pure = Reader . const
    -- Run both sides against the same environment, then apply.
    Reader rf <*> Reader rx = Reader (\env -> rf env (rx env))

instance Monad (Reader e) where
    return = pure
    -- Feed the environment first to @m@, then to the continuation's result.
    m >>= f = Reader (\env -> runReader (f (runReader m env)) env)
-- | MTL-style reader interface; the functional dependency @m -> e@ lets
-- the environment type be determined by the monad.
class MonadReader e m | m -> e where
    -- | Retrieves the whole environment.
    ask :: m e
    -- | Runs a computation under a locally modified environment.
    local :: (e -> e) -> m a -> m a

instance MonadReader e (Reader e) where
    ask = Reader id
    local f (Reader r) = Reader $ \e -> r (f e)

-- | Projects a component out of the environment with a selector.
asks :: (Monad m, MonadReader e m) => (e -> a) -> m a
asks sel = ask >>= return . sel
-- Template AST: Text | Variable | Quote | Include | Compound
--
--   T s    — literal text
--   V t    — variable reference (the name is itself a template)
--   Q t    — quote: shows a named template's source instead of running it
--   I t ds — include a named template with extra definitions in scope
--   C ts   — concatenation of sub-templates
--
-- (Semantics as implemented by 'resolve' below.)
data Template = T String | V Template | Q Template | I Template [Definition] | C [Template] deriving Show

-- | A name/value pair of templates, resolved before being bound.
data Definition = D Template Template deriving Show

-- | Named templates plus the current variable bindings.
data Environment = Env {templates :: [(String,Template)],
                        variables :: [(String,String)]} deriving Show
-- | Looks a variable binding up in the environment.
lookupVar :: String -> Environment -> Maybe String
lookupVar key = lookup key . variables

-- | Looks a named template up in the environment.
lookupTemplate :: String -> Environment -> Maybe Template
lookupTemplate key = lookup key . templates

-- | Prepends new variable bindings; they shadow existing ones because
-- 'lookup' takes the first match.
addDefs :: [(String, String)] -> Environment -> Environment
addDefs newDefs env = env { variables = newDefs ++ variables env }

-- | Resolves both halves of a definition into a concrete binding.
resolveDef :: Definition -> Reader Environment (String,String)
resolveDef (D nameT valueT) = do
    key <- resolve nameT
    val <- resolve valueT
    return (key, val)
-- | Resolve a template into a string.  Unknown variables and templates
-- resolve to the empty string.
resolve :: Template -> Reader Environment String
-- Literal text.
resolve (T s) = return s
-- Variable: resolve the name, then look it up (absent -> "").
resolve (V t) = do varName <- resolve t
                   varValue <- asks (lookupVar varName)
                   return $ maybe "" id varValue
-- Quote: render the named template's AST via 'show' instead of running it.
resolve (Q t) = do tmplName <- resolve t
                   body <- asks (lookupTemplate tmplName)
                   return $ maybe "" show body
-- Include: run the named template with extra definitions in scope.
resolve (I t ds) = do tmplName <- resolve t
                      body <- asks (lookupTemplate tmplName)
                      case body of
                          Just t' -> do defs <- mapM resolveDef ds
                                        local (addDefs defs) (resolve t')
                          Nothing -> return ""
-- Compound: resolve each piece and concatenate.
resolve (C ts) = (liftM concat) (mapM resolve ts)
-- | A tiny example environment type: an association list.
type Envr = [(String, Int)]

-- | Association-list lookup, specialised to 'Envr'.
lookp :: String -> Envr -> Maybe Int
lookp = lookup

-- | Sample data for experimenting with 'lookp'.
envr :: Envr
envr = [("abc", 1), ("def", 2), ("hij", 3)]
| egaburov/funstuff | Haskell/monads/readm.hs | apache-2.0 | 2,794 | 0 | 14 | 852 | 1,048 | 541 | 507 | 56 | 2 |
-- Demonstrates of a possible solution to expression problem in Haskell.
module Main where
import Prelude hiding (print)
-- Classes

-- | Persistence interface shared by every document type.
class Document a where
    load :: a -> IO ()
    save :: a -> IO ()

-- | Printing interface, kept separate from 'Document' so operations can
-- be added without touching existing types (the expression problem this
-- file demonstrates).
class Printable a where
    print :: a -> IO ()
-- | A text document, carrying only its name.
data TextDocument = TextDocument String

-- Document Interface
instance Document TextDocument where
    load (TextDocument docName) = putStrLn (concat ["Loading TextDocument(", docName, ")..."])
    save (TextDocument docName) = putStrLn (concat ["Saving TextDocument(", docName, ")..."])

-- Printable Interface
instance Printable TextDocument where
    print (TextDocument docName) = putStrLn (concat ["Printing TextDocument(", docName, ")"])

-- | A drawing document, carrying only its name.
data DrawingDocument = DrawingDocument String

-- Document Interface
instance Document DrawingDocument where
    load (DrawingDocument docName) = putStrLn (concat ["Loading DrawingDocument(", docName, ")..."])
    save (DrawingDocument docName) = putStrLn (concat ["Saving DrawingDocument(", docName, ")..."])

-- Printable Interface
instance Printable DrawingDocument where
    print (DrawingDocument docName) = putStrLn (concat ["Printing DrawingDocument(", docName, ")"])
-- Demonstration

-- | Exercises every interface a document supports: load, save, print.
-- The inferred type is @(Document a, Printable a) => a -> IO ()@.
test a = do
    load a
    save a
    print a

-- | Runs the full lifecycle for one document of each type.
main = do
    putStrLn ""
    test (TextDocument "text")
    putStrLn ""
    test (DrawingDocument "text")
    putStrLn ""
| rizo/lambda-lab | expression-problem/haskell/expression-problem-1.hs | apache-2.0 | 1,307 | 0 | 9 | 271 | 373 | 186 | 187 | 29 | 1 |
module PopVox
( module X
) where
import PopVox.OpenSecrets
import PopVox.OpenSecrets.Output
import PopVox.OpenSecrets.Types
import PopVox.OpenSecrets.Utils
import PopVox.Types
import qualified PopVox.OpenSecrets as X
import qualified PopVox.OpenSecrets.Output as X
import qualified PopVox.OpenSecrets.Types as X
import qualified PopVox.OpenSecrets.Utils as X
import qualified PopVox.Types as X
| erochest/popvox-scrape | src/PopVox.hs | apache-2.0 | 480 | 0 | 4 | 128 | 81 | 58 | 23 | 12 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Yesod.Session.Redis (
localRedisSessionBackend,
redisSessionBackend
) where
import qualified Web.RedisSession as R
import Yesod.Core
import qualified Network.Wai as W
import Web.Cookie
import Control.Monad.Trans (liftIO)
import Data.Maybe (fromMaybe)
import Data.Time (UTCTime, addUTCTime)
import Data.Conduit.Pool (Pool)
import Data.Binary
import Data.Text (Text)
import Data.Text.Encoding
import Control.Monad (liftM)
-- NOTE(review): orphan instance — 'Binary' for 'Text' belongs to neither
-- this package's types nor the class's home; consider a newtype wrapper.
instance Binary Text where
    put = put . encodeUtf8
    -- decodeUtf8 throws on invalid UTF-8; assumed safe because the input
    -- is data this module itself serialized with 'put' — TODO confirm.
    get = liftM decodeUtf8 get

-- Name of the session cookie (and the key looked up in the request's
-- Cookie header).
sessionName = "yesodSession"
-- | Loads the session for a request: read the session key from the
-- request's cookie and fetch the stored session from Redis.  Missing
-- cookie or missing Redis entry both yield an empty session.  The
-- timestamp argument is unused here.
loadRedisSession :: (Yesod master) => Pool R.Redis -> master -> W.Request -> UTCTime -> IO BackendSession
loadRedisSession pool _ req now = do
    let val = do
            raw <- lookup "Cookie" $ W.requestHeaders req
            lookup sessionName $ parseCookies raw
    case val of
        Nothing -> return []
        Just s -> fmap (fromMaybe []) $ liftIO $ R.getSession pool s

-- | Persists the session: reuse the request's existing session key (or
-- mint a new one), store the session in Redis with an expiry, and emit
-- the session cookie.
--
-- NOTE(review): Redis gets the raw @timeout@ while the cookie expiry is
-- @timeout * 60@ seconds — confirm both sides agree on minutes vs seconds.
saveRedisSession :: (Yesod master) => Pool R.Redis -> Int -> master -> W.Request -> UTCTime -> BackendSession -> BackendSession -> IO [Header]
saveRedisSession pool timeout master req now _ sess = do
    let val = do
            raw <- lookup "Cookie" $ W.requestHeaders req
            lookup sessionName $ parseCookies raw
    key <- case val of
        Nothing -> R.newKey
        Just k -> return k
    R.setSessionExpiring pool key sess timeout
    return [AddCookie def {
        setCookieName = sessionName,
        setCookieValue = key,
        setCookiePath = Just $ cookiePath master,
        setCookieExpires = Just expires,
        setCookieDomain = cookieDomain master,
        setCookieHttpOnly = True
        }]
  where
    -- Cookie lifetime, treating @timeout@ as minutes.
    expires = fromIntegral (timeout * 60) `addUTCTime` now
-- | Session backend backed by a Redis instance on localhost.
localRedisSessionBackend :: (Yesod master) => Int -> IO (SessionBackend master)
localRedisSessionBackend = sessionBackend R.makeRedisLocalConnectionPool

-- | Session backend backed by a Redis instance at the given host/port.
redisSessionBackend :: (Yesod master) => String -> String -> Int -> IO (SessionBackend master)
redisSessionBackend server port = sessionBackend (R.makeRedisConnectionPool server port)

-- | Builds a backend from a pool constructor and a timeout (passed
-- through to 'saveRedisSession').
sessionBackend :: (Yesod master) => IO (Pool R.Redis) -> Int -> IO (SessionBackend master)
sessionBackend mkPool timeout = do
    pool <- mkPool
    return $ SessionBackend {
        sbSaveSession = saveRedisSession pool timeout,
        sbLoadSession = loadRedisSession pool
        }
| scan/redissession | Yesod/Session/Redis.hs | bsd-2-clause | 2,239 | 26 | 15 | 368 | 754 | 389 | 365 | 55 | 2 |
module HEP.Physics.MSSM.Model.Common where
-- | An algebraic sign, represented by a 'Bool' ('True' = plus).
newtype Sign = Sign Bool
             deriving (Show,Eq,Ord)

-- | The positive sign.
sgnplus :: Sign
sgnplus = Sign True

-- | The negative sign.
sgnminus :: Sign
sgnminus = Sign False

-- | Render a sign as @+1@ or @-1@.
toInt :: Sign -> Int
toInt (Sign b) = if b then 1 else -1

-- | Interpret a non-negative 'Int' (zero included) as plus, a negative
-- one as minus.
fromInt :: Int -> Sign
fromInt = Sign . (>= 0)
| wavewave/MSSMType | src/HEP/Physics/MSSM/Model/Common.hs | bsd-2-clause | 323 | 0 | 7 | 80 | 128 | 72 | 56 | 12 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.LeastSignificantFirst.K20ffacc8f8c9 (LeastSignificantFirst(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
-- | A value tagged as being encoded least-significant-first.
-- NOTE(review): this module looks machine-generated (canonical-hash
-- module suffix); avoid hand-editing beyond comments.
newtype LeastSignificantFirst a = LeastSignificantFirst a
  deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance ( Data.Model.Model a ) => Data.Model.Model ( LeastSignificantFirst a )
| tittoassini/typed | test/Test/ZM/ADT/LeastSignificantFirst/K20ffacc8f8c9.hs | bsd-3-clause | 496 | 0 | 7 | 53 | 122 | 75 | 47 | 10 | 0 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
module FPNLA.Operations.BLAS.Strategies.GEMV (
) where
import FPNLA.Matrix (asColumn_vm, toCols_vm)
import FPNLA.Operations.BLAS (Elt(), GEMM (gemm), GEMV (gemv))
import FPNLA.Operations.Parameters (ResM, TransType (..), blasResultV,
getResultDataM)
-- GEMV in terms of GEMM: wrap the operand and accumulator vectors as
-- one-column matrices, multiply, and read the single result column back.
instance (Elt e, GEMM s m v e) => GEMV s m v e where
    gemv strat tmA vB alpha beta vC =
        blasResultV . head . toCols_vm . getResultDataM $
            gemmCall tmA colB alpha beta colC
        where colB = NoTrans (asColumn_vm vB)
              colC = asColumn_vm vC
              -- the annotation pins the result type so 'gemm' resolves
              gemmCall mA mB a b mC = gemm strat mA mB a b mC :: ResM s v m e
| mauroblanco/fpnla-examples | src/FPNLA/Operations/BLAS/Strategies/GEMV.hs | bsd-3-clause | 873 | 0 | 9 | 274 | 224 | 127 | 97 | 16 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Web.ChatWork.Endpoints.My (
statusEndpoint
, MyStatus(..)
, Task(..)
, tasksEndpoint
) where
import Data.Aeson
import Data.ByteString.Char8
import GHC.Generics
import Web.ChatWork.Endpoints.Base
import Web.ChatWork.Internal as I
import Web.ChatWork.Endpoints.TaskAccount
-- | Unread/task counters for the authenticated user.
data MyStatus = MyStatus {
  unreadRoomNum :: Int      -- ^ rooms containing unread messages
  , mentionRoomNum :: Int   -- ^ rooms with unread mentions
  , mytaskRoomNum :: Int    -- ^ rooms with open tasks assigned to me
  , unreadNum :: Int        -- ^ total unread message count
  , mytaskNum :: Int        -- ^ total open task count
  } deriving (Show, Generic)
-- Field names are camelCase; I.parseJSON presumably maps them onto the
-- API's snake_case keys -- TODO confirm against Web.ChatWork.Internal.
instance FromJSON MyStatus where
  parseJSON = I.parseJSON
-- | Full URL of the status endpoint.
statusEndpoint :: String
statusEndpoint = baseURL ++ "/my/status"
-- | Task state as exposed by the API: still open or already done.
data Status = Open | Done
  deriving (Show, Generic)
-- | Decode a task status from its JSON string form.
-- Anything other than @"open"@/@"done"@ is reported as a proper parse
-- failure instead of crashing with a non-exhaustive case (the original
-- 'case' had no fallthrough).
instance FromJSON Status where
  parseJSON value = case value of
    String "open" -> return Open
    String "done" -> return Done
    _ -> fail "Status: expected \"open\" or \"done\""
-- | Encode a task status as the JSON string the API expects.
instance ToJSON Status where
  toJSON Open = String "open"
  toJSON Done = String "done"
-- | A task assigned to the authenticated user.
data Task = Task {
  taskId :: Int
  , room :: TaskRoom                  -- ^ room the task lives in
  , assignedByAccount :: TaskAccount  -- ^ account that assigned the task
  , messageId :: Int                  -- ^ message the task was created from
  , body :: String                    -- ^ task text
  , limitTime :: Int                  -- ^ due time; presumably epoch seconds -- TODO confirm
  , status :: Status
  } deriving (Show, Generic)
instance FromJSON Task where
  parseJSON = I.parseJSON
-- | Slimmed-down room reference embedded in a 'Task'.
data TaskRoom = TaskRoom {
  roomId :: Int
  , name :: String
  , iconPath :: String
  } deriving (Show, Generic)
instance FromJSON TaskRoom where
  parseJSON = I.parseJSON
-- | Full URL of the task-list endpoint.
tasksEndpoint :: String
tasksEndpoint = baseURL ++ "/my/tasks"
| eiel/haskell-chatwork | src/Web/ChatWork/Endpoints/My.hs | bsd-3-clause | 1,452 | 0 | 9 | 294 | 396 | 232 | 164 | 54 | 1 |
module Examples where
import Lang.Lam.Syntax
import FP
import qualified FP.Pretty as P
import qualified Lang.Lam.Analyses as A
import Lang.Lam.Passes.B_CPSConvert
-- | Widen the pretty-printer limits so analysis result tables fit.
formatResults :: Doc -> Doc
formatResults = localSetL P.maxColumnWidthL 120 . localSetL P.maxRibbonWidthL 120
-- | Render one example end to end: the source term, its stamped form,
-- its CPS form, and the output of every analysis selected by the seven
-- option lists (mode, GC, closure creation, time filters, address
-- abstraction, monad).
doConfig :: Exp -> [String] -> [String] -> [String] -> [String] -> [String] -> [String] -> [String] -> Doc
doConfig e modes gcs createClosures lexTimeFilter dynTimeFilter μs monads =
  let (se, c) = stampCPS e
  in P.vsep
    [ P.heading "Source"
    , localSetL P.maxRibbonWidthL 40 $ pretty e
    , P.heading "Stamped"
    , localSetL P.maxRibbonWidthL 40 $ pretty se
    , P.heading "CPS"
    , localSetL P.maxRibbonWidthL 40 $ pretty c
    -- one section per selected analysis: its name, then its results
    , P.vsep $ mapOn (A.allE modes gcs createClosures lexTimeFilter dynTimeFilter μs monads) $ uncurry $ \ n f -> P.vsep
      [ P.heading n
      , formatResults $ f c
      ]
    ]
-- | Exercises k-CFA: one shared identity function called from two sites,
-- so a context-sensitive analysis can keep the two results apart.
simpleKCFA :: Exp
simpleKCFA =
  llet "id" (lam "x" $ v "x") $
  iif someBool
    (v "id" $# int 1)
    (v "id" $# int 2)
-- | Exercises m-CFA-style closure sensitivity: a curried function is
-- partially applied at two sites and finished inside a helper.
simpleMCFA :: Exp
simpleMCFA =
  llet "g" (lam "x" $ lam "y" $
       iif (gez (v "x")) (int 100) (int 200)) $
  llet "ff" (lam "f" $ v "f" @# int 0) $
  iif someBool
    (v "ff" $# v "g" @# int 1)
    (v "ff" $# v "g" @# int (-1))
-- | Exercises lexical- vs dynamic-time abstractions: two distinct
-- callbacks funnelled through the same higher-order applier.
simpleLexicalTime :: Exp
simpleLexicalTime =
  llet "ff" (lam "f" $ lam "x" $ v "f" @# v "x") $
  llet "g" (lam "x" $ gez $ v "x") $
  llet "h" (lam "x" $ gez $ v "x") $
  iif someBool
    (v "ff" @# v "g" @# int 1)
    (v "ff" @# v "h" @# int (-1))
-- | Entry point: pretty-print the currently enabled example runs
-- (the others are kept around commented out).
examplesMain :: IO ()
examplesMain =
  pprint $ P.vsep
    [ return ()
    -- , doConfig simpleKCFA ["abstract"] ["no"] ["link"] ["location"] ["location"] ["0-cfa", "1k-cfa"] ["fi"]
    -- , doConfig simpleMCFA ["abstract"] ["no"] ["link", "copy"] ["location"] ["location"] ["1k-cfa"] ["fi"]
    , doConfig simpleLexicalTime ["abstract"] ["no"] ["link"] ["app"] ["app"] ["1k-cfa", "1o-cfa"] ["fi"]
    ]
| davdar/quals | src/Examples.hs | bsd-3-clause | 1,966 | 0 | 15 | 496 | 761 | 385 | 376 | 48 | 1 |
{-# LANGUAGE CPP #-}
-------------------------------------------------------------------------------
-- |
-- Copyright : (c) 2010 Eugene Kirpichov, Dmitry Astapov
-- License : BSD3
--
-- Maintainer : Eugene Kirpichov <[email protected]>,
-- Dmitry Astapov <[email protected]>
-- Stability : experimental
-- Portability : GHC only (STM, GHC.Conc for unsafeIOToSTM)
--
-- This module provides a binding to the greg distributed logger,
-- which provides a high-precision global time axis and is very performant.
--
-- See project home page at <http://code.google.com/p/greg> for an explanation
-- of how to use the server, the features, motivation and design.
--
module System.Log.Greg (
Configuration(..)
,logMessage
,withGregDo
,defaultConfiguration
) where
import System.Log.PreciseClock
import System.Posix.Clock
import Data.ByteString.Unsafe
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Binary
import Data.Binary.Put
import Network
import Network.HostName (getHostName)
import System.UUID.V4
import System.IO
import Foreign
#ifdef DEBUG
import Debug.Trace
#endif
import qualified Control.Exception as E
import Control.Concurrent
import Control.Concurrent.STM
import GHC.Conc
import Control.Monad
{-
Messages are stored in TChan
1 thread performs calibration
1 'packer' thread takes messages from tchan and offloads them to sender thread(s).
1 'checking' thread keeps an eye on TChan size, initiates message dropping if necessary.
1 'sender' thread delivers the batch of messages to the server
-}
-- | One log record: capture time plus the raw message bytes.
data Record = Record {
        timestamp :: TimeSpec,
        message :: B.ByteString
    }
-- | Shared mutable state of the logger, held in the global 'state' TVar.
data GregState = GregState {
        configuration :: Configuration,
        records :: TChan Record, -- FIFO for queued Records
        numRecords :: TVar Int, -- How many records are in FIFO
        isDropping :: TVar Bool, -- True if we are not adding records to the FIFO since there are more than 'maxBufferedRecords' of them
        packet :: TMVar [Record] -- Block of records we are currently trying to send
    }
-- | Client configuration.
-- You probably only need to change @server@.
data Configuration = Configuration {
    server :: String -- ^ Server hostname (default @localhost@)
   ,port :: Int -- ^ Message port (default @5676@)
   ,calibrationPort :: Int -- ^ Calibration port (default @5677@)
   ,flushPeriodMs :: Int -- ^ How often to send message batches to server
                         --   (default @1000@)
   ,clientId :: String -- ^ Arbitrary identifier, will show up in logs.
                       --   For example, @\"DataService\"@
                       --   (default @\"unknown\"@)
   ,maxBufferedRecords :: Int -- ^ How many records to store between flushes
                              --   (more will be dropped) (default @100000@)
   ,useCompression :: Bool -- ^ Whether to use gzip compression
                           --   (default @False@, @True@ is unsupported)
                           --   NOTE(review): not consulted anywhere in this module.
   ,calibrationPeriodSec :: Int -- ^ How often to initiate calibration exchanges
                                --   (default @10@)
  }
-- | Host name and a per-process sender UUID, each computed once.
-- NOINLINE is required on top-level 'unsafePerformIO' CAFs: without it
-- GHC may inline/duplicate them and re-run the IO action.
hostname, ourUuid :: B.ByteString
{-# NOINLINE hostname #-}
hostname = B.pack $ unsafePerformIO getHostName
{-# NOINLINE ourUuid #-}
ourUuid = repack . runPut . put $ unsafePerformIO uuid
-- | The default configuration, suitable for most needs.
defaultConfiguration :: Configuration
defaultConfiguration = Configuration {
    server = "localhost",
    port = 5676,
    calibrationPort = 5677,
    flushPeriodMs = 1000,
    clientId = "unknown",
    maxBufferedRecords = 100000,
    -- The field's documentation states the default is False and that
    -- True is unsupported; the previous value of True contradicted it.
    useCompression = False,
    calibrationPeriodSec = 10
  }
-- | Perform an IO action with logging (will wait for all messages to flush).
withGregDo :: Configuration -> IO () -> IO ()
withGregDo conf realMain = withSocketsDo $ do
  -- Publish the caller's configuration in the global state.
  st <- atomically $ do st <- readTVar state
                        let st' = st{configuration = conf}
                        writeTVar state $ st'
                        return st'
  -- Helpers: run an action forever at a fixed period, logging (not
  -- rethrowing) anything it throws so one failure doesn't kill the loop.
  let everyMs ms action = forkIO $ forever (action >> threadDelay (1000 * ms))
  let safely action label = action `E.catch` \e -> putStrLnT ("Error in " ++ label ++ ": " ++ show (e::E.SomeException))
  let safelyEveryMs ms action label = everyMs ms (safely action label)
  -- Packer thread offloads records to sender thread
  -- Housekeeping thread keeps queue size at check
  calTID <- safelyEveryMs (1000*calibrationPeriodSec conf) (initiateCalibrationOnce st) "calibrator"
  packTID <- safelyEveryMs (     flushPeriodMs conf) (packRecordsOnce st) "packer"
  checkTID <- safelyEveryMs (     flushPeriodMs conf) (checkQueueSize st) "queue size checker"
  sendTID <- safelyEveryMs (     flushPeriodMs conf) (sendPacketOnce st) "sender"
  realMain
  putStrLnT "Flushing remaining messages"
  -- Shutdown. For now, just wait until all messages are out of the queue
  -- 1. Stop reception of new messages (kill the watchdog, force dropping)
  killThread checkTID
  atomically $ writeTVar (isDropping st) True
  -- 2. Wait until all messages are sent
  let waitFlush = do
        numrs <- atomically $ readTVar (numRecords st)
        unless (numrs == 0) $ threadDelay (1000*flushPeriodMs conf) >> waitFlush
  waitFlush
  killThread packTID
  -- Hand the sender one last (empty) packet, then wait for the TMVar to
  -- drain, which signals the sender has picked it up.
  atomically $ putTMVar (packet st) []
  let waitSend = do
        sent <- atomically $ isEmptyTMVar (packet st)
        unless sent $ threadDelay (1000*flushPeriodMs conf) >> waitSend
  waitSend
  killThread sendTID
  killThread calTID
  putStrLnT "Shutdown finished."
-- | Watchdog: switch 'isDropping' on when the queue exceeds the
-- configured bound and back off once it has drained, logging each state.
checkQueueSize :: GregState -> IO ()
checkQueueSize st = do
  queued <- atomically $ readTVar (numRecords st)
  let limit = maxBufferedRecords (configuration st)
  dropping <- atomically $ readTVar (isDropping st)
  if queued > limit
    then if dropping
           then putStrLnT ("Still dropping (queue " ++ show queued ++ ")")
           else do putStrLnT ("Started to drop (queue " ++ show queued ++ ")")
                   atomically $ writeTVar (isDropping st) True
    else if dropping
           then do putStrLnT ("Stopped dropping (queue " ++ show queued ++ ")")
                   atomically $ writeTVar (isDropping st) False
           else return () -- everything is OK
-- | Move up to 10000 queued records (the protocol's batch limit) from the
-- FIFO into the sender's outbox in one STM transaction.  If the sender is
-- still holding the previous packet, 'retry' blocks and rolls the reads
-- back, so no records are lost.
-- NOTE(review): in DEBUG builds 'putStrLnT' is Debug.Trace inside STM;
-- a retried transaction will repeat its trace output.
packRecordsOnce :: GregState -> IO ()
packRecordsOnce st = atomically $ do
  putStrLnT $ "Packing: reading all messages ..."
  rs <- readAtMost (10000::Int) -- Mandated by protocol
  putStrLnT $ "Packing: reading all messages done (" ++ show (length rs) ++ ")"
  unless (null rs) $ do
    putStrLnT $ "Packing " ++ show (length rs) ++ " records"
    atomModTVar (numRecords st) (\x -> x - length rs) -- decrease queue length
    senderAccepted <- tryPutTMVar (packet st) rs -- putting messages in the outbox
    unless senderAccepted retry
  putStrLnT "Packing done"
  where
    readAtMost 0 = return []
    readAtMost n = do
      empty <- isEmptyTChan (records st)
      if empty then return []
        else do r <- readTChan (records st)
                rest <- readAtMost (n-1)
                return (r:rest)
-- | Take the current packet from the outbox and push it to the server
-- over a fresh TCP connection.  Failures are logged and swallowed via
-- 'withWarning' (the 'check False' aborts the transaction, and 'orElse'
-- turns that into a no-op).
-- NOTE(review): the network push runs inside 'unsafeIOToSTM', i.e. real
-- socket IO inside an STM transaction; if the transaction is retried the
-- IO may run more than once and it cannot be rolled back.
sendPacketOnce :: GregState -> IO ()
sendPacketOnce st = atomically $ withWarning "Failed to pack/send records" $ do
  rs <- takeTMVar $ packet st
  unless (null rs) $ do
    let conf = configuration st
    putStrLnT "Pushing records"
    unsafeIOToSTM $ E.bracket (connectTo (server conf) (PortNumber $ fromIntegral $ port conf)) hClose $ \hdl -> do
      putStrLnT "Pushing records - connected"
      let msg = formatRecords (configuration st) rs
      putStrLnT $ "Snapshotted " ++ show (length rs) ++ " records --> " ++ show (B.length msg) ++ " bytes"
      unsafeUseAsCStringLen msg $ \(ptr, len) -> hPutBuf hdl ptr len
      hFlush hdl
      putStrLnT $ "Pushing records - done"
  where
    withWarning s t = (t `catchSTM` (\e -> putStrLnT (s ++ ": " ++ show (e::E.SomeException)) >> check False)) `orElse` return ()
-- | Serialise a batch into the wire format: sender UUID, a zero byte,
-- the length-prefixed client id, each record, then a zero terminator.
formatRecords :: Configuration -> [Record] -> B.ByteString
formatRecords conf rs = repack $ runPut $ do
  putByteString ourUuid
  putWord8 0
  putWord32le (fromIntegral (length (clientId conf)))
  putByteString (B.pack (clientId conf))
  mapM_ putRecord rs
  putWord32le 0
-- | Wire format of one record: tag 1, nanosecond timestamp, then the
-- host name and the message, each prefixed with its byte length.
putRecord :: Record -> Put
putRecord r = do
  putWord32le 1
  putWord64le (toNanos64 (timestamp r))
  putWord32le (fromIntegral $ B.length hostname)
  putByteString hostname
  putWord32le (fromIntegral $ B.length (message r))
  putByteString (message r)
-- | One calibration exchange: connect to the calibration port, send our
-- 16-byte UUID, then answer each 8-byte server timestamp with our own
-- current time (nanoseconds, little-endian) until the server hangs up.
initiateCalibrationOnce :: GregState -> IO ()
initiateCalibrationOnce st = do
  putStrLnT "Initiating calibration"
  let conf = configuration st
  E.bracket (connectTo (server conf) (PortNumber $ fromIntegral $ calibrationPort conf)) hClose $ \hdl -> do
    -- unbuffered: each timestamp must go out immediately for timing accuracy
    hSetBuffering hdl NoBuffering
    putStrLnT "Calibration - connected"
    unsafeUseAsCString ourUuid $ \p -> hPutBuf hdl p 16
    allocaBytes 8 $ \pTheirTimestamp -> do
      let whenM mp m = mp >>= \v -> when v m
          loop = whenM (hSkipBytes hdl 8 pTheirTimestamp) $ do
            ts <- preciseTimeSpec
            let pOurTimestamp = repack $ runPut $ putWord64le (toNanos64 ts)
            unsafeUseAsCString pOurTimestamp $ \ptr -> hPutBuf hdl ptr 8
            -- putStrLnT "Calibration - next loop iteration passed"
            loop
      loop
  putStrLnT "Calibration ended - sleeping"
-- | Global logger state, created once at first use.
-- NOINLINE is required: without it GHC may inline/duplicate this
-- 'unsafePerformIO' CAF and create several independent logger states.
{-# NOINLINE state #-}
state :: TVar GregState
state = unsafePerformIO $ do
    rs <- newTChanIO
    numrs <- newTVarIO 0
    dropping <- newTVarIO False
    pkt <- newEmptyTMVarIO
    newTVarIO $ GregState defaultConfiguration rs numrs dropping pkt
-- | Log a message. The message will show up in server's output
-- annotated with a global timestamp (client's clock offset does
-- not matter).
logMessage :: String -> IO ()
logMessage s = do
  t <- preciseTimeSpec -- timestamp first, before any queueing delay
  st <- atomically $ readTVar state
  shouldDrop <- atomically $ readTVar (isDropping st)
  -- NOTE(review): the drop flag is read in a separate transaction from
  -- the enqueue below, so a message can slip in just as dropping starts;
  -- this only softens the bound, it does not corrupt state.
  unless shouldDrop $ atomically $ do
    writeTChan (records st) (Record {timestamp = t, message = B.pack s})
    atomModTVar (numRecords st) (+1)
--------------------------------------------------------------------------
-- Utilities
-- | Flatten a 'TimeSpec' (seconds + nanoseconds) into total nanoseconds.
toNanos64 :: TimeSpec -> Word64
toNanos64 (TimeSpec s ns) = 1000000000 * fromIntegral s + fromIntegral ns
-- | Read and discard exactly @n@ bytes from the handle (using @buf@ as
-- scratch space); 'False' if the handle runs out of data first.
hSkipBytes :: Handle -> Int -> Ptr a -> IO Bool
hSkipBytes _ 0 _ = return True
hSkipBytes h n buf = do
  eof <- hIsEOF h
  if eof
    then return False
    else do
      got <- hGetBuf h buf n
      if got < 0
        then return False
        else hSkipBytes h (n - got) buf
-- | Collapse a lazy ByteString into a single strict one.
repack :: L.ByteString -> B.ByteString
repack lbs = B.concat (L.toChunks lbs)
-- | Apply a function to the contents of a 'TVar'.
-- The new value is forced to WHNF ('$!') before being written; the lazy
-- version builds an unevaluated thunk chain on the 'numRecords' counter,
-- which is updated on every log call.
atomModTVar :: TVar a -> (a -> a) -> STM ()
atomModTVar var f = do
    val <- readTVar var
    writeTVar var $! f val
-- | Debug trace helper: emits via 'Debug.Trace.trace' in DEBUG builds
-- and is a no-op otherwise.  Monad-polymorphic so it can be used in
-- both IO and STM code above.
putStrLnT :: (Monad m) => String -> m ()
#ifdef DEBUG
putStrLnT s = trace s $ return ()
#else
putStrLnT _ = return ()
#endif
#ifdef DEBUG
-- | Stress test (DEBUG builds only): log as fast as possible, forever.
testFlood :: IO ()
testFlood = withGregDo defaultConfiguration $ forever $ logMessage "Hello" -- >> threadDelay 1000
-- | Sanity test (DEBUG builds only): log an increasing counter ten
-- times a second.
testSequence :: IO ()
testSequence = withGregDo defaultConfiguration $ mapM_ (\x -> logMessage (show x) >> threadDelay 100000) [1..]
#endif
| jkff/greg | greg-clients/haskell/System/Log/Greg.hs | bsd-3-clause | 11,509 | 0 | 27 | 3,032 | 2,818 | 1,412 | 1,406 | 193 | 4 |
module Mask where
import Data.Char (toUpper)
import Data.Maybe (catMaybes, isJust)
import Data.List
-- | A partially known word: 'Just' a revealed (upper-case) letter,
-- 'Nothing' for a still-hidden position.
newtype Mask = Mask [Maybe Char]
  deriving (Eq, Show)

-- | Read a mask from user text: @.@ marks an unknown position, letters
-- (case-insensitive) mark known ones; any other character rejects the
-- whole input.
parseMask :: String -> Maybe Mask
parseMask = fmap Mask . mapM (classify . toUpper)
  where
    classify '.' = Just Nothing
    classify ch
      | ch `elem` alphabet = Just (Just ch)
      | otherwise          = Nothing

-- | Is the letter revealed anywhere in the mask?
maskElem :: Char -> Mask -> Bool
maskElem c (Mask cells) = Just c `elem` cells

-- | Hide every cell except those holding the last revealed vowel (if
-- any); with no vowel present the whole mask is wiped.
scrubMask :: Mask -> Mask
scrubMask (Mask cells) = Mask (map keep cells)
  where
    lastVowel = find isVowel (reverse (catMaybes cells))
    keep cell
      | cell == lastVowel = cell
      | otherwise         = Nothing

-- | The distinct revealed letters, in ascending order.
maskLetters :: Mask -> [Char]
maskLetters (Mask cells) = sort (nub (catMaybes cells))

-- | Number of trailing cells that are not known vowels (hidden cells
-- count as consonants here).
trailingConsonants :: Mask -> Int
trailingConsonants (Mask cells) =
  length (takeWhile notKnownVowel (reverse cells))
  where notKnownVowel = maybe True (not . isVowel)

-- | Does the word fit the mask?  Known cells must match exactly, and
-- hidden cells must not contain any banned letter.
match :: Mask -> [Char] -> String -> Bool
match (Mask cells0) banned word0 = go cells0 word0
  where
    go (Nothing : cs) (y : ys) = y `notElem` banned && go cs ys
    go (Just x  : cs) (y : ys) = x == y             && go cs ys
    go []             []       = True
    go _              _        = False

-- | All letters a mask may contain.
alphabet :: String
alphabet = ['A'..'Z']

-- | The vowels, used by 'scrubMask' and 'trailingConsonants'.
vowels :: String
vowels = "AEIOU"

-- | Is the character a vowel?
isVowel :: Char -> Bool
isVowel = (`elem` vowels)

-- * Mask editing functions

-- | Overlay @input@ on @template@: keep the input's cells while they
-- last, then place @c@ followed by the template's remaining known
-- prefix.  Fails when the input is at least as long as the template.
extendMask :: Mask -> Maybe Char -> Mask -> Maybe Mask
extendMask (Mask template) c (Mask input) = fmap Mask (go template input)
  where
    go (_ : ts) (y : ys) = fmap (y :) (go ts ys)
    go []       _        = Nothing
    go (_ : ts) []       = Just (c : takeWhile isJust ts)

-- | The maximal known prefix of the template.
generateMaskPrefix :: Mask -> Mask
generateMaskPrefix (Mask template) = Mask (takeWhile isJust template)
| glguy/vty-hangman-helper | Mask.hs | bsd-3-clause | 1,697 | 0 | 11 | 483 | 741 | 382 | 359 | 42 | 4 |
-- | Convenience wrapper: re-exports the in-memory database interface
-- together with its cache layer under a single import.
module Basil.Database.InMemory ( module Basil.Database.InMemory.Interface,
                                 module Basil.Database.InMemory.Cache,
                               ) where
import Basil.Database.InMemory.Cache
import Basil.Database.InMemory.Interface
| chriseidhof/Basil | src/Basil/Database/InMemory.hs | bsd-3-clause | 266 | 0 | 5 | 80 | 40 | 29 | 11 | 4 | 0 |
module GrammarLexer
( GrammarLexeme(..)
, runLexer
) where
import Data.Char
import Data.Monoid ((<>))
import Control.Applicative ((<*>), (<*), (*>))
import Text.Parsec.String (Parser)
import Text.Parsec.Char
import Text.Parsec.Prim
import Text.Parsec.Combinator
import Text.Parsec.Error (ParseError)
import Text.Parsec.Pos (SourcePos)
-- | Tokens produced by the grammar lexer.
data GrammarLexeme = NonTerm String       -- ^ lower-case identifier
                   | Term String          -- ^ upper-case identifier
                   | Typename String      -- ^ type name (not produced by the lexer itself)
                   | StringLiteral String -- ^ contents of a @'...'@ literal
                   | RegexLiteral String  -- ^ contents of a @/.../@ literal
                   | CodeBlock String     -- ^ contents of a @{...}@ block
                   | ParamsBlock String   -- ^ contents of a @(...)@ block
                   | Colon
                   | Semicolon
                   | Comma
                   | Divider
                   | ReturnArrow
                   | DoubleColon
                   | LeftSquare
                   | RightSquare
                   deriving Eq

-- Renders each lexeme the way it appears in grammar source.
instance Show GrammarLexeme where
  show (NonTerm s) = s
  show (Term s) = s
  show (Typename s) = s  -- was missing: 'show (Typename _)' crashed with a pattern-match failure
  show (StringLiteral s) = s
  show (RegexLiteral s) = s
  show (CodeBlock s) = s
  show (ParamsBlock s) = s
  show Colon = ":"
  show Semicolon = ";"
  show Comma = ","
  show Divider = "|"
  show ReturnArrow = "->"
  show DoubleColon = "::"
  show LeftSquare = "["
  show RightSquare = "]"
-- Lifts a Monoid pointwise over parser results so string-returning
-- parsers can be glued with mappend (this is what bracesText relies on).
-- NOTE(review): orphan instance; and on GHC >= 8.4 Monoid requires a
-- Semigroup superclass instance, so this only compiles as-is on older
-- GHCs -- confirm the supported compiler range.
instance Monoid a => Monoid (ParsecT s u m a) where
  mappend a b = mappend <$> a <*> b
  mempty = return mempty
-- | Run a parser, then consume any trailing whitespace.
skipSpaces :: Parser a -> Parser a
skipSpaces p = p <* spaces
-- | @surround p q@ parses @p@, then @q@, then @p@ again, keeping @q@.
surround :: Parser a -> Parser b -> Parser b
surround p q = p *> q <* p
-- | Attach the current source position to a parser's result.
parsePos :: Parser a -> Parser (SourcePos, a)
parsePos p = (,) <$> getPosition <*> p
-- | Lex a whole input: leading whitespace, one or more positioned
-- lexemes, then end of input.
tokenize :: Parser [(SourcePos, GrammarLexeme)]
tokenize = spaces *> many1 (parsePos grammarLexeme) <* eof
-- | One lexeme.  Ordering matters: every alternative is wrapped in
-- 'try' so a partial match backtracks; 'dcolon' must precede 'colon',
-- and 'lineComment' is the final fallback.
grammarLexeme :: Parser GrammarLexeme
grammarLexeme = choice $ map (try . skipSpaces)
  [ nonTerm
  , term
  , stringLiteral
  , regexLiteral
  , codeBlock
  , paramsBlock
  , dcolon
  , semicolon
  , comma
  , divider
  , arrow
  , lsq
  , rsq
  , colon
  , lineComment
  ]
-- | Skip a @//@ line comment and return the lexeme after it.
-- NOTE(review): because this recurses into 'grammarLexeme', a comment at
-- the very end of the input makes the parse fail -- confirm intended.
lineComment :: Parser GrammarLexeme
lineComment = string "//" *> many (noneOf "\r\n") *> spaces *> grammarLexeme
-- | Nonterminal: a lower-case letter then alphanumerics\/underscores.
nonTerm :: Parser GrammarLexeme
nonTerm = NonTerm <$> ((:) <$> lower <*> many (try alphaNum <|> char '_'))
-- | Terminal: an upper-case letter then alphanumerics\/underscores.
term :: Parser GrammarLexeme
term = Term <$> ((:) <$> upper <*> many (try alphaNum <|> char '_'))
-- | Single-quoted literal; backslash escapes the closing quote.
stringLiteral :: Parser GrammarLexeme
stringLiteral = StringLiteral <$> surround (char '\'') (escapedString "'")
-- | Slash-delimited regex literal; the body must be non-empty.
regexLiteral :: Parser GrammarLexeme
regexLiteral = RegexLiteral <$> surround (char '/') (escapedString1 "/")
-- | Brace-delimited code block; braces may nest.
codeBlock :: Parser GrammarLexeme
codeBlock = CodeBlock <$> (char '{' *> bracesText "{" "}" <* char '}')
-- | Parenthesised parameter block; parentheses may nest.
paramsBlock :: Parser GrammarLexeme
paramsBlock = ParamsBlock <$> (char '(' *> bracesText "(" ")" <* char ')')
-- | Text between balanced delimiters, recursing into nested pairs and
-- re-inserting the inner delimiters into the result.
bracesText :: String -> String -> Parser String
bracesText lb rb = noBrace <> (concat <$> many (string lb <> bracesText lb rb <> string rb <> noBrace))
  where noBrace = many (noneOf (lb ++ rb))
-- Fixed punctuation tokens, one parser per lexeme.
colon, semicolon, comma, divider, arrow, rsq, lsq, dcolon :: Parser GrammarLexeme
colon     = char ':'    >> return Colon
semicolon = char ';'    >> return Semicolon
comma     = char ','    >> return Comma
divider   = char '|'    >> return Divider
lsq       = char '['    >> return LeftSquare
rsq       = char ']'    >> return RightSquare
arrow     = string "->" >> return ReturnArrow
dcolon    = string "::" >> return DoubleColon
-- | A backslash escape for one of the given characters (or an escaped
-- backslash); a lone backslash stands for itself.
escapedChar :: String -> Parser Char
escapedChar s = try (char '\\' *> oneOf ('\\' : s))
            <|> char '\\'
escapedString, escapedString1 :: String -> Parser String
-- | Possibly-empty run of characters, with the given set escapable.
escapedString s = many (noneOf ('\\' : s) <|> escapedChar s)
-- | Like 'escapedString' but requires at least one character.
escapedString1 s = many1 (noneOf ('\\' : s) <|> escapedChar s)
-- | Entry point: lex a grammar source string into positioned lexemes.
runLexer :: String -> Either ParseError [(SourcePos, GrammarLexeme)]
runLexer = parse tokenize ""
| flyingleafe/parser-gen | src/GrammarLexer.hs | bsd-3-clause | 4,002 | 0 | 13 | 1,236 | 1,266 | 671 | 595 | 105 | 1 |
module TestTypechecker where
import Test.HUnit
import Parser
import Typechecker
-- Literals and conditionals.
intCheck :: Assertion
intCheck = parseAndTypecheck "60" "int"
if0Check :: Assertion
if0Check = parseAndTypecheck "if0 5 then <> else <>" "unit"
-- Abstraction, application and type application.
absCheck :: Assertion
absCheck = parseAndTypecheck "func [] (x:int) . x" "forall [] (int) -> int"
-- The following should fail, since we need a type annotation for the function
-- appCheck :: Assertion
-- appCheck = parseAndTypecheck "((func [] (x:int) . x) [] 12)" "int"
annotatedAppCheck :: Assertion
annotatedAppCheck = parseAndTypecheck "(((func [] (x:int) . x) : forall [] (int) -> int) [] (12))" "int"
tyAppCheck :: Assertion
tyAppCheck = parseAndTypecheck "(((func [α] (x:α) . x) : forall [α] (α) -> α) [int] (13))" "int"
-- Exceptions.
handleCheck :: Assertion
handleCheck = parseAndTypecheck "handle (1; x.2)" "int"
raiseCheck :: Assertion
raiseCheck = parseAndTypecheck "raise [int] 1" "int"
-- Let bindings, with and without annotation.
letCheck :: Assertion
letCheck = parseAndTypecheck "let x = 1 in <>" "unit"
letAnnoCheck :: Assertion
letAnnoCheck = parseAndTypecheck "let x = (1:int) in <>" "unit"
-- | Parse an expression and a type, then check the expression against
-- the type; any parse error or typechecking mismatch fails the test.
parseAndTypecheck :: String -> String -> Assertion
parseAndTypecheck ex typ =
  case parseExpr ex of
    Left err -> assertFailure (show err)
    Right e ->
      case parseType typ of
        Left err -> assertFailure (show err)
        Right ty -> typeCheckProgram e ty @?= Right True
| phillipm/mlish-to-llvm | test/TestTypechecker.hs | bsd-3-clause | 1,374 | 0 | 10 | 234 | 272 | 145 | 127 | 28 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.