code (stringlengths 5 .. 1.03M) | repo_name (stringlengths 5 .. 90) | path (stringlengths 4 .. 158) | license (stringclasses, 15 values) | size (int64 5 .. 1.03M) | n_ast_errors (int64 0 .. 53.9k) | ast_max_depth (int64 2 .. 4.17k) | n_whitespaces (int64 0 .. 365k) | n_ast_nodes (int64 3 .. 317k) | n_ast_terminals (int64 1 .. 171k) | n_ast_nonterminals (int64 1 .. 146k) | loc (int64 -1 .. 37.3k) | cycloplexity (int64 -1 .. 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE QuasiQuotes #-}
module Graphics.Urho3D.Resource.Image(
Image
, SharedImage
, WeakImage
, VectorSharedImagePtr
, PODVectorImagePtr
, imageContext
, imageSavePNG
, imageSetSize2D
, imageSetData
, imageSetPixel2D
, imageSetPixel2DInt
, imageGetPixel2D
, imageGetPixel2DInt
, imageGetWidth
, imageGetHeight
, imageGetDepth
, imageGetComponents
, imageGetData
) where
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Cpp as C
import Data.Monoid
import Foreign
import Foreign.C.String
import Graphics.Urho3D.Container.ForeignVector
import Graphics.Urho3D.Container.Ptr
import Graphics.Urho3D.Container.Vector
import Graphics.Urho3D.Core.Context
import Graphics.Urho3D.Creatable
import Graphics.Urho3D.Math.Color
import Graphics.Urho3D.Math.StringHash
import Graphics.Urho3D.Monad
import Graphics.Urho3D.Resource.Internal.Image
import Graphics.Urho3D.Resource.Resource
C.context (C.cppCtx
<> imageCntx
<> contextContext
<> resourceContext
<> colorContext
<> sharedImagePtrCntx
<> weakImagePtrCntx
<> podVectorImagePtrCntx
)
C.include "<Urho3D/Resource/Image.h>"
C.using "namespace Urho3D"
C.verbatim "typedef Vector<SharedPtr<Image> > VectorSharedImagePtr;"
imageContext :: C.Context
imageContext = imageCntx
<> resourceContext
<> sharedImagePtrCntx
sharedPtr "Image"
sharedWeakPtr "Image"
podVectorPtr "Image"
newImage :: Ptr Context -> IO (Ptr Image)
newImage ptr = [C.exp| Image* { new Image( $(Context* ptr) ) } |]
deleteImage :: Ptr Image -> IO ()
deleteImage ptr = [C.exp| void { delete $(Image* ptr) } |]
instance Creatable (Ptr Image) where
type CreationOptions (Ptr Image) = Ptr Context
newObject = liftIO . newImage
deleteObject = liftIO . deleteImage
instance ResourceType Image where
resourceType _ = StringHash . fromIntegral $ [C.pure| unsigned int { Image::GetTypeStatic().Value() } |]
instance Creatable (Ptr VectorSharedImagePtr) where
type CreationOptions (Ptr VectorSharedImagePtr) = ()
newObject _ = liftIO [C.exp| VectorSharedImagePtr* { new VectorSharedImagePtr() } |]
deleteObject ptr = liftIO $ [C.exp| void {delete $(VectorSharedImagePtr* ptr)} |]
instance ReadableVector VectorSharedImagePtr where
type ReadVecElem VectorSharedImagePtr = SharedPtr Image
foreignVectorLength ptr = fromIntegral <$>
liftIO [C.exp| unsigned int {$(VectorSharedImagePtr* ptr)->Size()} |]
foreignVectorElement ptr i = liftIO $ do
let i' = fromIntegral i
peekSharedPtr =<< [C.exp| SharedImage* { new SharedPtr<Image>((*$(VectorSharedImagePtr* ptr))[$(int i')]) } |]
instance WriteableVector VectorSharedImagePtr where
type WriteVecElem VectorSharedImagePtr = SharedPtr Image
foreignVectorAppend ptr sp = liftIO $ do
let p = pointer sp
[C.exp| void { $(VectorSharedImagePtr* ptr)->Push(SharedPtr<Image>($(Image* p))) } |]
-- | Saves image as PNG
imageSavePNG :: (Pointer p a, Parent Image a, MonadIO m) => p -- ^ Pointer to image or child
-> String -- ^ path to file with extension
-> m ()
imageSavePNG p path = liftIO $ withCString path $ \path' -> do
let ptr = parentPointer p
[C.exp| void {$(Image* ptr)->SavePNG(String($(const char* path')))} |]
-- | Set 2D size and number of color components. Old image data will be destroyed and new data is undefined. Return true if successful.
imageSetSize2D :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> Int -- ^ width
-> Int -- ^ height
-> Word -- ^ components
-> m Bool
imageSetSize2D p width height components = liftIO $ do
let ptr = parentPointer p
width' = fromIntegral width
height' = fromIntegral height
components' = fromIntegral components
toBool <$> [C.exp| int {(int)$(Image* ptr)->SetSize($(int width'), $(int height'), $(unsigned int components'))} |]
-- | Set new image data.
imageSetData :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> Ptr () -- ^ Pixel data
-> m ()
imageSetData p datum = liftIO $ do
let ptr = parentPointer p
datum' = castPtr datum
[C.exp| void {$(Image* ptr)->SetData($(const unsigned char* datum'))} |]
-- | Set a 2D pixel.
imageSetPixel2D :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> Int -- ^ x
-> Int -- ^ y
-> Color -- ^ color
-> m ()
imageSetPixel2D p x y color = liftIO $ with color $ \color' -> do
let ptr = parentPointer p
x' = fromIntegral x
y' = fromIntegral y
[C.exp| void {$(Image* ptr)->SetPixel($(int x'), $(int y'), *$(Color* color'))} |]
-- | Set a 2D pixel with an integer color. R component is in the 8 lowest bits.
imageSetPixel2DInt :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> Int -- ^ x
-> Int -- ^ y
-> Word -- ^ uint color
-> m ()
imageSetPixel2DInt p x y uintColor = liftIO $ do
let ptr = parentPointer p
x' = fromIntegral x
y' = fromIntegral y
uintColor' = fromIntegral uintColor
[C.exp| void {$(Image* ptr)->SetPixelInt($(int x'), $(int y'), $(unsigned int uintColor'))} |]
-- | Return a 2D pixel color.
imageGetPixel2D :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> Int -- ^ x
-> Int -- ^ y
-> m Color
imageGetPixel2D p x y = liftIO $ alloca $ \color' -> do
let ptr = parentPointer p
x' = fromIntegral x
y' = fromIntegral y
[C.block| void {
*$(Color* color') = $(Image* ptr)->GetPixel($(int x'), $(int y'));
} |]
peek color'
-- | Return a 2D pixel integer color. R component is in the 8 lowest bits.
imageGetPixel2DInt :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> Int -- ^ x
-> Int -- ^ y
-> m Word
imageGetPixel2DInt p x y = liftIO $ do
let ptr = parentPointer p
x' = fromIntegral x
y' = fromIntegral y
fromIntegral <$> [C.exp| unsigned int {$(Image* ptr)->GetPixelInt($(int x'), $(int y'))} |]
-- | Return width.
imageGetWidth :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> m Int
imageGetWidth p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| int {$(Image* ptr)->GetWidth()} |]
-- | Return height.
imageGetHeight :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> m Int
imageGetHeight p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| int {$(Image* ptr)->GetHeight()} |]
-- | Return depth.
imageGetDepth :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> m Int
imageGetDepth p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| int {$(Image* ptr)->GetDepth()} |]
-- | Return the number of color components.
imageGetComponents :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> m Word
imageGetComponents p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| int {$(Image* ptr)->GetComponents()} |]
-- | Return a pointer to the raw pixel data.
imageGetData :: (Pointer p a, Parent Image a, MonadIO m)
=> p -- ^ Pointer to image or child
-> m (Ptr ())
imageGetData p = liftIO $ do
let ptr = parentPointer p
castPtr <$> [C.exp| unsigned char* {$(Image* ptr)->GetData()} |]
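-- Illustrative usage sketch (not part of the original bindings): it assumes a
-- valid @Ptr Context@ named @cntx@ from the running engine, a placeholder
-- value @someColor :: Color@ built via "Graphics.Urho3D.Math.Color", and the
-- usual 'Pointer'/'Parent' instances for a raw @Ptr Image@.
--
-- > example :: Ptr Context -> Color -> IO ()
-- > example cntx someColor = do
-- >   img <- newObject cntx              -- uses the Creatable instance above
-- >   _ok <- imageSetSize2D img 64 64 4  -- 64x64 pixels, 4 components (RGBA)
-- >   imageSetPixel2D img 0 0 someColor
-- >   imageSavePNG img "out.png"
-- >   deleteObject img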
| Teaspot-Studio/Urho3D-Haskell | src/Graphics/Urho3D/Resource/Image.hs | mit | 7,300 | 0 | 14 | 1,436 | 1,779 | 961 | 818 | -1 | -1 |
{- |
Module representing a JSON-API meta object.
Specification: <http://jsonapi.org/format/#document-meta>
-}
module Network.JSONApi.Meta
( Meta
, MetaObject (..)
, mkMeta
) where
import Data.Aeson (ToJSON, FromJSON, Object, toJSON)
import Data.HashMap.Strict as HM
import Data.Text (Text)
import qualified GHC.Generics as G
import Control.DeepSeq (NFData)
{- |
Type representing a JSON-API meta object.
Meta is an abstraction around an underlying Map consisting of
resource-specific metadata.
Example JSON:
@
"meta": {
"copyright": "Copyright 2015 Example Corp.",
"authors": [
"Andre Dawson",
"Kirby Puckett",
"Don Mattingly",
"Ozzie Guillen"
]
}
@
Specification: <http://jsonapi.org/format/#document-meta>
-}
data Meta = Meta Object
deriving (Show, Eq, G.Generic)
instance NFData Meta
instance ToJSON Meta
instance FromJSON Meta
instance Semigroup Meta where
(<>) (Meta a) (Meta b) = Meta $ HM.union a b
instance Monoid Meta where
mempty = Meta $ HM.empty
{- |
Convenience class for constructing a Meta type
Example usage:
@
data Pagination = Pagination
{ currentPage :: Int
, totalPages :: Int
} deriving (Show, Generic)
instance ToJSON Pagination
instance MetaObject Pagination where
typeName _ = "pagination"
@
-}
class (ToJSON a) => MetaObject a where
typeName :: a -> Text
{- |
Convenience constructor function for the Meta type
Useful on its own or in combination with Meta's monoid instance
Example usage:
See MetaSpec.hs for an example
-}
mkMeta :: (MetaObject a) => a -> Meta
mkMeta obj = Meta $ HM.singleton (typeName obj) (toJSON obj)
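-- Illustrative usage sketch (not part of this module): using the hypothetical
-- 'Pagination' type from the 'MetaObject' example above together with some
-- other meta object, several metas can be merged into one "meta" member via
-- the 'Monoid' instance.
--
-- > pagination = Pagination { currentPage = 1, totalPages = 10 }
-- > combined   = mkMeta pagination <> mkMeta someOtherMetaObject
--
-- Since '(<>)' is defined with the left-biased 'HM.union', keys from the left
-- operand win on collisions.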
| toddmohney/json-api | src/Network/JSONApi/Meta.hs | mit | 1,634 | 0 | 8 | 314 | 266 | 149 | 117 | 22 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Parser
{-
( Language
, Word
, Transcription
, WordIdentity
, WordCard
, Section
, Row
, Parser
, ParseError
, runP
, file
) -}
where
import Data.Text as T (Text, strip, pack, unpack, concat, append)
import Prelude hiding (words, Word)
import Text.ParserCombinators.Parsec hiding (spaces, label)
import Control.Monad
import Text.Parsec.Prim
--- TODO: read http://www.serpentine.com/blog/2007/01/31/parsing-a-simple-config-file-in-haskell/
--- to decide whether to parse into a Map right away or in a later pass
type Language = T.Text
type Word = T.Text
type Transcription = T.Text
skipSpaces = skipMany (char ' ')
-- tries to match a backslash escape and returns the escaped character
tryMatchEcho :: GenParser Char st Char
tryMatchEcho = Text.ParserCombinators.Parsec.try $ string "\\" >> anyChar
type WordIdentity = (Word, Maybe Transcription)
lexemeWord :: GenParser Char st WordIdentity
lexemeWord =
do
word <- many (tryMatchEcho <|> noneOf "[|,=\n")
(transcription :: Maybe String)
<- optionMaybe . between (char '[') (char ']') . many $
tryMatchEcho <|> noneOf "]"
skipSpaces
return (T.strip . T.pack $ word, T.strip . T.pack <$> transcription)
<?> "a word (spaces are allowed)"
type WordCard = (Word, [WordIdentity])
lexemeWordKind :: GenParser Char st WordCard
lexemeWordKind =
do
head@(wd, tr) <- lexemeWord
tail <- option [] (char '|' >> sepBy lexemeWord (char '|'))
return (wd, head:tail)
words :: GenParser Char st [WordCard]
words = skipSpaces >> sepBy lexemeWordKind (char ',') <?> "a list of words delimited by ','"
type Section = (Language, [WordCard])
section :: GenParser Char st Section
section =
do skipSpaces
lang <- many (noneOf " [|,=\n\r") -- many1 generates error on case parseTest line "= "
ws <- words
return (T.pack lang, ws)
<?> "=lang"
type Row = [Section]
line :: GenParser Char st Row
line =
do skipSpaces
result <- choice [ char '=' >> sepBy section (char '=')
, skipMany (noneOf "\n") >> return []
]
-- char ('\n')
return result
file :: GenParser Char st [Row]
file =
do
val <- sepBy line (char '\n')
eof
return val
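-- Illustrative sketch of the accepted input format, inferred from the parsers
-- above (not taken from the repository's documentation). A line such as
--
-- > =en dog [dɒg], cat =de Hund | Hündin, Katze
--
-- should parse into one 'Row' whose first 'Section' is roughly
--
-- > ("en", [ ("dog", [("dog", Just "dɒg")])
-- >        , ("cat", [("cat", Nothing)]) ])
--
-- while the card @Hund | Hündin@ collects both alternatives under the key
-- @"Hund"@. Lines that do not start with @=@ parse to the empty row @[]@.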
| erithion/duo-memo | Parser.hs | mit | 2,362 | 0 | 15 | 613 | 645 | 341 | 304 | 54 | 1 |
module Main where
import Test.Tasty
import Test.Tasty.HUnit
import Hummingbird.Administration.Request
import Network.MQTT.Broker.Session (SessionIdentifier (..))
main :: IO ()
main = defaultMain $ testGroup "Hummingbird"
[ parserTests
]
parserTests :: TestTree
parserTests = testGroup "Request Parser"
[ testCase "help #01" $ assertEqual "" (Right Help) $ parse "help"
, testCase "help #02" $ assertEqual "" (Right Help) $ parse " help \t "
, testCase "help #03" $ assertEqual "" (Right Help) $ parse "\nhelp\t\n\n\t "
, testCase "broker #01" $ assertEqual "" (Right BrokerStatus) $ parse "broker"
, testCase "config #01" $ assertEqual "" (Right ConfigStatus) $ parse "config"
, testCase "config #02" $ assertEqual "" (Right ConfigStatus) $ parse "config status"
, testCase "config #03" $ assertEqual "" (Right ConfigReload) $ parse "config reload"
, testCase "auth #01" $ assertEqual "" (Right AuthStatus) $ parse "auth"
, testCase "auth #01" $ assertEqual "" (Right AuthStatus) $ parse "auth status"
, testCase "auth #03" $ assertEqual "" (Right AuthRestart) $ parse "auth restart"
, testCase "transports #01" $ assertEqual "" (Right TransportsStatus) $ parse "transport"
, testCase "transports #02" $ assertEqual "" (Right TransportsStatus) $ parse "transports"
, testCase "transports #03" $ assertEqual "" (Right TransportsStatus) $ parse "transports status"
, testCase "transports #04" $ assertEqual "" (Right TransportsStatus) $ parse "transport status"
, testCase "session #01" $ assertEqual "" (Right SessionList) $ parse "session"
, testCase "session #02" $ assertEqual "" (Right SessionList) $ parse "sessions"
, testCase "session #03" $ assertEqual "" (Right $ SessionStatus $ SessionIdentifier 0) $ parse "session 0"
, testCase "session #04" $ assertEqual "" (Right $ SessionStatus $ SessionIdentifier 0) $ parse "sessions 0"
, testCase "session #05" $ assertEqual "" (Right $ SessionStatus $ SessionIdentifier 234234223) $ parse "sessions 0000234234223"
, testCase "session #06" $ assertEqual "" (Right $ SessionStatus $ SessionIdentifier 0) $ parse "sessions 0 status"
, testCase "session #07" $ assertEqual "" (Right $ SessionDisconnect $ SessionIdentifier 0) $ parse "sessions 0 disconnect"
, testCase "session #08" $ assertEqual "" (Right $ SessionTerminate $ SessionIdentifier 0) $ parse "sessions 0 terminate"
, testCase "session #09" $ assertEqual "" (Right $ SessionSubscriptions $ SessionIdentifier 0) $ parse "sessions 0 subscriptions"
, testCase "quit #01" $ assertEqual "" (Right Quit) $ parse " quit "
, testCase "quit #02" $ assertEqual "" (Right Quit) $ parse " \r\n\t exit \n "
]
| lpeterse/haskell-hummingbird | test/Main.hs | mit | 2,913 | 0 | 12 | 694 | 835 | 400 | 435 | 35 | 1 |
module Main where
import Solidran.Mrna.Detail
main :: IO ()
main = getLine >>= (print . rnaCombsMod 1000000)
| Jefffrey/Solidran | src/Solidran/Mrna/Main.hs | mit | 111 | 0 | 8 | 19 | 39 | 22 | 17 | 4 | 1 |
{-# LANGUAGE TemplateHaskell, FlexibleInstances, OverloadedStrings, DeriveGeneric #-}
{-# LANGUAGE KindSignatures, DataKinds #-}
module EventProvider (
GlobalSettings(GlobalSettings), EventProvider(..), MemberType(..), ConfigValueType(..),
eventProviderWrap, getSettingsFolder, getDataFolder, deriveConfigRecord,
ConfigDataType(..), ConfigDataInfo(..), ContentType, FolderPath, Url) where
import Data.Time.Calendar
import Data.Aeson
import Language.Haskell.TH
import Data.ByteString (ByteString)
import Control.Error
import GHC.Generics
import Control.Monad
import Data.Text (Text)
import TsEvent
data MemberType = MtFilePath | MtFolderPath | MtPassword
| MtText | MtCombo | MtMultiChoice
deriving (Eq, Show, Generic)
instance ToJSON MemberType
instance FromJSON MemberType
data ConfigValueType = Standalone | DependsOnOthers
deriving (Eq, Show, Generic)
instance ToJSON ConfigValueType
instance FromJSON ConfigValueType
data ConfigDataInfo = ConfigDataInfo
{
memberName :: String,
memberLabel :: String,
memberType :: MemberType,
valueType :: ConfigValueType
} deriving (Eq, Show, Generic)
instance ToJSON ConfigDataInfo
instance FromJSON ConfigDataInfo
deriveConfigRecord :: ConfigDataType -> Q [Dec]
deriveConfigRecord (ConfigDataType providerName cfgMembers) = do
let cfgDataName = mkName (providerName ++ "ConfigRecord")
let ctrName = mkName (providerName ++ "ConfigRecord")
fields <- forM cfgMembers createConfigRecordField
showT <- [t|Show|]
return [DataD [] cfgDataName [] Nothing [RecC ctrName fields] [showT]]
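-- Illustrative sketch of what the splice generates; the 'ConfigDataType'
-- value below is an invented example, not one shipped with the project.
--
-- > emailCfg :: ConfigDataType
-- > emailCfg = ConfigDataType
-- >     { dataName = "Email"
-- >     , members  = [ ConfigDataInfo "emailServer"  "Server"  MtText        Standalone
-- >                  , ConfigDataInfo "emailFolders" "Folders" MtMultiChoice Standalone ]
-- >     }
--
-- A splice @$(deriveConfigRecord emailCfg)@ (with @emailCfg@ defined in another
-- module, because of the usual Template Haskell stage restriction) should then
-- produce a record roughly equivalent to
--
-- > data EmailConfigRecord = EmailConfigRecord
-- >     { emailServer  :: Text
-- >     , emailFolders :: [Text]
-- >     } deriving Show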
createConfigRecordField :: ConfigDataInfo -> Q (Name, Strict, Type)
createConfigRecordField (ConfigDataInfo name _ mType _) = do
let fieldName = mkName name
datatype <- case mType of
MtText -> [t|Text|]
MtPassword -> [t|Text|]
MtFilePath -> [t|String|]
MtFolderPath -> [t|String|]
MtCombo -> [t|Text|]
MtMultiChoice -> [t|[Text]|]
st <- bang noSourceUnpackedness noSourceStrictness
return (fieldName, st, datatype)
data ConfigDataType = ConfigDataType
{
dataName :: String,
members :: [ConfigDataInfo]
} deriving (Eq, Show, Generic)
instance ToJSON ConfigDataType
data GlobalSettings = GlobalSettings
{
getSettingsFolder :: String,
getDataFolder :: String
}
type FolderPath = String
type ContentType = String
type Url = String
-- TODO I think ExceptT is not useful if the monad is IO? Maybe go back to simple IO?
data EventProvider a b = EventProvider {
getModuleName :: String,
getEvents :: a -> GlobalSettings -> Day -> (b -> Url) -> ExceptT String IO [TsEvent],
getConfigType :: [ConfigDataInfo],
getExtraData :: Maybe (a -> GlobalSettings -> b -> IO (Maybe (ContentType, ByteString))),
fetchFieldCts :: Maybe (ConfigDataInfo -> Maybe a -> GlobalSettings -> ExceptT String IO [Text])
}
instance Show (EventProvider a b) where show = getModuleName
decodeVal :: FromJSON a => Value -> a
decodeVal value = case fromJSON value of
Error msg -> error msg
Success a -> a
-- workaround for heterogeneous lists. I hate this.
eventProviderWrap :: (FromJSON a, ToJSON a, FromJSON b, ToJSON b) => EventProvider a b
-> EventProvider Value Value
eventProviderWrap (EventProvider innerGetModName innerGetEvents
innerGetConfigType innerGetExtraData innerFetchFieldCts) = EventProvider
{
getModuleName = innerGetModName,
getEvents = \a s d u -> innerGetEvents (decodeVal a) s d (u . toJSON),
getConfigType = innerGetConfigType,
getExtraData = innerGetExtraData >>= \decoder ->
Just $ \cfg s k -> decoder (decodeVal cfg) s (decodeVal k),
fetchFieldCts = innerFetchFieldCts >>= \fetcher ->
Just $ \cdi cfg s -> fetcher cdi (decodeVal <$> cfg) s
}
| emmanueltouzery/cigale-timesheet | src/Shared/EventProvider.hs | mit | 3,928 | 0 | 17 | 838 | 1,069 | 596 | 473 | 87 | 6 |
module ProjectEuler.Problem009Spec (main, spec) where
import Test.Hspec
import ProjectEuler.Problem009
main :: IO ()
main = hspec spec
spec :: Spec
spec = parallel $
describe "solve" $
it "finds the product of the Pythagorean triplet that sums to 1000" $
solve 1000 `shouldBe` Just 31875000
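-- For reference, the expected answer can be checked by hand: the only
-- Pythagorean triplet summing to 1000 is (200, 375, 425), since
-- 200 + 375 + 425 = 1000 and 200^2 + 375^2 = 40000 + 140625 = 180625 = 425^2,
-- which gives the product 200 * 375 * 425 = 31875000.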
| hachibu/project-euler | test/ProjectEuler/Problem009Spec.hs | mit | 306 | 0 | 9 | 60 | 82 | 44 | 38 | 10 | 1 |
-- |
-- Module : Chess
-- Copyright : Miika-Petteri Matikainen 2014
-- License : GPL-2
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- Simple chess library for implementing chess games.
module Chess (
Chess.Internal.Board.Board,
Chess.Internal.Board.Coordinates,
Chess.Internal.Board.Square(..),
Chess.Internal.Move.GameState,
Chess.Internal.Move.currentPlayer,
Chess.Internal.Piece.Color(..),
Chess.Internal.Piece.Piece(..),
Chess.Internal.Piece.PieceType(..),
Chess.Internal.Move.Move,
board,
fullMoveNumber,
isCheckmate,
isDraw,
isLegalMove,
isStalemate,
move,
newGame,
pieceAt,
winner,
legalMoves,
applyMove,
) where
import Chess.Internal.Board
import Chess.Internal.Move
import Chess.Internal.Piece
import qualified Chess.Internal.Game as G
import qualified Chess.Internal.Notation as N
-- | Has the game ended in checkmate
isCheckmate :: GameState -> Bool
isCheckmate = G.isCheckmate
-- | Has the game ended in stalemate
isStalemate :: GameState -> Bool
isStalemate = G.isStalemate
-- | Is the game a draw? I.e. has the game ended in stalemate or in a draw
-- by insufficient material.
isDraw :: GameState -> Bool
isDraw = G.isDraw
-- | Returns the winner of the game if any
winner :: GameState -> Maybe Color
winner = G.getWinner
-- | Is the given move legal. The only supported move format at the moment
-- is coordinate notation.
isLegalMove :: GameState
-> String -- ^ Move in coordinate notation. E.g. "e2-e4" or "b1-c3"
-> Bool
isLegalMove game moveStr = case N.parseMove game moveStr of
Just m -> m `elem` generateAllMoves game
Nothing -> False
-- | Make a move. The only supported move format at the moment is coordinate
-- notation.
move :: GameState
-> String -- ^ Move in coordinate notation. E.g. "e2-e4" or "b1-c3"
-> Maybe GameState
move game moveStr = do m <- N.parseMove game moveStr
applyMove game m
-- | Current board state in the game
board :: GameState -> Board
board = stateBoard
-- | Get initial game state
newGame :: GameState
newGame = initialState
-- | Get the piece at the given coordinate
pieceAt :: Board
-> String -- ^ Square coordinate. E.g. "e4"
-> Maybe Piece
pieceAt b coordinateStr = do coords <- parseCoordinate coordinateStr
getPiece b coords
-- | Full move number. Incremented after black's move.
fullMoveNumber :: GameState -> Integer
fullMoveNumber = moveNumber
-- | Get all legal moves in the position
legalMoves :: GameState -> [Move]
legalMoves = generateAllMoves
-- | Apply a move
applyMove :: GameState -> Move -> Maybe GameState
applyMove game m = case G.applyMove game m of
Left _ -> Nothing
Right game' -> Just game'
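-- Illustrative usage sketch (not part of the library): play the first two
-- moves of a game from 'newGame' in coordinate notation; each step yields
-- 'Nothing' if the move is illegal.
--
-- > afterTwoMoves :: Maybe GameState
-- > afterTwoMoves = move newGame "e2-e4" >>= \g -> move g "e7-e5"
-- >
-- > -- isLegalMove newGame "e2-e5" evaluates to False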
| nablaa/hchesslib | src/Chess.hs | gpl-2.0 | 3,196 | 0 | 9 | 968 | 516 | 307 | 209 | 63 | 2 |
-- Copyright (C) 2002-2004 David Roundy
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2, or (at your option)
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; see the file COPYING. If not, write to
-- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
-- Boston, MA 02110-1301, USA.
{-# LANGUAGE CPP #-}
module Darcs.UI.Commands.WhatsNew
(
whatsnew
, status
) where
import Prelude hiding ( (^), catch )
import Control.Applicative ( (<$>) )
import Control.Monad ( void )
import Control.Monad.Reader ( runReaderT )
import Control.Monad.State ( evalStateT, liftIO )
import Storage.Hashed.Tree ( Tree )
import System.Exit ( ExitCode (..), exitSuccess, exitWith )
import Darcs.Patch
( PrimOf, PrimPatch, RepoPatch
, applyToTree, plainSummaryPrims, primIsHunk
)
import Darcs.Patch.Apply ( ApplyState )
import Darcs.Patch.Choices ( patchChoicesLps, lpPatch )
import Darcs.Patch.FileHunk ( IsHunk (..) )
import Darcs.Patch.Format ( PatchListFormat (..) )
import Darcs.Patch.Inspect ( PatchInspect (..) )
import Darcs.Patch.Patchy ( Patchy )
import Darcs.Patch.Permutations ( partitionRL )
import Darcs.Patch.Prim.Class ( PrimDetails (..) )
import Darcs.Patch.Show ( ShowPatch )
import Darcs.Patch.Split ( primSplitter )
import Darcs.Patch.TouchesFiles ( choosePreTouching )
import Darcs.Patch.Witnesses.Ordered
( (:>) (..), FL (..), RL (..)
, lengthFL, reverseFL, reverseRL
)
import Darcs.Patch.Witnesses.Sealed
( Sealed (..), Sealed2 (..)
, unFreeLeft
)
import Darcs.Patch.Witnesses.Unsafe ( unsafeCoerceP )
import Darcs.Patch.Witnesses.WZipper ( FZipper (..) )
import Darcs.Repository
( RepoJob (..), Repository
, listRegisteredFiles, readRecorded
, unrecordedChangesWithPatches, withRepository
)
import Darcs.Repository.Diff ( treeDiff )
import Darcs.Repository.Prefs ( filetypeFunction )
import Darcs.Repository.Util ( getMovesPs, getReplaces )
import Darcs.UI.Commands
( DarcsCommand(..), withStdOpts, amInRepository
, commandAlias, nodefaults
)
import Darcs.UI.Commands.Util ( announceFiles )
import Darcs.UI.Flags
( DarcsFlag (Summary, LookForAdds, LookForMoves), diffAlgorithm, diffingOpts
, isUnified, useCache, fixSubPaths
, verbosity, isInteractive, isUnified, lookForAdds, lookForMoves, lookForReplaces, hasSummary
)
import Darcs.UI.Options ( DarcsOption, (^), odesc, ocheck, onormalise, defaultFlags, parseFlags )
import qualified Darcs.UI.Options.All as O
import Darcs.UI.PrintPatch
( contextualPrintPatch, printPatch
, printPatchPager
)
import Darcs.UI.SelectChanges
( InteractiveSelectionContext (..)
, InteractiveSelectionM, KeyPress (..)
, WhichChanges (..), backAll
, backOne, currentFile
, currentPatch, decide
, decideWholeFile, helpFor
, keysFor, prompt
, selectionContextPrim, skipMundane
, skipOne, printSummary
)
import qualified Darcs.UI.SelectChanges as S ( PatchSelectionOptions (..) )
import Darcs.Util.Path ( AbsolutePath, SubPath, toFilePath )
import Darcs.Util.Printer
( putDocLn, renderString, RenderMode(..)
, text, vcat
)
import Darcs.Util.Prompt ( PromptConfig (..), promptChar )
whatsnewBasicOpts :: DarcsOption a
(Maybe O.Summary
-> O.WithContext
-> O.LookFor
-> O.DiffAlgorithm
-> Maybe String
-> Maybe Bool
-> a)
whatsnewBasicOpts
= O.summary
^ O.withContext
^ O.lookfor
^ O.diffAlgorithm
^ O.workingRepoDir
^ O.interactive -- False
whatsnewAdvancedOpts :: DarcsOption a (O.UseIndex -> Bool -> a)
whatsnewAdvancedOpts = O.useIndex ^ O.includeBoring
whatsnewOpts :: DarcsOption a
(Maybe O.Summary
-> O.WithContext
-> O.LookFor
-> O.DiffAlgorithm
-> Maybe String
-> Maybe Bool
-> Maybe O.StdCmdAction
-> Bool
-> Bool
-> O.Verbosity
-> Bool
-> O.UseIndex
-> Bool
-> O.UseCache
-> Maybe String
-> Bool
-> Maybe String
-> Bool
-> a)
whatsnewOpts = whatsnewBasicOpts `withStdOpts` whatsnewAdvancedOpts
patchSelOpts :: [DarcsFlag] -> S.PatchSelectionOptions
patchSelOpts flags = S.PatchSelectionOptions
{ S.verbosity = verbosity flags
, S.matchFlags = []
, S.diffAlgorithm = diffAlgorithm flags
, S.interactive = isInteractive True flags
, S.selectDeps = O.PromptDeps -- option not supported, use default
, S.summary = hasSummary (defaultSummary flags) flags
, S.withContext = isUnified flags
}
defaultSummary :: [DarcsFlag] -> O.Summary
defaultSummary flags = if lookForAdds flags == O.YesLookForAdds then O.YesSummary else O.NoSummary
whatsnew :: DarcsCommand [DarcsFlag]
whatsnew = DarcsCommand
{ commandProgramName = "darcs"
, commandName = "whatsnew"
, commandHelp = whatsnewHelp
, commandDescription = whatsnewDescription
, commandExtraArgs = -1
, commandExtraArgHelp = ["[FILE or DIRECTORY]..."]
, commandCommand = whatsnewCmd
, commandPrereq = amInRepository
, commandGetArgPossibilities = listRegisteredFiles
, commandArgdefaults = nodefaults
, commandAdvancedOptions = odesc whatsnewAdvancedOpts
, commandBasicOptions = odesc whatsnewBasicOpts
, commandDefaults = defaultFlags whatsnewOpts
, commandCheckOptions = ocheck whatsnewOpts
, commandParseOptions = onormalise whatsnewOpts
}
whatsnewDescription :: String
whatsnewDescription = "List unrecorded changes in the working tree."
whatsnewHelp :: String
whatsnewHelp =
"The `darcs whatsnew` command lists unrecorded changes to the working\n" ++
"tree. If you specify a set of files and directories, only unrecorded\n" ++
"changes to those files and directories are listed.\n" ++
"\n" ++
"With the `--summary` option, the changes are condensed to one line per\n" ++
"file, with mnemonics to indicate the nature and extent of the change.\n" ++
"The `--look-for-adds` option causes candidates for `darcs add` to be\n" ++
"included in the summary output. Summary mnemonics are as follows:\n" ++
"\n" ++
"* `A f` and `A d/` respectively mean an added file or directory.\n" ++
"* `R f` and `R d/` respectively mean a removed file or directory.\n" ++
"* `M f -N +M rP` means a modified file, with `N` lines deleted, `M`\n" ++
" lines added, and `P` lexical replacements.\n" ++
"* `f -> g` means a moved file or directory.\n" ++
"* `a f` and `a d/` respectively mean a new, but unadded, file or\n" ++
" directory, when using `--look-for-adds`.\n" ++
"\n" ++
" An exclamation mark (!) as in `R! foo.c`, means the hunk is known to\n" ++
" conflict with a hunk in another patch. The phrase `duplicated`\n" ++
" means the hunk is known to be identical to a hunk in another patch.\n" ++
"\n" ++
"By default, `darcs whatsnew` uses Darcs' internal format for changes.\n" ++
"To see some context (unchanged lines) around each change, use the\n" ++
"`--unified` option. To view changes in conventional `diff` format, use\n" ++
"the `darcs diff` command; but note that `darcs whatsnew` is faster.\n" ++
"\n" ++
"This command exits unsuccessfully (returns a non-zero exit status) if\n" ++
"there are no unrecorded changes.\n"
whatsnewCmd :: (AbsolutePath, AbsolutePath) -> [DarcsFlag] -> [String] -> IO ()
whatsnewCmd fps opts args =
withRepository (useCache opts) $ RepoJob $ \(repo :: Repository p wR wU wR) -> do
files <- if null args
then return Nothing
else Just <$> fixSubPaths fps args
let isLookForMoves = lookForMoves opts == O.YesLookForMoves && parseFlags O.summary opts /= Just O.NoSummary
isLookForAdds = lookForAdds opts == O.YesLookForAdds && parseFlags O.summary opts /= Just O.NoSummary
isLookForReplaces = lookForReplaces opts == O.YesLookForReplaces
-- LookForAdds and LookForMoves imply Summary, unless it is explicitly disabled.
optsModifier = if isLookForAdds
then (Summary :) . filter (\o -> LookForAdds /= o &&
LookForMoves /= o )
else id
opts' = optsModifier opts
movesPs <- if isLookForMoves
then getMovesPs repo files
else return NilFL
Sealed replacePs <- if isLookForReplaces
then getReplaces (diffingOpts opts) repo files
else return (Sealed NilFL)
Sealed noLookChanges <- filteredUnrecordedChanges opts' repo files movesPs
(unsafeCoerceP replacePs :: FL (PrimOf p) wR wR)
pristine <- readRecorded repo
-- If we are looking for moves, return the corresponding FL of changes.
-- If we are looking for adds, return the corresponding FL of changes.
Sealed unaddedNewPathsPs <- if isLookForAdds
then do
-- Use opts not opts', here, since we *do* want to look for adds.
Sealed lookChanges <- filteredUnrecordedChanges opts repo files movesPs (unsafeCoerceP replacePs :: FL (PrimOf p) wR wR)
noLookAddsTree <- applyAddPatchesToPristine noLookChanges pristine
lookAddsTree <- applyAddPatchesToPristine lookChanges pristine
ftf <- filetypeFunction
-- Return the patches that create files/dirs that aren't yet added.
unFreeLeft <$> treeDiff (diffAlgorithm opts) ftf noLookAddsTree lookAddsTree
else return (Sealed NilFL)
announceFiles files "What's new in"
exitOnNoChanges (unaddedNewPathsPs, noLookChanges)
if maybeIsInteractive opts
then runInteractive (interactiveHunks pristine) opts' pristine noLookChanges
else do
printChanges opts' pristine noLookChanges
printUnaddedPaths unaddedNewPathsPs
where
-- |Filter out hunk patches (leaving add patches) and return the tree
-- resulting from applying the filtered patches to the pristine tree.
applyAddPatchesToPristine ps pristine = do
adds :> _ <- return $ partitionRL primIsHunk $ reverseFL ps
applyToTree (reverseRL adds) pristine
exitOnNoChanges :: (FL p wX wY, FL p wU wV) -> IO ()
exitOnNoChanges (NilFL, NilFL) = do putStrLn "No changes!"
exitWith $ ExitFailure 1
exitOnNoChanges _ = return ()
printUnaddedPaths :: PrimPatch p => FL p wX wY -> IO ()
printUnaddedPaths NilFL = return ()
printUnaddedPaths ps =
putDocLn . lowercaseAs . renderString Encode . plainSummaryPrims $ ps
-- Make any add markers lowercase, to distinguish new-but-unadded files
-- from those that are unrecorded, but added.
lowercaseAs x = vcat $ map (text . lowercaseA) $ lines x
lowercaseA ('A' : x) = 'a' : x
lowercaseA x = x
-- |Appropriately print changes, according to the passed flags.
printChanges :: (PatchListFormat p, IsHunk p, Patchy p, ShowPatch p, PrimDetails p,
ApplyState p ~ Tree) => [DarcsFlag] -> Tree IO -> FL p wX wY
-> IO ()
printChanges opts' pristine changes
| Summary `elem` opts' = putDocLn $ plainSummaryPrims changes
| isUnified opts' == O.YesContext = contextualPrintPatch pristine changes
| otherwise = printPatch changes
-- |return the unrecorded changes that affect an optional list of paths.
filteredUnrecordedChanges :: forall p wR wU wT. (RepoPatch p, ApplyState p ~ Tree,
ApplyState (PrimOf p) ~ Tree) => [DarcsFlag]
-> Repository p wR wU wT -> Maybe [SubPath]
-> FL (PrimOf p) wR wT -- look-for-moves patches
-> FL (PrimOf p) wT wT -- look-for-replaces patches
-> IO (Sealed (FL (PrimOf p) wT))
filteredUnrecordedChanges opts' repo files movesPs replacesPs =
let filePaths = map toFilePath <$> files in
choosePreTouching filePaths <$> unrecordedChangesWithPatches (diffingOpts opts') repo files movesPs replacesPs
-- | Runs the 'InteractiveSelectionM' code
runInteractive :: (PatchListFormat p, IsHunk p, Patchy p, ShowPatch p,
PrimPatch p, PatchInspect p, PrimDetails p,
ApplyState p ~ Tree)
=> InteractiveSelectionM p wX wY () -- Selection to run
-> [DarcsFlag] -- Command-line options
-> Tree IO -- Pristine
-> FL p wX wY -- A list of patches
-> IO ()
runInteractive i opts pristine ps' = do
let (choices',lps') = patchChoicesLps ps'
let ps = evalStateT i $
ISC { total = lengthFL lps'
, current = 0
, lps = FZipper NilRL lps'
, choices = choices'
}
void $ runReaderT ps $
selectionContextPrim First "view" (patchSelOpts opts) (Just primSplitter)
Nothing (Just pristine)
-- | The interactive part of @darcs whatsnew@
interactiveHunks :: (PatchListFormat p, IsHunk p, Patchy p, ShowPatch p,
PatchInspect p, PrimDetails p, ApplyState p ~ Tree)
=> Tree IO -> InteractiveSelectionM p wX wY ()
interactiveHunks pristine = do
c <- currentPatch
case c of
Nothing -> liftIO $ putStrLn "No more changes!"
Just (Sealed2 lp) -> do
liftIO $ printPatch (lpPatch lp)
repeatThis lp
where
repeatThis lp = do
thePrompt <- prompt -- "Shall I view this change? (n/m)"
yorn <- liftIO $ promptChar
(PromptConfig thePrompt (keysFor basic_options) (keysFor adv_options)
(Just 'n') "?h")
case yorn of
-- View hunk in context
'v' -> liftIO (contextualPrintPatch pristine (lpPatch lp))
>> repeatThis lp
-- View summary of the change
'x' -> liftIO (printSummary (lpPatch lp))
>> repeatThis lp
-- View hunk and move on
'y' -> liftIO (contextualPrintPatch pristine (lpPatch lp))
>> decide True lp >> next_hunk
-- Go to the next patch
'n' -> decide False lp >> next_hunk
-- Skip the whole file
's' -> do
currentFile >>= maybe
(return ())
(\f -> decideWholeFile f False)
next_hunk
-- View hunk in a pager
'p' -> liftIO (printPatchPager $ lpPatch lp)
>> repeatThis lp
-- Next hunk
'j' -> next_hunk
-- Previous hunk
'k' -> prev_hunk
-- Start from the first change
'g' -> start_over
-- Quit whatsnew
'q' -> liftIO $ exitSuccess
_ -> do liftIO . putStrLn $
helpFor "whatsnew" basic_options adv_options
repeatThis lp
start_over = backAll >> interactiveHunks pristine
next_hunk = skipOne >> skipMundane >> interactiveHunks pristine
prev_hunk = backOne >> interactiveHunks pristine
options_yn =
[ KeyPress 'v' "view this hunk in a context"
, KeyPress 'y'
"view this hunk in a context and go to the next one"
, KeyPress 'n' "go to the next hunk" ]
optionsView =
[ KeyPress 'p' "view this hunk in context with a pager"
, KeyPress 'x' "view a summary of this patch"
]
optionsNav =
[ KeyPress 'q' "quit whatsnew"
, KeyPress 's' "skip the rest of the changes to this file"
, KeyPress 'j' "skip to the next hunk"
, KeyPress 'k' "back up to previous hunk"
, KeyPress 'g' "start over from the first hunk"
]
basic_options = [ options_yn ]
adv_options = [ optionsView, optionsNav ]
-- |status is an alias for whatsnew, with implicit Summary and LookForAdds
-- flags. We override the default description, to include the implicit flags.
status :: DarcsCommand [DarcsFlag]
status = statusAlias { commandCommand = statusCmd
, commandDescription = statusDesc
}
where
statusAlias = commandAlias "status" Nothing whatsnew
statusCmd fps fs = commandCommand whatsnew fps (Summary : LookForAdds : fs)
statusDesc = "Alias for `darcs " ++ commandName whatsnew ++ " -ls '."
maybeIsInteractive :: [DarcsFlag] -> Bool
maybeIsInteractive = maybe False id . parseFlags O.interactive
| DavidAlphaFox/darcs | src/Darcs/UI/Commands/WhatsNew.hs | gpl-2.0 | 17,283 | 0 | 31 | 4,867 | 3,560 | 1,922 | 1,638 | -1 | -1 |
{-# LANGUAGE ExistentialQuantification #-}
{- |
Module : $Header$
Description : Common Data types to be used between interfaces
Copyright : (c) Uni Bremen 2002-2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (imports Logic)
Different data structures that are (or should be) shared by all interfaces
of Hets
-}
module Interfaces.DataTypes
( IntState (..)
, getMaybeLib
, IntHistory (..)
, CmdHistory (..)
, IntIState (..)
, Int_NodeInfo (..)
, UndoRedoElem (..)
, ListChange (..)
) where
import Static.DevGraph
import Common.LibName
import Proofs.AbstractState
import Logic.Comorphism
import Interfaces.Command
import Interfaces.GenericATPState
{- | Internal state of the interface; it contains the development graph
and a full history. While in most cases this describes the state of the
development graph at a given time for the GUI, it is not quite the same for
the PGIP (it does not describe selected nodes). If one switches from one
interface to the other, passing this information should be sufficient,
with minimal loss of information (such as selected nodes, an unfinished
script and so on). -}
data IntState = IntState
{ i_hist :: IntHistory -- ^ global history management
, i_state :: Maybe IntIState -- ^ internal state
, filename :: String }
getMaybeLib :: IntState -> Maybe (LibName, LibEnv)
getMaybeLib = fmap (\ s -> (i_ln s, i_libEnv s)) . i_state
{- | Contains the detailed global history as two lists: a list of actions
for undo commands and a list of actions for redo commands -}
data IntHistory = IntHistory
{ undoList :: [CmdHistory]
, redoList :: [CmdHistory] }
{- | Contains command description needed for undo\/redo actions and
for displaying commands in the history -}
data CmdHistory = CmdHistory
{ command :: Command
, cmdHistory :: [UndoRedoElem] }
{- | History elements for the proof state. Only LibName would be used
by the GUI, because it keeps track only of changes to the development graph;
the others are for the PGIP, but in order to integrate both they should use
the same structure -}
data UndoRedoElem =
UseThmChange Bool
| Save2FileChange Bool
| ProverChange (Maybe G_prover)
| ConsCheckerChange (Maybe G_cons_checker)
| ScriptChange ATPTacticScript
| LoadScriptChange Bool
| CComorphismChange (Maybe AnyComorphism)
| ListChange [ListChange]
| IStateChange (Maybe IntIState)
| DgCommandChange LibName
data ListChange =
AxiomsChange [String] Int
| GoalsChange [String] Int
| NodesChange [Int_NodeInfo]
-- | full description of the internal state required by all interfaces
data IntIState = IntIState
{ i_libEnv :: LibEnv
, i_ln :: LibName
{- these are PGIP-specific, but they need to be treated by the common
history mechanism, therefore they need to be here -}
, elements :: [Int_NodeInfo]
, cComorphism :: Maybe AnyComorphism
, prover :: Maybe G_prover
, consChecker :: Maybe G_cons_checker
, save2file :: Bool
, useTheorems :: Bool
, script :: ATPTacticScript
, loadScript :: Bool }
data Int_NodeInfo = Element ProofState Int
| nevrenato/Hets_Fork | Interfaces/DataTypes.hs | gpl-2.0 | 3,163 | 0 | 10 | 627 | 436 | 268 | 168 | 55 | 1 |
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
module Algebraic.Set where
import qualified Algebraic.Nested.Type as Nested
import Algebraic.Nested.Op
import Algebraic.Nested.Restriction
import Algebraic2.Class
import Algebraic2.Instance as AI
import Condition
import Autolib.ToDoc
import Autolib.Choose
import Autolib.Reader
import Autolib.Size
import Autolib.FiniteMap
import qualified Autolib.Reporter.Set
import Data.Typeable
data Algebraic_Set = Algebraic_Set deriving ( Read, Show, Typeable )
instance Algebraic Algebraic_Set () ( Nested.Type Integer ) where
-- evaluate :: tag -> Exp a -> Reporter a
evaluate tag bel exp = do
v <- tfoldB bel inter exp
inform $ vcat [ text "Der Wert Ihres Terms ist"
, nest 4 $ toDoc v
]
return v
-- equivalent :: tag -> a -> a -> Reporter Bool
equivalent tag a b = do
inform $ text "stimmen die Werte überein?"
let ab = difference a b
ba = difference b a
err = union ab ba
let ok = is_empty err
when ( not ok ) $ reject $ vcat
[ text "Nein, diese Elemente kommen nur in jeweils einer der Mengen vor:"
, nest 4 $ toDoc err
]
return ok
-- some_formula :: tag -> Algebraic.Instance.Type a -> Exp a
some_formula tag i = read "pow (1 + pow (2)) "
default_context tag = ()
-- default_instance :: tag -> Algebraic.Instance.Type a
default_instance tag = AI.Make
{ target = Nested.example
, context = ()
, description = Nothing
, operators = default_operators tag
, predefined = listToFM
[ (read "A", read "{1,3,5,6}" )
, (read "B", read "{2,3,6,7}" )
]
, max_size = 7
}
default_operators tag = bops
| Erdwolf/autotool-bonn | src/Algebraic/Set.hs | gpl-2.0 | 1,790 | 23 | 11 | 483 | 395 | 224 | 171 | 44 | 0 |
module Control.Queries where
import Control.SQL
import Control.Types
import Inter.Crypt
import Control.Monad ( guard )
import Data.Maybe ( maybeToList )
import Data.List ( intersperse )
import Data.Typeable
import Database.MySQL.HSQL ( SqlBind )
import Helper
import Mysqlconnect
-- | extract the header (column names) from an SQL statement
showColTypes :: Statement -> [ String ]
showColTypes stat = [ s | (s,t,b) <- getFieldsTypes stat ]
-- ------------------------------------------------------------------------------
-- DB functions
-- ------------------------------------------------------------------------------
wrapped msg act = do
logged $ msg ++ " ... "
x <- act
logged $ " ... " ++ msg
return x
-- | returns the graded exercises for mat from the DB;
-- TODO: mat = [] should also return StudentMNr as a column
studAufgDB :: Maybe MNr -> IO ( [ String ] , [ [ StrOrInt ] ])
studAufgDB mat = wrapped "studAufgDB" $ do
conn <- myconnect
stat <- squery conn $
Query ( Select ( [ reed "vorlesung.Name as Vorlesung"
, reed "aufgabe.Name as Name"
, reed "aufgabe.Typ as Typ"
, reed "stud_aufg.Ok as Ok"
, reed "stud_aufg.No as No"
]
)
)
[ From $ map reed [ "student", "aufgabe", "stud_aufg", "vorlesung" ]
, Where $ ands
$ [ reed "stud_aufg.SNr = student.SNr"
, reed "stud_aufg.ANr = aufgabe.ANr"
, reed "vorlesung.VNr = aufgabe.VNr"
] ++
[ equals (reed "student.MNr") (toEx mnr)
| mnr <- maybeToList mat
]
]
inh <- collectRows ( \ state -> do
v <- getFieldValue state "Vorlesung"
s <- getFieldValue state "Name"
t <- getFieldValue state "Typ"
Oks o <- getFieldValue state "Ok"
Nos n <- getFieldValue state "No"
return [ S v, S s , S t, I o , I n ] -- FIXME
) stat
disconnect conn
return ( showColTypes stat, inh )
-- | insert a new student
-- TODO: e-mail validation
insertNewStudentDB :: String -- ^ first name
                   -> String -- ^ last name
                   -> MNr    -- ^ matriculation number
                   -> String -- ^ email
                   -> String -- ^ password
-> IO ()
insertNewStudentDB vnm nme mat eml ps1 = wrapped "insertNewStudentDB" $ do
cps1 <- encrypt ps1
conn <- myconnect
stat <- squery conn $ Query
( Insert (reed "student")
[ (reed "Vorname", EString vnm)
, (reed "Name", EString nme)
, (reed "MNr", toEx mat)
, (reed "Email", EString eml)
, (reed "Passwort", EString $ show cps1)
]
) []
disconnect conn
return ()
-- |
-- Student login, version 1
--
-- Does mnr match the password?
-- Input: matriculation number, password (= Nothing -> no check)
-- Output: IO [ (first name, last name, email, status) ] or [] on error
checkPasswdMNrDB :: Maybe String
-> MNr
-> IO [ ( String , String , String , String ) ]
checkPasswdMNrDB maybePass mat = wrapped "checkPasswdMNrDB" $ do
conn <- myconnect
state <- squery conn $ Query
( Select $ [ reed "student.MNr as MNr"
, reed "student.Vorname as Vorname"
, reed "student.Name as Name"
, reed "student.Email as Email"
, reed "student.Passwort as Passwort"
, reed "student.Status as Status"
]
)
[ From [ reed "student" ]
, Where $ equals (reed "student.MNr") (toEx mat)
]
inh <- collectRows ( \ state -> do
a <- getFieldValue state "Vorname"
b <- getFieldValue state "Name"
c <- getFieldValue state "Email"
d <- getFieldValue state "Status"
e <- getFieldValue state "Passwort"
return ( a, b , c , d , reed e )
) state
disconnect conn
return $ do
(a, b, c, d, e) <- inh
guard $ case maybePass of
Nothing -> True
Just pass -> Inter.Crypt.compare e pass
return ( a, b, c, d )
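-- Illustrative caller sketch (hypothetical, not part of this module): the
-- result list is empty when the matriculation number is unknown or the
-- password does not match, and otherwise contains the matching student rows.
--
-- > login enteredPassword mnr = do
-- >   rows <- checkPasswdMNrDB (Just enteredPassword) mnr
-- >   case rows of
-- >     []                                     -> putStrLn "login failed"
-- >     ( firstName, name, email, status ) : _ -> putStrLn ("welcome, " ++ firstName)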
-- |
-- Does mat or email already exist in the DB?
--
-- Used during registration as a pre-check for duplicates in MNr and email
-- Input: matriculation number, email address
-- Output: ( MNr is a duplicate :: Bool , email is a duplicate :: Bool )
--
duplMatOrEmailDB :: MNr -> String -> IO ( Bool , Bool )
duplMatOrEmailDB mat eml = wrapped "duplMatOrEmailDB" $ do
conn <- myconnect
stat <- query conn
( concat
[ "SELECT student.MNr, student.Email \n"
, "FROM student \n"
, "WHERE student.MNr = \""
, show mat , "\" "
, "OR student.Email = \""
, filterQuots eml , "\" "
, ";"
] )
inh <- collectRows ( \ state -> do
n <- getFieldValue state "MNr"
e <- getFieldValue state "Email"
return ( n , e )
) stat
let mORe = ( ( length [ m | m <- (Prelude.map fst inh), m == mat ] ) > 0
, ( length [ e | e <- (Prelude.map snd inh), e == eml ] ) > 0
)
disconnect conn
return mORe
-- |
-- Returns all available lectures
--
-- Input:
-- Output: IO [ lecture name ]
--
getAllVorlesungenDB :: IO [ String ]
getAllVorlesungenDB = wrapped "getAllVorlesungenDB" $ do
conn <- myconnect
stat <- squery conn $ Query
( Select $ [ reed "vorlesung.Name" ] )
[ From [ reed "vorlesung" ]]
inh <- collectRows ( \ state -> do
a <- getFieldValue state "Name"
return a
) stat
disconnect conn
return inh
-- |
-- Returns the lectures in which the given matriculation number has points
--
-- Input: matriculation number
-- Output: IO [ lecture name ]
--
getVorlesungWithPointsDB :: MNr -> IO [ String ]
getVorlesungWithPointsDB mnr = wrapped "getVorlesungWithPointsDB" $ do
conn <- myconnect
stat <- squery conn $ Query
( Select $ [ reed "vorlesung.Name as Vorlesung" ] )
[ From $ map reed [ "vorlesung", "stud_aufg" , "student" , "aufgabe" ]
, Where $ ands [ reed "student.SNr = stud_aufg.SNr"
, equals (reed "student.MNr") (toEx mnr)
, reed "vorlesung.VNr = aufgabe.VNr"
, reed "aufgabe.ANr = stud_aufg.ANr"
]
]
inh <- collectRows ( \ state -> do
a <- getFieldValue state "Vorlesung"
return a
) stat
disconnect conn
return inh
-- |
-- Updates the email address for a matriculation number
--
-- Input: matriculation number, email address
-- Output: IO ()
--
-- TODO email validation
--
updateEmailDB :: MNr -> String -> IO ()
updateEmailDB mat email = wrapped "updateEmailDB" $ do
conn <- myconnect
stat <- squery conn $ Query
( Update ( reed "student" )
[ ( reed "Email", EString email ) ] )
[ Where $ equals (reed "student.MNr") (toEx mat) ]
disconnect conn
return ()
-- |
-- Updates the password for a matriculation number
--
-- Input: matriculation number, password
-- Output: IO ()
--
updatePasswortDB :: MNr -> String -> IO ()
updatePasswortDB mat pass = wrapped "updatePasswortDB" $ do
cpass <- Inter.Crypt.encrypt pass
conn <- myconnect
stat <- squery conn $ Query
( Update ( reed "student" )
[ ( reed "Passwort", EString $ show cpass ) ] )
[ Where $ equals (reed "student.MNr") (toEx mat) ]
disconnect conn
return ()
-- | exercise groups:
-- returns all groups that still have free places
getFreeGruppenDB :: IO ( [String], [ (GNr, [String]) ] )
getFreeGruppenDB = wrapped "getFreeGruppenDB" $ do
conn <- myconnect
stat <- query conn $
"SELECT \n"
++ "gruppe.GNr as GNr, "
++ "vorlesung.Name as Vorlesungen,"
++ "gruppe.Name as Gruppen,"
++ "gruppe.Referent as Referent, "
-- maximum and current number of students per group
++ "gruppe.MaxStudents as studentMax, "
++ "COUNT(SNr) as studentCount" ++ " "
++ "\nFROM gruppe \n"
-- join gruppe with stud_grp on GNr
++ " LEFT JOIN stud_grp USING (GNr) "
++ ", vorlesung" ++ " "
++ "\nWHERE \n" ++" gruppe.VNr = vorlesung.VNr " ++" "
++ "\nGROUP BY \n" ++ "GNr "
-- only groups that are not yet full.
++ "\nHAVING \n" ++ "studentCount" ++ " < " ++ " studentMax " ++ " "
++ "\nORDER BY \n" ++ "Vorlesungen, Gruppen " ++ ";"
inh <- collectRows ( \ state -> do
k <- getFieldValue state "GNr"
v <- getFieldValue state "Vorlesungen"
g <- getFieldValue state "Gruppen"
r <- getFieldValue state "Referent"
-- m <- getFieldValue state "studentMax"
-- c <- getFieldValue state "studentCount"
return ( k :: GNr ,
[ v :: String
, g :: String
, r :: String
-- , m :: String
-- , c :: String
]
)
) stat
disconnect conn
return ( showColTypes stat, inh )
getAllGruppenDB :: IO ( [String], [ (GNr, [String]) ] )
getAllGruppenDB = wrapped "getAllGruppenDB" $ do
conn <- myconnect
state <- query conn $
"SELECT \n"
++ "gruppe.GNr as GNr, "
++ "vorlesung.Name as Vorlesungen,"
++ "gruppe.Name as Gruppen,"
++ "gruppe.Referent as Referent "
++ "\nFROM vorlesung , gruppe "
++ "\nWHERE vorlesung.VNr = gruppe.VNr "
++ "\nORDER BY Vorlesungen,Gruppen,Referent "
++ " ;"
logged "getAllGruppenDB: start collecting"
inh <- collectRows ( \ s -> do
k <- getFieldValue s "GNr"
v <- getFieldValue s "Vorlesungen"
g <- getFieldValue s "Gruppen"
r <- getFieldValue s "Referent"
return ( k :: GNr
, [ v :: String
, g :: String
, r :: String
]
)
) state
logged "getAllGruppenDB: end collecting"
disconnect conn
return ( showColTypes state, inh )
getGruppenStudDB :: MNr -> IO [ GNr ]
getGruppenStudDB ( mat :: MNr ) =
wrapped ( "getAllGruppenDB " ++ show mat ) $ do
conn <- myconnect
stat <- squery conn $ Query
( Select $ [ reed "stud_grp.GNr as GNr" ] )
[ From $ map reed [ "stud_grp", "student" ]
, Where $ ands
[ equals (reed "stud_grp.SNr") (reed "student.SNr")
, equals (reed "student.MNr") (toEx mat)
]
]
inh <- collectRows ( \ state -> do
( g :: GNr ) <- getFieldValue state "GNr"
return ( g )
) stat
disconnect conn
return inh
getSNrFromMatDB :: MNr -> IO [ SNr ]
getSNrFromMatDB ( mat :: MNr ) =
wrapped ( "getSNrFromMatDB: start collecting for " ++ show mat ) $ do
conn <- myconnect
stat <- squery conn $ Query
( Select $ [ reed "student.SNr as SNr" ] )
[ From [ reed "student" ]
, Where $ equals ( reed "student.MNr" ) ( toEx mat )
]
inh <- collectRows ( \ state -> do
snr <- getFieldValue state "SNr"
return ( snr :: SNr )
) stat
disconnect conn
return inh
-- | if the student is already in a group for the same lecture,
-- then change that group, else add the new group
changeStudGrpDB mat grp =
changeStudGrpDB' mat (fromCGI grp )
changeStudGrpDB' :: MNr -> GNr -> IO ()
changeStudGrpDB' mnr gnr =
wrapped ( "changeStudGrpDB" ++ show (mnr, gnr) ) $ do
snrs <- getSNrFromMatDB mnr
case snrs of
_ | 1 /= length snrs -> return ()
[ snr ] -> do
conn <- myconnect
stat <- squery conn $ Query
( Delete $ reed "stud_grp" )
[ Using $ map reed [ "stud_grp", "gruppe as g1", "gruppe as g2" ]
, Where $ ands
[ equals ( reed "stud_grp.SNr" ) ( toEx snr )
, equals ( reed "stud_grp.GNr" ) ( reed "g1.GNr" )
, equals ( reed "g2.GNr" ) ( toEx gnr )
, equals ( reed "g1.VNr" ) ( reed "g2.VNr" )
]
]
stat <- squery conn $ Query
( Insert ( reed "stud_grp" )
[ ( reed "SNr", toEx snr )
, ( reed "GNr", toEx gnr )
]
) []
disconnect conn
return ()
leaveStudGrpDB mat grp = leaveStudGrpDB' mat ( fromCGI grp )
leaveStudGrpDB' :: MNr -> GNr -> IO ()
leaveStudGrpDB' mnr gnr =
wrapped ( "leaveStudGrpDB" ++ show (mnr, gnr) ) $ do
snrs <- getSNrFromMatDB mnr
case snrs of
_ | 1 /= length snrs -> return ()
[ snr ] -> do
conn <- myconnect
stat <- squery conn $ Query
( Delete $ reed "stud_grp" )
[ Where $ ands
[ equals ( reed "stud_grp.SNr" ) ( toEx snr )
, equals ( reed "stud_grp.GNr" ) ( toEx gnr )
]
]
disconnect conn
return ()
-- tricky
data Col a = Col String
data Cola = forall a . ( Show a, SqlBind a ) => Cola (Col a)
-- | returns the exercises available (right now!) to a student
-- FIXME: use Control.Aufgabe type
mglAufgabenDB :: SNr -> IO [ (( ANr, Name, Typ),( Config, HiLo, Remark)) ]
mglAufgabenDB snr = mglAufgabenDB' False snr
mglAufgabenDB' :: Bool
-> SNr
-> IO [(( ANr, Name, Typ), ( Config, HiLo, Remark)) ]
mglAufgabenDB' isAdmin snr =
wrapped ( "mglAufgabenDB" ++ show (isAdmin, snr) ) $ do
let cols = [ "ANr", "Name", "Typ", "Config", "Highscore", "Remark" ]
conn <- myconnect
stat <- squery conn $ Query
( Select $ do
col <- cols
let long = Id [ "aufgabe", col ] ; short = Id [ col ]
return $ Bind ( EId long ) ( Just short )
)
[ From $ map reed [ "aufgabe", "gruppe" , "stud_grp" ]
, Where $ ands $
[ equals ( reed "gruppe.VNr" ) ( reed "aufgabe.VNr" )
, equals ( reed "gruppe.GNr" ) ( reed "stud_grp.GNr" )
, equals ( reed "stud_grp.SNr" ) ( toEx snr )
] ++
[ reed "NOW() BETWEEN ( Von AND Bis )"
| not isAdmin
]
]
inh <- collectRows ( \ state -> do
a <- getFieldValue state "ANr"
n <- getFieldValue state "Name"
t <- getFieldValue state "Typ"
c <- getFieldValue state "Config"
h <- getFieldValue state "Highscore"
r <- getFieldValue state "Remark"
return ( ( a , n, t) , (c, h, r) )
) stat
disconnect conn
return inh
----------------------------------------------------------------------------------
-- | returns the exercises available now and in the near future for a student
-- > or for all students (snr=[])
-- > ( [header ... ] , [ ( ANr, Name , Subject , Path , Highscore , Von , Bis ) ] )
mglNextAufgabenDB_old :: SNr -> IO ( [String],[[String]])
mglNextAufgabenDB_old snr =
wrapped ( "mglNextAufgabenDB" ++ show snr ) $ do
let ssnr = toString snr
conn <- myconnect
stat <- query conn
( concat
[ "SELECT aufgabe.ANr, aufgabe.Typ as Typ, aufgabe.Name as Name, aufgabe.Highscore \n"
, ", DATE_FORMAT( aufgabe.Von , \"%H:%i %a %d. %b %Y\") as Von "
, ", DATE_FORMAT( aufgabe.Bis , \"%H:%i %a %d. %b %Y\") as Bis\n"
, "FROM aufgabe "
, if null ssnr
then " \n"
else ", gruppe, stud_grp \n"
, "WHERE \n"
, if null ssnr
then
" "
else
"gruppe.VNr = aufgabe.VNr \n" ++
"AND gruppe.GNr = stud_grp.GNr \n" ++
"AND stud_grp.SNr = \"" ++ filterQuots ssnr ++ "\" \n" ++
"AND \n"
-- only exercises that are still open
, "NOW() < Bis "
, ";"
] )
inh <- collectRows ( \ state -> do
a <- getFieldValue state "ANr"
b <- getFieldValue state "Name"
c <- getFieldValue state "Typ"
h <- getFieldValue state "Highscore"
vo <- getFieldValue state "Von"
bi <- getFieldValue state "Bis"
return [ a, b , c , h ,vo , bi ]
) stat
disconnect conn
return ( showColTypes stat, inh )
-- | returns the graded exercises for mat from the DB;
-- TODO: mat = [] should also return StudentMNr as a column
mglNextAufgabenDB :: SNr -> IO ( [String],[[StrOrInt]])
mglNextAufgabenDB snr = wrapped "mglNextAufgabenDB" $ do
conn <- myconnect
stat <- squery conn $
Query ( Select ( [ reed "vorlesung.Name as Vorlesung"
, reed "aufgabe.Name as Name"
, reed "aufgabe.Typ as Typ"
, reed "aufgabe.Von as Von"
, reed "aufgabe.Bis as Bis"
]
)
)
[ From $ map reed [ "stud_grp", "aufgabe"
, "gruppe", "vorlesung" ]
, Where $ ands
$ [ equals (toEx snr) (reed "stud_grp.SNr")
, reed "stud_grp.GNr = gruppe.GNr"
, reed "gruppe.VNr = vorlesung.VNr"
, reed "vorlesung.VNr = aufgabe.VNr"
, reed "NOW() BETWEEN ( Von AND Bis )"
]
]
inh <- collectRows ( \ state -> do
v <- getFieldValue state "Vorlesung"
s <- getFieldValue state "Name"
t <- getFieldValue state "Typ"
( von :: Time ) <- getFieldValue state "Von"
( bis :: Time ) <- getFieldValue state "Bis"
return [ S v, S s , S t
, S $ toString von , S $ toString bis
]
) stat
disconnect conn
return ( showColTypes stat, inh )
| marcellussiegburg/autotool | db/src/Control/Queries.hs | gpl-2.0 | 18,930 | 440 | 16 | 7,459 | 4,379 | 2,370 | 2,009 | -1 | -1 |
-- | This module defines the type of rewrite rules.
{-# Language OverloadedStrings #-}
module Equ.Rule (
Relation (..)
, Rule (..)
-- * Constructors for the different relations.
, relEq, relEquiv, relImpl, relCons, relEval, mkrule
, getRelationFromType
)
where
import Equ.Expr
import Equ.Types
import Data.Text
import Data.Serialize
import Control.Applicative ((<$>), (<*>))
import Test.QuickCheck(Arbitrary, arbitrary, elements)
-- | Names of the relations between steps of a proof
data RelName = Eq    -- ^ polymorphic equality, except for formulas
             | Equiv -- ^ FOL: equivalence
             | Impl  -- ^ FOL: implication
             | Cons  -- ^ FOL: consequence
             | Eval  -- ^ FUN: evaluation
deriving (Eq, Show, Enum)
instance Arbitrary RelName where
arbitrary = elements [ Eq
, Equiv
, Impl
, Cons
]
-- | Relations between steps of a proof
data Relation = Relation {
relRepr :: Text
, relName :: RelName
, relSym :: Bool -- ^ Is the relation symmetric? This is used when
                 -- creating rules.
, relTy :: Type -- ^ The type of the related things.
}
deriving Eq
instance Show Relation where
show = unpack . relRepr
instance Arbitrary Relation where
arbitrary = Relation <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
-- | Equality relation ; =
relEq :: Relation
relEq = Relation { relRepr = pack "="
, relName = Eq
, relSym = True
, relTy = tyVar "A"
}
-- | Equivalence relation ; ≡
relEquiv :: Relation
relEquiv = Relation { relRepr = pack "≡"
, relName = Equiv
, relSym = True
, relTy = tyBool
}
-- | Implication relation ; "⇒"
relImpl :: Relation
relImpl = Relation { relRepr = pack "⇒"
, relName = Impl
, relSym = False
, relTy = tyBool
}
-- | Consequence relation ; ⇐
relCons :: Relation
relCons = Relation { relRepr = pack "⇐"
, relName = Cons
, relSym = False
, relTy = tyBool
}
relEval :: Relation
relEval = Relation (pack "↝") Eval False (tyVar "a")
-- | Rewrite rule
data Rule = Rule {
lhs :: Expr
, rhs :: Expr
, rel :: Relation
, name :: Text
, desc :: Text
}
deriving (Show, Eq)
mkrule :: Expr -> Expr -> Relation -> Rule
mkrule e e' r = Rule e e' r "" ""
getRelationFromType :: Type -> Relation
getRelationFromType (TyAtom ATyBool) = relEquiv
getRelationFromType _ = relEq
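-- Illustrative usage sketch, assuming 'tyBool' is 'TyAtom ATyBool' and that
-- e1, e2 are values of type 'Expr':
--
-- > getRelationFromType tyBool      == relEquiv
-- > getRelationFromType (tyVar "A") == relEq
-- > mkrule e1 e2 relImpl            -- a Rule with empty name and description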
instance Arbitrary Rule where
arbitrary = Rule <$> arbitrary <*> arbitrary <*>
arbitrary <*> arbitrary <*> arbitrary
instance Serialize Rule where
put (Rule ls rs r n d) = put ls >> put rs >> put r >>
put n >> put d
get = Rule <$> get <*> get <*> get <*> get <*> get
instance Serialize RelName where
put = putWord8 . toEnum . fromEnum
get = getWord8 >>= return . toEnum . fromEnum
instance Serialize Relation where
put (Relation r n s t) = put r >> put n >> put s >> put t
get = Relation <$> get <*> get <*> get <*> get
|
miguelpagano/equ
|
Equ/Rule.hs
|
gpl-3.0
| 3,577 | 0 | 10 | 1,311 | 799 | 457 | 342 | 80 | 1 |
-- |
-- Module : Commands.Update
-- Copyright : (C) 2014-2016 Jens Petersen
--
-- Maintainer : Jens Petersen <[email protected]>
-- Stability : alpha
--
-- Explanation: update spec file to a new package version
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
module Commands.Update (
update
) where
import Commands.Spec (createSpecFile)
import Distro (detectDistro, Distro(..))
import FileUtils (withTempDirectory)
import PackageUtils (PackageData (..), bringTarball, isGitDir, latestPkg,
packageName, packageVersion, prepare, removePrefix,
revision)
import Setup (RpmFlags (..))
import SysCmd (cmd_, cmdBool, cmdIgnoreErr, (+-+))
import Control.Applicative ((<$>))
import Control.Monad (when)
import Distribution.PackageDescription (PackageDescription (..))
import Distribution.Simple.Utils (die)
import Data.Maybe (fromMaybe)
import System.Directory (copyFile, createDirectory, getCurrentDirectory,
setCurrentDirectory)
import System.FilePath ((</>))
update :: PackageData -> RpmFlags -> IO ()
update pkgdata flags =
case specFilename pkgdata of
Nothing -> die "No (unique) .spec file in directory."
Just spec -> do
let pkgDesc = packageDesc pkgdata
pkg = package pkgDesc
name = packageName pkg
ver = packageVersion pkg
current = name ++ "-" ++ ver
latest <- latestPkg name
if current == latest
then error $ current +-+ "is latest version."
else do
bringTarball latest
gitDir <- getCurrentDirectory >>= isGitDir
rwGit <- if gitDir then cmdBool "grep -q 'url = ssh://' .git/config" else return False
when rwGit $
cmd_ "fedpkg" ["new-sources", latest ++ ".tar.gz"]
withTempDirectory $ \cwd -> do
(curspec, _) <- createSpecVersion current spec cwd
(newspec, revised) <- createSpecVersion latest spec cwd
patch <- cmdIgnoreErr "diff" ["-u2", "-I - spec file generated by cabal-rpm", "-I Fedora Haskell SIG <[email protected]>", curspec, newspec] ""
putStrLn patch
out <- cmdIgnoreErr "patch" ["-d", cwd, "-p1" ] patch
putStrLn out
setCurrentDirectory cwd
distro <- fromMaybe detectDistro (return <$> rpmDistribution flags)
let suffix = if distro == SUSE then "" else "%{?dist}"
cmd_ "sed" ["-i", "-e s/^\\(Release: \\).*/\\10" ++ suffix ++ "/", spec]
let newver = removePrefix (name ++ "-") latest
if distro == SUSE
then cmd_ "sed" ["-i", "-e s/^\\(Version: \\).*/\\1" ++ newver ++ "/", spec]
else cmd_ "rpmdev-bumpspec" ["-c", "update to" +-+ newver, spec]
when rwGit $ do
when revised $
cmd_ "git" ["add", name ++ ".cabal"]
cmd_ "git" ["commit", "-a", "-m", "update to" +-+ newver]
where
createSpecVersion :: String -> String -> FilePath -> IO (FilePath, Bool)
createSpecVersion pkgver spec topdir = do
pkgdata' <- prepare (Just pkgver) flags
let pkgDesc = packageDesc pkgdata'
revised = revision pkgDesc
when revised $ do
let cabalPath = pkgver </> cabalFilename pkgdata'
name = packageName $ package pkgDesc
cabalfile = topdir </> name ++ ".cabal"
copyFile cabalPath cabalfile
let pkgdata'' = pkgdata' { specFilename = Just spec }
createDirectory pkgver
newspec <- createSpecFile pkgdata'' flags (Just pkgver)
return (newspec, revised)
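-- Summary of the flow above: spec files are generated for both the current
-- and the latest package version inside a temporary directory, the diff
-- between them is applied as a patch to the working spec file, the Release
-- and Version fields are reset/bumped, and the result is committed when the
-- git checkout is writable.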
|
mimi1vx/cabal-rpm
|
src/Commands/Update.hs
|
gpl-3.0
| 3,797 | 0 | 24 | 998 | 930 | 490 | 440 | 70 | 6 |
{-# LANGUAGE CPP #-}
module DualTreeGhc2Spec (main, spec) where
import Test.Hspec
import qualified GHC as GHC
import Language.Haskell.TokenUtils.DualTree
import Language.Haskell.TokenUtils.Layout
-- import Language.Haskell.TokenUtils.GHC.Layout
import Language.Haskell.TokenUtils.Types
import TestUtils
-- ---------------------------------------------------------------------
main :: IO ()
main = do
hspec spec
spec :: Spec
spec = do
-- ---------------------------------------------
describe "layoutTreeToSourceTree" $ do
it "retrieves the tokens in SourceTree format LetExpr" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/LetExpr.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
(GHC.showRichTokenStream toks) `shouldBe` "-- A simple let expression, to ensure the layout is detected\n\n module Layout.LetExpr where\n\n foo = let x = 1\n y = 2\n in x + y\n\n "
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
(renderSourceTree srcTree) `shouldBe` origSource
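      -- Round-trip property exercised here (and in the commented-out cases
      -- below): allocating a layout tree from the parsed module and its token
      -- stream, then rendering the resulting SourceTree, should reproduce the
      -- original source text exactly.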
-- -----------------------------------------------------------------
{-
it "retrieves the tokens in SourceTree format LetStmt" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/LetStmt.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
(GHC.showRichTokenStream $ bypassGHCBug7351 toks) `shouldBe` "-- A simple let statement, to ensure the layout is detected\n\nmodule Layout.LetStmt where\n\nfoo = do\n let x = 1\n y = 2\n x+y\n\n"
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
"0:((1,1),(10,1))\n"++
"1:((1,1),(3,7))\n"++
"1:((3,8),(3,22))\n"++
"1:((3,23),(3,28))\n"++
"1:((5,1),(8,12))\n"++
"2:((5,1),(5,4))\n"++
"2:((5,5),(8,12))\n"++
"3:((5,5),(5,6))\n"++
"3:((5,7),(8,12))\n"++
"4:((5,7),(5,9))\n"++
"4:((6,9),(8,12))(Above FromAlignCol (1,-1) (6,9) (8,12) FromAlignCol (2,-11))\n"++
"5:((6,9),(7,18))\n"++
"6:((6,9),(6,12))\n"++
"6:((6,13),(7,18))(Above None (6,13) (7,18) FromAlignCol (1,-9))\n"++
"7:((6,13),(6,18))\n"++
"8:((6,13),(6,14))\n"++
"8:((6,15),(6,18))\n"++
"9:((6,15),(6,16))\n"++
"9:((6,17),(6,18))\n"++
"7:((7,13),(7,18))\n"++
"8:((7,13),(7,14))\n"++
"8:((7,15),(7,18))\n"++
"9:((7,15),(7,16))\n"++
"9:((7,17),(7,18))\n"++
"5:((8,9),(8,12))\n"++
"6:((8,9),(8,10))\n"++
"6:((8,10),(8,11))\n"++
"6:((8,11),(8,12))\n"++
"1:((10,1),(10,1))\n"
-}
-- (show layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe`
"-- A simple let statement, to ensure the layout is detected\n\nmodule Layout.LetStmt where\n\nfoo = do\n let x = 1\n y = 2\n x+y\n\n"
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format LayoutIn2" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Renaming/LayoutIn2.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
(GHC.showRichTokenStream $ bypassGHCBug7351 toks) `shouldBe` "module LayoutIn2 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'list' to 'ls'.\n\nsilly :: [Int] -> Int\nsilly list = case list of (1:xs) -> 1\n--There is a comment\n (2:xs)\n | x < 10 -> 4 where x = last xs\n otherwise -> 12\n\n"
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format LetIn1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/LiftToToplevel/LetIn1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (show layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------------------
it "retrieves the tokens in SourceTree format Where" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Where.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (show layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------------------
it "retrieves the tokens in SourceTree format PatBind" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/PatBind.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (show layout) `shouldBe` ""
{-
(drawTreeCompact layout) `shouldBe`
"0:((1,1),(13,1))\n"++
"1:((1,1),(1,7))\n"++
"1:((1,8),(1,22))\n"++
"1:((1,23),(1,28))\n"++
"1:((4,1),(4,18))\n"++
"2:((4,1),(4,4))\n"++
"2:((4,5),(4,7))\n"++
"2:((4,8),(4,9))\n"++
"2:((4,9),(4,12))\n"++
"2:((4,12),(4,13))\n"++
"2:((4,14),(4,17))\n"++
"2:((4,17),(4,18))\n"++
"1:((5,1),(5,9))\n"++
"2:((5,1),(5,2))\n"++
"2:((5,3),(5,5))\n"++
"2:((5,6),(5,9))\n"++
"1:((6,1),(6,9))\n"++
"2:((6,1),(6,2))\n"++
"2:((6,3),(6,5))\n"++
"2:((6,6),(6,9))\n"++
"1:((7,1),(10,12))\n"++
"2:((7,1),(7,10))\n"++
"2:((7,11),(7,12))\n"++
"2:((7,13),(7,38))\n"++
"3:((7,13),(7,17))\n"++
"3:((7,18),(7,19))\n"++
"3:((7,20),(7,38))\n"++
"4:((7,20),(7,30))\n"++
"5:((7,20),(7,23))\n"++
"5:((7,24),(7,30))\n"++
"6:((7,24),(7,25))\n"++
"6:((7,25),(7,26))\n"++
"6:((7,28),(7,30))\n"++
"4:((7,32),(7,38))\n"++
"5:((7,32),(7,33))\n"++
"5:((7,33),(7,34))\n"++
"5:((7,36),(7,38))\n"++
"2:((8,3),(8,8))\n"++
"2:((9,5),(10,12))(Above FromAlignCol (1,-4) (9,5) (10,12) FromAlignCol (3,-11))\n"++
"3:((9,5),(9,14))\n"++
"4:((9,5),(9,7))\n"++
"4:((9,8),(9,10))\n"++
"4:((9,11),(9,14))\n"++
"3:((10,5),(10,12))\n"++
"4:((10,5),(10,7))\n"++
"4:((10,8),(10,12))\n"++
"5:((10,8),(10,9))\n"++
"5:((10,10),(10,12))\n"++
"1:((13,1),(13,1))\n"
-}
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------------------
it "retrieves the tokens in SourceTree format TokenTest" $ do
(t,toks) <- parsedFileGhc "./test/testdata/TokenTest.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (show layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
{-
srcTree `shouldBe`
[]
-}
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------------------
it "retrieves the tokens in SourceTree format Md1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/MoveDef/Md1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (show layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------------------
it "retrieves the tokens in SourceTree format Layout.LetIn1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/TypeUtils/LayoutLet1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------------------
it "retrieves the tokens in SourceTree format Layout.Comments1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Comments1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (show layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------------------
it "retrieves the tokens in SourceTree format LocToName" $ do
(t,toks) <- parsedFileGhc "./test/testdata/LocToName.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (show layout) `shouldBe` ""
{-
(drawTreeCompact layout) `shouldBe`
"0:((1,1),(25,1))\n"++
"1:((1,1),(1,7))\n"++
"1:((1,8),(1,17))\n"++
"1:((1,18),(12,3))\n"++
"1:((20,1),(24,18))\n"++
"2:((20,1),(20,11))\n"++
"2:((20,12),(20,41))\n"++
"3:((20,12),(20,18))\n"++
"3:((20,19),(20,20))\n"++
"3:((20,21),(20,41))\n"++
"4:((20,21),(20,25))\n"++
"5:((20,21),(20,22))\n"++
"5:((20,23),(20,24))\n"++
"5:((20,24),(20,25))\n"++
"4:((20,26),(20,27))\n"++
"4:((20,28),(20,41))\n"++
"5:((20,28),(20,38))\n"++
"5:((20,39),(20,41))\n"++
"2:((24,1),(24,18))\n"++
"3:((24,1),(24,11))\n"++
"3:((24,12),(24,14))\n"++
"3:((24,15),(24,16))\n"++
"3:((24,17),(24,18))\n"++
"1:((25,1),(25,1))\n"
-}
-- (show layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------------------
it "retrieves the tokens in SourceTree format DupDef.Dd1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/DupDef/Dd1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (show layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
-- (showPpr srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- --------------------------------------
it "retrieves the tokens in SourceTree format Renaming.LayoutIn4" $ do
(t, toks) <- parsedFileGhc "./test/testdata/Renaming/LayoutIn4.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (drawTreeWithToks layout) `shouldBe` ""
let srcTree = layoutTreeToSourceTree layout
-- (showPpr srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- --------------------------------------
it "retrieves the tokens in SourceTree format Layout.Lift with deletion/insertion 1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Lift.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format MoveDef.Demote with deletion/insertion 2" $ do
(t,toks) <- parsedFileGhc "./test/testdata/MoveDef/Demote.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format Layout.FromMd1 with deletion 2" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/FromMd1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (drawTreeCompact layout) `shouldBe`
-- ""
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format Layout.FromMd1 with deletion 3" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/FromMd1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (drawTreeCompact layout) `shouldBe`
-- ""
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format Layout.Where2 with deletion 4" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Where2.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
-- (drawTreeCompact layout) `shouldBe`
-- ""
let srcTree = layoutTreeToSourceTree layout
-- (showPpr srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format TypeUtils.LayoutLet2" $ do
(t,toks) <- parsedFileGhc "./test/testdata/TypeUtils/LayoutLet2.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format Renaming.LayoutIn1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Renaming/LayoutIn1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
-- (drawTreeWithToks layout) `shouldBe` ""
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (showPpr srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after renaming Renaming.LayoutIn1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Renaming/LayoutIn1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after adding a local decl Layout.Lift" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Lift.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after demoting Demote.D2" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Demote/D2.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after add params AddParams1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/AddParams1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after renaming Renaming.D5" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Renaming/D5.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after renaming Layout.D5Simple" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/D5Simple.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after renaming TypeUtils.LayoutLet2" $ do
(t,toks) <- parsedFileGhc "./test/testdata/TypeUtils/LayoutLet2.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after renaming Renaming.LayoutIn3" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Renaming/LayoutIn3.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format TypeUtils.Empty" $ do
(t,toks) <- parsedFileGhc "./test/testdata/TypeUtils/Empty.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
(GHC.showRichTokenStream toks) `shouldBe` "module Empty where\n\n "
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- -----------------------------------------------------------------
it "retrieves the tokens in SourceTree format after renaming Layout.Do1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Do1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (show srcTree) `shouldBe`
-- ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format Move1" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Move1.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format HsDo" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/HsDo.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format forall" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/ForAll.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format DerivD" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Derive.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format Class" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Class.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format default decl" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Default.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format foreign decl" $ do
pendingWith "ghc-mod 4.x cannot load this file"
{-
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Foreign.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
(invariant layout) `shouldBe` []
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-}
-- ---------------------------------
it "retrieves the tokens in SourceTree format template haskell" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/TH.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format PArr" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/PArr.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- (show toks) `shouldBe` ""
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format Arrow" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Layout/Arrow.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format TemplateHaskell" $ do
(t,toks) <- parsedFileGhc "./test/testdata/TH/Main.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format Utils.hs" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Renaming/Utils.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-- ---------------------------------
it "retrieves the tokens in SourceTree format Utils.hs with renaming" $ do
(t,toks) <- parsedFileGhc "./test/testdata/Renaming/Utils.hs"
let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t
-- let renamed = fromJust $ GHC.tm_renamed_source t
-- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` ""
let origSource = (GHC.showRichTokenStream $ bypassGHCBug7351 toks)
let layout = allocTokens parsed toks
(show $ retrieveTokens layout) `shouldBe` (show toks)
-- (invariant layout) `shouldBe` []
{-
(drawTreeCompact layout) `shouldBe`
""
-}
let srcTree = layoutTreeToSourceTree layout
-- (showGhc srcTree) `shouldBe` ""
-- (show $ retrieveLines srcTree) `shouldBe` ""
(renderSourceTree srcTree) `shouldBe` origSource
-}
-- EOF
|
alanz/haskell-token-utils
|
test/DualTreeGhc2Spec.hs
|
unlicense
| 37,354 | 0 | 18 | 8,725 | 246 | 133 | 113 | 23 | 1 |
module Problem020 where
import Data.Char (digitToInt)
import Data.List (foldl1')
main =
print $ sum $ map digitToInt $ show $ f 100
where f x = foldl1' (*) [1..x]
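-- Worked example (illustrative): f 4 = foldl1' (*) [1..4] = 24, whose digit
-- sum is sum (map digitToInt (show 24)) = 2 + 4 = 6; the program computes the
-- same digit sum for 100! instead.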
|
vasily-kartashov/playground
|
euler/problem-020.hs
|
apache-2.0
| 170 | 0 | 8 | 36 | 73 | 40 | 33 | 6 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_HADDOCK hide #-}
module DNA.Types where
import Control.Monad.Reader
import Control.Monad.State.Strict (StateT)
import Control.Monad.Except
import Control.Distributed.Process
import Control.Distributed.Process.Serializable (Serializable)
import Data.Binary (Binary(..))
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
----------------------------------------------------------------
-- MonadProcess
----------------------------------------------------------------
-- | Monad into which computations in the 'Process' monad can be lifted
class MonadIO m => MonadProcess m where
liftP :: Process a -> m a
instance MonadProcess Process where
liftP = id
instance MonadProcess m => MonadProcess (StateT s m) where
liftP = lift . liftP
instance MonadProcess m => MonadProcess (ExceptT e m) where
liftP = lift . liftP
instance MonadProcess m => MonadProcess (ReaderT r m) where
liftP = lift . liftP
----------------------------------------------------------------
-- Other types
----------------------------------------------------------------
-- | Resources allocated to a single process. It always has access to the
-- node it owns and possibly a list of other nodes.
data VirtualCAD = VirtualCAD
{ vcadNode :: NodeInfo
, vcadNodePool :: [NodeInfo]
}
deriving (Show,Eq,Ord,Typeable,Generic)
instance Binary VirtualCAD
-- | Information about node
data NodeInfo = NodeInfo
{ nodeId :: NodeId
}
deriving (Show,Eq,Ord,Typeable,Generic)
instance Binary NodeInfo
-- | Describes whether some entity should be local to a node or could
-- possibly be on a remote node.
data Location = Remote
| Local
deriving (Show,Eq,Ord,Typeable,Generic)
instance Binary Location
-- | Rank of an actor
newtype Rank = Rank Int
deriving (Show,Eq,Ord,Typeable,Binary)
-- | Size of a group of processes
newtype GroupSize = GroupSize Int
deriving (Show,Eq,Ord,Typeable,Binary)
-- | Describes how failures in a group of processes are treated.
data GroupType
= Normal -- ^ If a single process in the group fails, it is
-- treated as if the whole group failed.
| Failout -- ^ The result of failed processes will be silently
-- discarded.
deriving (Show,Typeable,Generic)
instance Binary GroupType
-- | This describes how many nodes we want to allocate either to a
-- single actor process or to the group of processes as a whole. We can
-- either request exactly /n/ nodes or a fraction of the total pool of
-- free nodes. If there aren't enough nodes in the pool to satisfy the
-- request, it will cause a runtime error.
--
-- For example, @N 4@ requests exactly four nodes, and @Frac 0.5@
-- requests half of all currently available nodes.
--
-- The local node (which can be added using 'DNA.useLocal') is added in
-- addition to this. If in the end 0 nodes are allocated, it will cause
-- a runtime error.
data Res
= N Int -- ^ Fixed number of nodes
    | Frac Double -- ^ Fraction of nodes. Should lie in the /(0,1]/ range.
deriving (Show,Typeable,Generic)
instance Binary Res
-- | Describes how to divide allocated nodes between worker
-- processes.
data ResGroup
= NWorkers Int
    -- ^ Divide nodes evenly between /n/ actors.
| NNodes Int
    -- ^ Allocate no less than /n/ nodes for each actor. The DSL will
    -- try to create as many actors as possible under the given
    -- constraint.
deriving (Show,Typeable,Generic)
instance Binary ResGroup
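-- Illustrative reading of 'Res' and 'ResGroup' (the node counts below are
-- assumed, not taken from any real configuration): with 8 free nodes,
-- @N 4@ takes four of them and @Frac 0.5@ also takes four; splitting 8
-- allocated nodes with @NWorkers 4@ yields four workers with two nodes
-- each, while @NNodes 3@ creates as many workers as fit with at least
-- three nodes apiece.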
-- | ID of a group of processes
newtype AID = AID Int
deriving (Show,Eq,Ord,Typeable,Binary)
-- | ID of actor-local variable
newtype VID = VID Int
deriving (Show,Eq,Ord,Typeable,Binary)
----------------------------------------------------------------
-- Control messages
----------------------------------------------------------------
-- | Message indicating that the process is ready
data DoneTask = DoneTask
deriving (Show,Eq,Ord,Typeable,Generic)
instance Binary DoneTask
-- | Terminate process
newtype Terminate = Terminate String
deriving (Show,Eq,Ord,Typeable,Generic)
instance Binary Terminate
-- | Actor execution time exceeded quota
newtype Timeout = Timeout AID
deriving (Show,Eq,Ord,Typeable,Generic)
instance Binary Timeout
-- | Actor just sent data to the following destination
data SentTo = SentTo AID ProcessId [SendPortId]
deriving (Show,Eq,Ord,Typeable,Generic)
instance Binary SentTo
-- | Acknowledgement of data transmission
data AckSend = AckSend
deriving (Show,Eq,Ord,Typeable,Generic)
instance Binary AckSend
----------------------------------------------------------------
-- Type tags for shell actors
----------------------------------------------------------------
-- | The actor receives/produces a single value, respectively.
data Val a
deriving (Typeable)
-- | The actor receives/produces an unordered group of values.
data Grp a
deriving (Typeable)
-- | Only appears as an input tag. It means that we may want to
-- scatter values to a set of running actors.
data Scatter a
deriving (Typeable)
----------------------------------------------------------------
-- Shell actors
----------------------------------------------------------------
-- | Address of the receive port of an actor.
--
-- Messages are sent via a 'SendPort', which is stored as a 'Message' to
-- erase its type.
data RecvAddr
= RcvSimple Message
    -- ^ Actor/variable that receives a single value
| RcvReduce [(Message,SendPort Int)]
    -- ^ Reduce actor or actors. It's a list of ports to send data to
    -- and channels for sending the number of values to expect.
| RcvTree [(Message,SendPort Int)]
    -- ^ Ports of a tree reducer. They have a subtly different meaning:
    -- in an ordinary collector, data is sent to each collector; in a
    -- tree collector, the destination is determined by rank.
| RcvGrp [Message]
-- ^ Group of simple actors
deriving (Show,Typeable,Generic)
instance Binary RecvAddr
data RecvAddrType
= RcvTySimple
    -- ^ Actor/variable that receives a single value
| RcvTyReduce
-- ^ Reduce actor or actors
| RcvTyTree
-- ^ Tree reduction actor
| RcvTyGrp
-- ^ Group of simple actors
deriving (Show,Typeable,Generic)
instance Binary RecvAddrType
-- | Handle of a running actor or group. Note that we treat actors and
-- groups of actors uniformly here. The Shell data type has two type
-- parameters which describe what kind of data the actor receives or
-- produces. For example:
--
-- > Shell (InputTag a) (OutputTag b)
--
-- Both the input and output types carry tags which describe how many
-- messages are produced and how this actor can be connected with
-- others. It means that the shell receives message(s) of type a and
-- produces message(s) of type b. The supported tags are 'Val', 'Grp'
-- and 'Scatter'.
newtype Shell a b = Shell AID
deriving (Typeable,Generic,Binary)
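-- Illustrative example: an actor that consumes a single @Int@ and produces
-- an unordered group of @Double@s is handled through a value of type
-- @Shell (Val Int) (Grp Double)@.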
-- | Destination for actor computation
data Dest a
= SendLocally (SendPort a)
    -- ^ Send result using an unsafe primitive
| SendRemote [SendPort a]
-- ^ Send result using standard primitives
deriving (Show,Typeable,Generic)
instance Serializable a => Binary (Dest a)
|
SKA-ScienceDataProcessor/RC
|
MS4/lib/DNA/Types.hs
|
apache-2.0
| 7,443 | 0 | 9 | 1,562 | 1,109 | 645 | 464 | -1 | -1 |
module HelperSequences.A032741 (a032741) where
import HelperSequences.A000005 (a000005)
a032741 :: Integer -> Integer
a032741 0 = 0
a032741 n = a000005 n - 1
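-- Illustrative example, assuming a000005 n is the number of divisors of n
-- (OEIS A000005): a032741 12 = 6 - 1 = 5, the count of proper divisors of
-- 12 (1, 2, 3, 4 and 6).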
|
peterokagey/haskellOEIS
|
src/HelperSequences/A032741.hs
|
apache-2.0
| 159 | 0 | 6 | 24 | 53 | 29 | 24 | 5 | 1 |
module Network.Tragencap.IP.MacAddr where
import Data.Word
import Data.Binary.Bits.Get
import Data.Binary.Get
import Control.Applicative
import Network.Tragencap.Core
import Network.Tragencap.Prelude
-- * Mac Address
---------------------------------------
data MacAddr
= MacAddr
{ mac_1 :: Word8
, mac_2 :: Word8
, mac_3 :: Word8
, mac_4 :: Word8
, mac_5 :: Word8
, mac_6 :: Word8
}
deriving (Eq, Ord, Show)
macAddrGetter :: Get MacAddr
macAddrGetter = runBitGet $ block (MacAddr <$> word8 8 <*> word8 8 <*> word8 8 <*> word8 8 <*> word8 8 <*> word8 8)
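-- Illustrative use of the getter, assuming a lazy ByteString of at least
-- six bytes (the byte values below are made up):
--
-- > import qualified Data.ByteString.Lazy as BSL
-- > runGet macAddrGetter (BSL.pack [0x00, 0x1a, 0x2b, 0x3c, 0x4d, 0x5e])
-- >   == MacAddr 0x00 0x1a 0x2b 0x3c 0x4d 0x5e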
instance SimpleParser MacAddr where
simpleParser = get2parser macAddrGetter
|
kelemzol/tragencap
|
src/Network/Tragencap/IP/MacAddr.hs
|
apache-2.0
| 679 | 0 | 14 | 138 | 186 | 106 | 80 | 20 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QMovie.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:35
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QMovie (
MovieState
, QMovieCacheMode, eCacheAll
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
data CMovieState a = CMovieState a
type MovieState = QEnum(CMovieState Int)
ieMovieState :: Int -> MovieState
ieMovieState x = QEnum (CMovieState x)
instance QEnumC (CMovieState Int) where
qEnum_toInt (QEnum (CMovieState x)) = x
qEnum_fromInt x = QEnum (CMovieState x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> MovieState -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
instance QeNotRunning MovieState where
eNotRunning
= ieMovieState $ 0
instance QePaused MovieState where
ePaused
= ieMovieState $ 1
instance QeRunning MovieState where
eRunning
= ieMovieState $ 2
data CQMovieCacheMode a = CQMovieCacheMode a
type QMovieCacheMode = QEnum(CQMovieCacheMode Int)
ieQMovieCacheMode :: Int -> QMovieCacheMode
ieQMovieCacheMode x = QEnum (CQMovieCacheMode x)
instance QEnumC (CQMovieCacheMode Int) where
qEnum_toInt (QEnum (CQMovieCacheMode x)) = x
qEnum_fromInt x = QEnum (CQMovieCacheMode x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> QMovieCacheMode -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
instance QeCacheNone QMovieCacheMode where
eCacheNone
= ieQMovieCacheMode $ 0
eCacheAll :: QMovieCacheMode
eCacheAll
= ieQMovieCacheMode $ 1
|
uduki/hsQt
|
Qtc/Enums/Gui/QMovie.hs
|
bsd-2-clause
| 4,153 | 0 | 18 | 954 | 1,125 | 556 | 569 | 98 | 1 |
{-# OPTIONS -fglasgow-exts -#include "../include/gui/qtc_hs_QItemEditorCreatorBase_h.h" #-}
-----------------------------------------------------------------------------
{-| Module : QItemEditorCreatorBase_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:17
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QItemEditorCreatorBase_h where
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
instance QunSetUserMethod (QItemEditorCreatorBase ()) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QItemEditorCreatorBase_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
foreign import ccall "qtc_QItemEditorCreatorBase_unSetUserMethod" qtc_QItemEditorCreatorBase_unSetUserMethod :: Ptr (TQItemEditorCreatorBase a) -> CInt -> CInt -> IO (CBool)
instance QunSetUserMethod (QItemEditorCreatorBaseSc a) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QItemEditorCreatorBase_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
instance QunSetUserMethodVariant (QItemEditorCreatorBase ()) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QItemEditorCreatorBase_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariant (QItemEditorCreatorBaseSc a) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QItemEditorCreatorBase_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariantList (QItemEditorCreatorBase ()) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QItemEditorCreatorBase_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QunSetUserMethodVariantList (QItemEditorCreatorBaseSc a) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QItemEditorCreatorBase_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QsetUserMethod (QItemEditorCreatorBase ()) (QItemEditorCreatorBase x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QItemEditorCreatorBase setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QItemEditorCreatorBase_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QItemEditorCreatorBase_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQItemEditorCreatorBase x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QItemEditorCreatorBase_setUserMethod" qtc_QItemEditorCreatorBase_setUserMethod :: Ptr (TQItemEditorCreatorBase a) -> CInt -> Ptr (Ptr (TQItemEditorCreatorBase x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QItemEditorCreatorBase :: (Ptr (TQItemEditorCreatorBase x0) -> IO ()) -> IO (FunPtr (Ptr (TQItemEditorCreatorBase x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QItemEditorCreatorBase_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QItemEditorCreatorBaseSc a) (QItemEditorCreatorBase x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QItemEditorCreatorBase setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QItemEditorCreatorBase_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QItemEditorCreatorBase_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQItemEditorCreatorBase x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetUserMethod (QItemEditorCreatorBase ()) (QItemEditorCreatorBase x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QItemEditorCreatorBase setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QItemEditorCreatorBase_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QItemEditorCreatorBase_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QItemEditorCreatorBase_setUserMethodVariant" qtc_QItemEditorCreatorBase_setUserMethodVariant :: Ptr (TQItemEditorCreatorBase a) -> CInt -> Ptr (Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QItemEditorCreatorBase :: (Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QItemEditorCreatorBase_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QItemEditorCreatorBaseSc a) (QItemEditorCreatorBase x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QItemEditorCreatorBase setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QItemEditorCreatorBase_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QItemEditorCreatorBase_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QunSetHandler (QItemEditorCreatorBase ()) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QItemEditorCreatorBase_unSetHandler cobj_qobj cstr_evid
foreign import ccall "qtc_QItemEditorCreatorBase_unSetHandler" qtc_QItemEditorCreatorBase_unSetHandler :: Ptr (TQItemEditorCreatorBase a) -> CWString -> IO (CBool)
instance QunSetHandler (QItemEditorCreatorBaseSc a) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QItemEditorCreatorBase_unSetHandler cobj_qobj cstr_evid
instance QsetHandler (QItemEditorCreatorBase ()) (QItemEditorCreatorBase x0 -> QObject t1 -> IO (QObject t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QItemEditorCreatorBase1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QItemEditorCreatorBase1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QItemEditorCreatorBase_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQObject t1) -> IO (Ptr (TQObject t0))
setHandlerWrapper x0 x1
= do x0obj <- objectFromPtr_nf x0
x1obj <- qObjectFromPtr x1
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QItemEditorCreatorBase_setHandler1" qtc_QItemEditorCreatorBase_setHandler1 :: Ptr (TQItemEditorCreatorBase a) -> CWString -> Ptr (Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQObject t1) -> IO (Ptr (TQObject t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QItemEditorCreatorBase1 :: (Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQObject t1) -> IO (Ptr (TQObject t0))) -> IO (FunPtr (Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQObject t1) -> IO (Ptr (TQObject t0))))
foreign import ccall "wrapper" wrapSetHandler_QItemEditorCreatorBase1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QItemEditorCreatorBaseSc a) (QItemEditorCreatorBase x0 -> QObject t1 -> IO (QObject t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QItemEditorCreatorBase1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QItemEditorCreatorBase1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QItemEditorCreatorBase_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQItemEditorCreatorBase x0) -> Ptr (TQObject t1) -> IO (Ptr (TQObject t0))
setHandlerWrapper x0 x1
= do x0obj <- objectFromPtr_nf x0
x1obj <- qObjectFromPtr x1
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QcreateWidget_h (QItemEditorCreatorBase ()) ((QWidget t1)) where
createWidget_h x0 (x1)
= withQWidgetResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QItemEditorCreatorBase_createWidget cobj_x0 cobj_x1
foreign import ccall "qtc_QItemEditorCreatorBase_createWidget" qtc_QItemEditorCreatorBase_createWidget :: Ptr (TQItemEditorCreatorBase a) -> Ptr (TQWidget t1) -> IO (Ptr (TQWidget ()))
instance QcreateWidget_h (QItemEditorCreatorBaseSc a) ((QWidget t1)) where
createWidget_h x0 (x1)
= withQWidgetResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QItemEditorCreatorBase_createWidget cobj_x0 cobj_x1
instance QsetHandler (QItemEditorCreatorBase ()) (QItemEditorCreatorBase x0 -> IO (String)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QItemEditorCreatorBase2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QItemEditorCreatorBase2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QItemEditorCreatorBase_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQItemEditorCreatorBase x0) -> IO (CWString)
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
let rv =
if (objectIsNull x0obj)
then return ("")
else _handler x0obj
rvf <- rv
withCWString rvf $ \cstr_rvf -> return (cstr_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QItemEditorCreatorBase_setHandler2" qtc_QItemEditorCreatorBase_setHandler2 :: Ptr (TQItemEditorCreatorBase a) -> CWString -> Ptr (Ptr (TQItemEditorCreatorBase x0) -> IO (CWString)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QItemEditorCreatorBase2 :: (Ptr (TQItemEditorCreatorBase x0) -> IO (CWString)) -> IO (FunPtr (Ptr (TQItemEditorCreatorBase x0) -> IO (CWString)))
foreign import ccall "wrapper" wrapSetHandler_QItemEditorCreatorBase2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QItemEditorCreatorBaseSc a) (QItemEditorCreatorBase x0 -> IO (String)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QItemEditorCreatorBase2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QItemEditorCreatorBase2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QItemEditorCreatorBase_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQItemEditorCreatorBase x0) -> IO (CWString)
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
let rv =
if (objectIsNull x0obj)
then return ("")
else _handler x0obj
rvf <- rv
withCWString rvf $ \cstr_rvf -> return (cstr_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QvaluePropertyName_h (QItemEditorCreatorBase ()) (()) where
valuePropertyName_h x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QItemEditorCreatorBase_valuePropertyName cobj_x0
foreign import ccall "qtc_QItemEditorCreatorBase_valuePropertyName" qtc_QItemEditorCreatorBase_valuePropertyName :: Ptr (TQItemEditorCreatorBase a) -> IO (Ptr (TQString ()))
instance QvaluePropertyName_h (QItemEditorCreatorBaseSc a) (()) where
valuePropertyName_h x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QItemEditorCreatorBase_valuePropertyName cobj_x0
|
uduki/hsQt
|
Qtc/Gui/QItemEditorCreatorBase_h.hs
|
bsd-2-clause
| 17,804 | 0 | 18 | 3,666 | 5,289 | 2,524 | 2,765 | -1 | -1 |
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
-- | Socket functions using System.Event instead of GHC's I/O manager.
module EventSocket
(
accept
, connect
, recv
, send
, sendAll
, c_recv
, c_send
) where
import Control.Concurrent (modifyMVar_, newMVar)
import Control.Monad (liftM, when)
import Data.ByteString (ByteString)
import Data.Word (Word8)
import qualified Data.ByteString as B
import qualified Data.ByteString.Internal as B
import Data.ByteString.Unsafe (unsafeUseAsCStringLen)
import Foreign.C.Types (CChar, CInt, CSize)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Utils (with)
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Ptr (Ptr, castPtr)
import Foreign.C.Error (Errno(..), eINPROGRESS, eINTR,
errnoToIOError, getErrno, throwErrno)
#if __GLASGOW_HASKELL__ < 612
import GHC.IOBase (IOErrorType(..))
#else
import GHC.IO.Exception (IOErrorType(..))
#endif
import Network.Socket hiding (accept, connect, recv, send)
import Network.Socket.Internal
import Prelude hiding (repeat)
import System.Event.Thread
import System.IO.Error (ioeSetErrorString, mkIOError)
import EventUtil
connect :: Socket -- Unconnected Socket
-> SockAddr -- Socket address stuff
-> IO ()
connect sock@(MkSocket s _family _stype _protocol socketStatus) addr = do
modifyMVar_ socketStatus $ \currentStatus -> do
if currentStatus /= NotConnected && currentStatus /= Bound
then
       ioError (userError ("connect: can't perform connect on socket in status " ++
show currentStatus))
else do
withSockAddr addr $ \p_addr sz -> do
let connectLoop = do
r <- c_connect s p_addr (fromIntegral sz)
if r == -1
then do
err <- getErrno
case () of
_ | err == eINTR -> connectLoop
_ | err == eINPROGRESS -> connectBlocked
_ -> throwSocketError "connect"
else return r
connectLoop
return Connected
where
connectBlocked = do
threadWaitWrite (fromIntegral s)
err <- getSocketOption sock SoError
if (err == 0)
then return 0
else ioError (errnoToIOError "connect"
(Errno (fromIntegral err))
Nothing Nothing)
foreign import ccall unsafe "connect"
c_connect :: CInt -> Ptr SockAddr -> CInt{-CSockLen?? -} -> IO CInt
------------------------------------------------------------------------
-- Receiving
recv :: Socket -> Int -> IO ByteString
recv (MkSocket s _ _ _ _) nbytes
| nbytes <= 0 = ioError (mkInvalidRecvArgError "Network.Socket.ByteString.recv")
| otherwise = do
fp <- B.mallocByteString nbytes
n <- withForeignPtr fp $ recvInner s nbytes
if n <= 0
then return B.empty
else return $! B.PS fp 0 n
recvInner :: CInt -> Int -> Ptr Word8 -> IO Int
recvInner s nbytes ptr = do
len <- throwErrnoIfMinus1Retry_repeatOnBlock "recv"
(threadWaitRead (fromIntegral s)) $
c_recv s (castPtr ptr) (fromIntegral nbytes) 0{-flags-}
case fromIntegral len of
(-1) -> do errno <- getErrno
if errno == eINTR
then recvInner s nbytes ptr
else throwErrno "Network.Socket.ByteString.recv"
n -> return n
------------------------------------------------------------------------
-- Sending
-- | Send data to the socket. The socket must be connected to a
-- remote socket. Returns the number of bytes sent. Applications are
-- responsible for ensuring that all data has been sent.
send :: Socket -- ^ Connected socket
-> ByteString -- ^ Data to send
-> IO Int -- ^ Number of bytes sent
send (MkSocket s _ _ _ _) xs =
unsafeUseAsCStringLen xs $ \(str, len) ->
liftM fromIntegral $
throwSocketErrorIfMinus1RetryMayBlock "send"
(threadWaitWrite (fromIntegral s)) $
c_send s str (fromIntegral len) 0
-- | Send data to the socket. The socket must be connected to a
-- remote socket. Unlike 'send', this function continues to send data
-- until either all data has been sent or an error occurs. On error,
-- an exception is raised, and there is no way to determine how much
-- data, if any, was successfully sent.
sendAll :: Socket -- ^ Connected socket
-> ByteString -- ^ Data to send
-> IO ()
sendAll sock bs = do
sent <- send sock bs
when (sent < B.length bs) $ sendAll sock (B.drop sent bs)
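-- A minimal usage sketch (not part of the original module): write a fixed
-- greeting to a socket that the caller has already connected. 'sendAll'
-- keeps calling 'send' until every byte has been written.
sendHello :: Socket -> IO ()
sendHello sock = sendAll sock (B.pack [72, 101, 108, 108, 111]) -- "Hello"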
------------------------------------------------------------------------
-- Accepting
accept :: Socket -> IO (Socket, SockAddr)
accept (MkSocket s family stype protocol _status) = do
let sz = sizeOfSockAddrByFamily family
allocaBytes sz $ \ sockaddr -> do
with (fromIntegral sz) $ \ ptr_len -> do
new_sock <- throwSocketErrorIfMinus1RetryMayBlock "accept"
(threadWaitRead (fromIntegral s)) $
c_accept s sockaddr ptr_len
setNonBlocking (fromIntegral new_sock)
addr <- peekSockAddr sockaddr
new_status <- newMVar Connected
return (MkSocket new_sock family stype protocol new_status, addr)
mkInvalidRecvArgError :: String -> IOError
mkInvalidRecvArgError loc = ioeSetErrorString (mkIOError
InvalidArgument
loc Nothing Nothing)
"non-positive length"
foreign import ccall unsafe "sys/socket.h accept"
c_accept :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen?? -} -> IO CInt
foreign import ccall unsafe "sys/socket.h send"
c_send :: CInt -> Ptr a -> CSize -> CInt -> IO CInt
foreign import ccall unsafe "sys/socket.h recv"
c_recv :: CInt -> Ptr CChar -> CSize -> CInt -> IO CInt
|
tibbe/event
|
benchmarks/EventSocket.hs
|
bsd-2-clause
| 5,914 | 2 | 32 | 1,606 | 1,416 | 735 | 681 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Handler.Home where
import Import
import Text.Blaze (toMarkup)
import Yesod.Auth
getHomeR :: Handler RepHtml
getHomeR = maybeAuthId >>= getHomeR' where
getHomeR' :: Maybe UserId -> Handler RepHtml
getHomeR' Nothing = defaultLayout $ do
title <- lift appTitle
setTitle $ toMarkup title
$(widgetFile "homepage")
getHomeR' (Just _) = redirect TasksR
|
samstokes/yesodoro-reboot
|
Handler/Home.hs
|
bsd-2-clause
| 414 | 0 | 12 | 79 | 118 | 59 | 59 | 13 | 2 |
{- chris
zipper0.hs:18:5:
"zipper0.hs" (line 19, column 5):
data Stack a = Stack { focus :: a
^
unexpected reserved word "data"
expecting variable identifier
-}
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
import Prelude hiding (reverse)
import Data.Set
data Stack a = Stack { focus :: !a -- focused thing in this set
, up :: [a] -- jokers to the left
, down :: [a] } -- jokers to the right
[lq| type UListDif a N = {v:[a] | ((not (Set_mem N (listElts v))) && (Set_emp (listDup v)))} |]
[lq|
data Stack a = Stack { focus :: a
, up :: UListDif a focus
, down :: UListDif a focus }
|]
[lq|
measure listDup :: [a] -> (Set a)
listDup([]) = {v | Set_emp v }
listDup(x:xs) = {v | v = if (Set_mem x (listElts xs)) then (Set_cup (Set_sng x) (listDup xs)) else (listDup xs) }
|]
[lq| type UStack a = {v:Stack a |(Set_emp (Set_cap (listElts (getUp v)) (listElts (getDown v))))}|]
[lq| measure getFocus :: forall a. (Stack a) -> a
getFocus (Stack focus up down) = focus
|]
[lq| measure getUp :: forall a. (Stack a) -> [a]
getUp (Stack focus up down) = up
|]
[lq| measure getDown :: forall a. (Stack a) -> [a]
getDown (Stack focus up down) = down
|]
-- QUALIFIERS
[lq| q :: x:a -> {v:[a] |(not (Set_mem x (listElts v)))} |]
q :: a -> [a]
q = undefined
[lq| q1 :: x:a -> {v:[a] |(Set_mem x (listElts v))} |]
q1 :: a -> [a]
q1 = undefined
[lq| q0 :: x:a -> {v:[a] |(Set_emp(listDup v))} |]
q0 :: a -> [a]
q0 = undefined
[lq| focusUp :: UStack a -> UStack a |]
focusUp :: Stack a -> Stack a
focusUp (Stack t [] rs) = Stack x xs [] where (x:xs) = reverse (t:rs)
focusUp (Stack t (l:ls) rs) = Stack l ls (t:rs)
[lq| focusDown :: UStack a -> UStack a |]
focusDown :: Stack a -> Stack a
focusDown = reverseStack . focusUp . reverseStack
-- | reverse a stack: up becomes down and down becomes up.
[lq| reverseStack :: UStack a -> UStack a |]
reverseStack :: Stack a -> Stack a
reverseStack (Stack t ls rs) = Stack t rs ls
-- TODO ASSUMES
[lq| reverse :: {v:[a] | (Set_emp (listDup v))} -> {v:[a]|(Set_emp (listDup v))} |]
reverse :: [a] -> [a]
reverse = undefined
|
spinda/liquidhaskell
|
tests/gsoc15/unknown/pos/zipper0.hs
|
bsd-3-clause
| 2,240 | 0 | 9 | 595 | 412 | 241 | 171 | 38 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Network.AWS.S3Object
-- Copyright : (c) Greg Heartsfield 2007
-- License : BSD3
--
-- Object interface for Amazon S3
-- API Version 2006-03-01
-- <http://docs.amazonwebservices.com/AmazonS3/2006-03-01/>
-----------------------------------------------------------------------------
module Network.AWS.S3Object (
-- * Function Types
sendObject, copyObject, copyObjectWithReplace, getObject,
getObjectInfo, deleteObject, publicUriForSeconds,
publicUriUntilTime, setStorageClass, getStorageClass,
rewriteStorageClass,
-- * Data Types
S3Object(..), StorageClass(..)
) where
import Network.AWS.Authentication as Auth
import Network.AWS.AWSResult
import Network.AWS.AWSConnection
import Network.HTTP
import Network.URI
import System.Time
import Data.List(lookup)
import qualified Data.ByteString.Lazy.Char8 as L
-- | An object that can be stored and retrieved from S3.
data S3Object =
S3Object { -- | Name of the bucket containing this object
obj_bucket :: String,
-- | URI of the object. Subresources ("?acl" or
-- | "?torrent") should be suffixed onto this name.
obj_name :: String,
-- | A standard MIME type describing the format of the
-- contents. If not specified, @binary/octet-stream@ is
-- used.
content_type :: String,
-- | Object metadata in (key,value) pairs. Key names
-- should use the prefix @x-amz-meta-@ to be stored with
-- the object. The total HTTP request must be under 4KB,
-- including these headers.
obj_headers :: [(String, String)],
-- | Object data.
obj_data :: L.ByteString
} deriving (Read, Show)
data StorageClass = STANDARD | REDUCED_REDUNDANCY
deriving (Read, Show, Eq)
-- Amazon header key for storage class
storageHeader = "x-amz-storage-class"
-- | Add required headers for the storage class.
-- Use this in combination with 'sendObject' for new objects. To
-- modify the storage class of existing objects, use
-- 'rewriteStorageClass'. Using reduced redundancy for object storage
-- trades off redundancy for storage costs.
setStorageClass :: StorageClass -- ^ Storage class to request
-> S3Object -- ^ Object to modify
-> S3Object -- ^ Object with storage class headers set, ready to be sent
setStorageClass sc obj = obj {obj_headers = addToAL
(obj_headers obj)
storageHeader (show sc)}
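-- A minimal usage sketch (not part of the original module): build a small
-- text object and store it with reduced redundancy. The bucket and key
-- names are placeholders chosen for illustration.
sendReducedRedundancy :: AWSConnection -> String -> IO (AWSResult ())
sendReducedRedundancy conn bucket =
    sendObject conn (setStorageClass REDUCED_REDUNDANCY obj)
  where
    obj = S3Object { obj_bucket = bucket
                   , obj_name = "example-key"
                   , content_type = "text/plain"
                   , obj_headers = []
                   , obj_data = L.pack "hello"
                   }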
-- | Retrieve the storage class of a local S3Object.
-- Does not work for objects retrieved with 'getObject', since the
-- required header values are not returned. Use
-- 'getObjectStorageClass' or 'listObjects' from S3Bucket module to
-- determine storage class of existing objects.
getStorageClass :: S3Object -- ^ Object to inspect
-> Maybe StorageClass -- ^ Requested storage class, Nothing if unspecified
getStorageClass obj = case stg_values of
[] -> Nothing
x -> Just (read (head x))
where
hdrs = obj_headers obj
stg_hdrs = filter (\x -> fst x == storageHeader) hdrs
stg_values = map fst stg_hdrs
-- | Change the storage class (and only the storage class) of an existing object.
-- This actually performs a copy to the same location, preserving metadata.
-- It is not clear to me whether ACLs are preserved when copying to the same location.
-- For best performance, we must not change other headers during storage class
-- changes.
rewriteStorageClass :: AWSConnection -- ^ AWS connection information
-> StorageClass -- ^ New storage class for object
-> S3Object -- ^ Object to modify
-> IO (AWSResult S3Object) -- ^ Server response
rewriteStorageClass aws sc obj =
copyObject aws obj (setStorageClass sc (obj {obj_headers = []}))
-- | Send data for an object.
-- If the header "Content-Length" is not set, all content must be read into
-- memory prior to sending.
sendObject :: AWSConnection -- ^ AWS connection information
-> S3Object -- ^ Object to add to a bucket
-> IO (AWSResult ()) -- ^ Server response
sendObject aws obj =
do res <- Auth.runAction (S3Action aws (urlEncode (obj_bucket obj))
(urlEncode (obj_name obj))
""
(("Content-Type", (content_type obj)) :
obj_headers obj)
(obj_data obj) PUT)
return (either Left (\_ -> Right ()) res)
-- | Create a pre-signed request URI. Anyone can use this to request
-- an object until the specified date.
publicUriUntilTime :: AWSConnection -- ^ AWS connection information
-> S3Object -- ^ Object to be made available
-> Integer -- ^ Expiration time, in seconds since
-- 00:00:00 UTC on January 1, 1970
-> URI -- ^ URI for the object
publicUriUntilTime c obj time =
let act = S3Action c (urlEncode (obj_bucket obj)) (urlEncode (obj_name obj)) "" [] L.empty GET
in preSignedURI act time
-- | Create a pre-signed request URI. Anyone can use this to request
-- an object for the number of seconds specified.
publicUriForSeconds :: AWSConnection -- ^ AWS connection information
-> S3Object -- ^ Object to be made available
-> Integer -- ^ How many seconds until this
-- request expires
-> IO URI -- ^ URI for the object
publicUriForSeconds c obj time =
do TOD ctS _ <- getClockTime -- GHC specific, todo: get epoch within h98.
return (publicUriUntilTime c obj (ctS + time))
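-- A minimal usage sketch (not part of the original module): produce a URI
-- for an object that anyone may use for the next hour. Only the bucket and
-- object name matter here; both are placeholders.
oneHourLink :: AWSConnection -> IO URI
oneHourLink conn =
    publicUriForSeconds conn (S3Object "my-bucket" "my-key" "" [] L.empty) 3600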
-- | Retrieve an object.
getObject :: AWSConnection -- ^ AWS connection information
-> S3Object -- ^ Object to retrieve
-> IO (AWSResult S3Object) -- ^ Server response
getObject = getObjectWithMethod GET
-- | Get object info without retrieving content body from server.
getObjectInfo :: AWSConnection -- ^ AWS connection information
-> S3Object -- ^ Object to retrieve information on
-> IO (AWSResult S3Object) -- ^ Server response
getObjectInfo = getObjectWithMethod HEAD
-- | Get an object with specified method.
getObjectWithMethod :: RequestMethod -- ^ Method to use for retrieval (GET/HEAD)
-> AWSConnection -- ^ AWS connection
-> S3Object -- ^ Object to request
-> IO (AWSResult S3Object)
getObjectWithMethod m aws obj =
do res <- Auth.runAction (S3Action aws (urlEncode (obj_bucket obj))
(urlEncode (obj_name obj))
""
(obj_headers obj)
L.empty m)
return (either Left (\x -> Right (populate_obj_from x)) res)
where
populate_obj_from x =
obj { obj_data = (rspBody x),
obj_headers = (headersFromResponse x) }
headersFromResponse :: HTTPResponse L.ByteString -> [(String,String)]
headersFromResponse r =
let respheaders = rspHeaders r
in map (\x -> case x of
Header (HdrCustom name) val -> (name, (mimeDecode val))
) (filter isAmzHeader respheaders)
-- | Delete an object. Only bucket and object name need to be
-- specified in the S3Object. Deletion of a non-existent object
-- does not return an error.
deleteObject :: AWSConnection -- ^ AWS connection information
-> S3Object -- ^ Object to delete
-> IO (AWSResult ()) -- ^ Server response
deleteObject aws obj = do res <- Auth.runAction (S3Action aws (urlEncode (obj_bucket obj))
(urlEncode (obj_name obj))
""
(obj_headers obj)
L.empty DELETE)
return (either Left (\_ -> Right ()) res)
-- | Copy object from one bucket to another (or the same bucket), preserving the original headers.
-- Headers from @destobj@ are sent, while only the
-- bucket and name of @srcobj@ are used. For the best
-- performance, when changing headers during a copy, use the
-- 'copyObjectWithReplace' function. For conditional copying, the
-- following headers set on the destination object may be used:
-- @x-amz-copy-source-if-match@, @x-amz-copy-source-if-none-match@,
-- @x-amz-copy-source-if-unmodified-since@, or
-- @x-amz-copy-source-if-modified-since@. See
-- <http://docs.amazonwebservices.com/AmazonS3/2006-03-01/API/index.html?RESTObjectCOPY.html>
-- for more details.
copyObject :: AWSConnection -- ^ AWS connection information
-> S3Object -- ^ Source object (bucket+name only)
-> S3Object -- ^ Destination object
-> IO (AWSResult S3Object) -- ^ Server response, headers include version information
copyObject aws srcobj destobj =
do res <- Auth.runAction (S3Action aws (urlEncode (obj_bucket destobj))
(urlEncode (obj_name destobj))
""
(copy_headers)
L.empty PUT)
return (either Left (\x -> Right (populate_obj_from x)) res)
where
populate_obj_from x =
destobj { obj_data = (rspBody x),
obj_headers = (headersFromResponse x) }
copy_headers = [("x-amz-copy-source",
("/"++ (urlEncode (obj_bucket srcobj))
++ "/" ++ (urlEncode (obj_name srcobj))))]
++ (obj_headers destobj)
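-- A minimal usage sketch (not part of the original module): copy an object
-- only when its ETag differs from a known value, by adding one of the
-- conditional headers listed above to the destination object. The ETag
-- string is a placeholder.
copyIfChanged :: AWSConnection -> S3Object -> S3Object -> IO (AWSResult S3Object)
copyIfChanged aws srcobj destobj =
    copyObject aws srcobj destobj'
  where
    destobj' = destobj { obj_headers =
                           ("x-amz-copy-source-if-none-match", "\"known-etag\"")
                           : obj_headers destobj }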
-- | Copy object from one bucket to another (or the same bucket), replacing headers.
-- Any headers from @srcobj@ are ignored, and only those
-- set in @destobj@ are used.
copyObjectWithReplace :: AWSConnection -- ^ AWS connection information
-> S3Object -- ^ Source object (bucket+name only)
-> S3Object -- ^ Destination object
-> IO (AWSResult S3Object) -- ^ Server response, headers include version information
copyObjectWithReplace aws srcobj destobj =
copyObject aws srcobj (destobj {obj_headers =
(addToAL (obj_headers destobj)
"x-amz-metadata-directive"
"REPLACE")
})
-- from MissingH --
addToAL :: Eq key => [(key, elt)] -> key -> elt -> [(key, elt)]
addToAL l key value = (key, value) : (filter (\a -> (fst a) /= key) l)
|
necrobious/hS3
|
Network/AWS/S3Object.hs
|
bsd-3-clause
| 11,394 | 0 | 17 | 3,831 | 1,671 | 929 | 742 | 136 | 2 |
module Control.Distributed.Task.Util.ErrorHandling (withErrorPrefix, withErrorAction) where
import Control.Exception (catch, SomeException)
withErrorPrefix :: String -> IO a -> IO a
withErrorPrefix = withErrorHandling Nothing
withErrorAction :: (String -> IO ()) -> String -> IO a -> IO a
withErrorAction = withErrorHandling . Just
withErrorHandling :: Maybe (String -> IO ()) -> String -> IO a -> IO a
withErrorHandling errorAction prefix action = action `catch` wrapError
where
wrapError :: SomeException -> IO a
wrapError e = let errorMessage = prefix++": "++(show e) in do
maybe (return ()) (\eA -> eA errorMessage) errorAction
error $ errorMessage
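-- A minimal usage sketch (not part of the original module): any exception
-- raised while reading the file is re-thrown with a descriptive prefix.
readConfigOrFail :: FilePath -> IO String
readConfigOrFail path = withErrorPrefix ("reading config " ++ path) (readFile path)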
|
michaxm/task-distribution
|
src/Control/Distributed/Task/Util/ErrorHandling.hs
|
bsd-3-clause
| 683 | 0 | 14 | 123 | 240 | 123 | 117 | 12 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Client.CheatVarT where
import Control.Lens (makeLenses)
import Types
makeLenses ''CheatVarT
|
ksaveljev/hake-2
|
src/Client/CheatVarT.hs
|
bsd-3-clause
| 155 | 0 | 6 | 37 | 28 | 16 | 12 | 5 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE Rank2Types #-}
module Flux where
import Data.Maybe(fromMaybe)
import Control.Lens(view,over,set,makeLenses,lens,Lens')
import Data.Ratio((%), Rational, numerator, denominator)
import qualified Data.Map as M
import qualified Data.Set as S
newtype VoterId = VoterId Int deriving (Show,Eq,Ord,Enum)
newtype IssueId = IssueId Int deriving (Show,Eq,Ord,Enum)
-- | A quantity of votes for a particular issue.
--
-- A fixed pool of vote tokens are created for each issue. The vote tokens can
-- only be used to vote _for that issue_, or they can be traded with other
-- votes in exchange for liquidity tokens.
newtype VoteTokens = VoteTokens Int deriving (Show,Eq,Ord,Num,Enum,Integral,Real)
-- | A quantity of liquidity tokens.
--
-- Liquidity tokens are the medium of exchange by which vote tokens
-- can be traded between voters.
newtype LiquidityTokens = LiquidityTokens Int deriving (Show,Eq,Ord,Num,Enum,Integral,Real)
-- | The overall system state, consisting of information about
-- each voter, and about each issue.
data State = State {
_voters :: M.Map VoterId VoterState,
_issues :: M.Map IssueId IssueState
}
-- | The state associated with each voter
data VoterState = VoterState {
-- | How many votes this voter will receive for each new issue
_votesPerIssue :: VoteTokens,
-- | The number of liquidity tokens currently held
_liquidityTokens :: LiquidityTokens,
-- | How votes should be delegated
_delegation :: M.Map VoterId Rational
}
-- | The state for each voter on each issue
data VoterIssueState = VoterIssueState {
-- | The number of votes available for the issue
_availableVotes :: VoteTokens,
-- | The voter's position on the issue
_vote :: Vote
}
data IssueState = IssueState {
_votes :: M.Map VoterId VoterIssueState
}
data Vote = InFavour | Against | Abstained
data VoteResult = VoteResult {
inFavour :: VoteTokens,
against :: VoteTokens,
abstained :: VoteTokens
}
makeLenses ''VoterState
makeLenses ''State
makeLenses ''VoterIssueState
makeLenses ''IssueState
-- | A initial state with no issues and no voters
initialState :: State
initialState = State M.empty M.empty
-- | Register a new voter, and allocate a unique id.
--
-- For each new issue the voter will receive `nVotes`
-- vote tokens. For most voters `nVotes` will be 1, though
-- political parties will receive more votes where their
-- preferences have contributed to the election of a flux
-- senator.
--
newVoter :: VoteTokens -> State -> (State,VoterId)
newVoter nVotes state = (state',vid)
where
state' = over voters (M.insert vid details) state
vid = nextKey (view voters state)
details = VoterState nVotes 0 M.empty
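-- A minimal sketch (not part of the original module): starting from
-- 'initialState', register an ordinary voter with one vote per issue and a
-- party voter with three votes per issue.
exampleVoters :: (State, (VoterId, VoterId))
exampleVoters = (s2, (ordinary, party))
  where
    (s1, ordinary) = newVoter 1 initialState
    (s2, party)    = newVoter 3 s1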
-- | Create a new issue. Currently registered voters
-- receive their allocated votes for the issue, and their
-- vote on this issue will default to abstain.
--
newIssue :: State -> (State,IssueId)
newIssue state = (state',iid)
where
state' = over issues (M.insert iid details) state
iid = nextKey (view issues state)
details = IssueState (M.fromList [ (vid,VoterIssueState nvotes Abstained)
| (vid,nvotes) <- M.toList votesPostDelegation])
votesPreDelegation :: M.Map VoterId VoteTokens
votesPreDelegation = M.fromList [ (vid,view votesPerIssue vs) | (vid,vs) <- M.toList (view voters state)]
votesPostDelegation :: M.Map VoterId VoteTokens
votesPostDelegation = delegateVotes delegationGraph votesPreDelegation
delegationGraph :: DelGraph
delegationGraph = foldr (uncurry graphAdd) emptyGraph $
[ (vid,view delegation vs) | (vid,vs) <- M.toList (view voters state) ]
-- | Set the vote for a voter on a given issue
--
-- Subsequent calls to this function will override previous calls: the
-- last request prior to the issue ending will be used.
--
setVote :: VoterId -> IssueId -> Vote -> State -> State
setVote vid iid v = set (issues . melem iid . votes . melem vid . vote) v
-- | Trade some votes on an issue with another voter.
--
-- The request `tradeVotes voter1 voter2 issue nVotes nLiquidity` will
-- have voter1 receive nVotes from voter2, and give nLiquidity tokens to
-- voter2. All vote and liquidity token balances must remain
-- positive.
--
tradeVotes :: VoterId -> VoterId -> IssueId -> VoteTokens -> LiquidityTokens -> State -> State
tradeVotes vid1 vid2 iid voteCount tokens
= over (issues . melem iid . votes. melem vid1 . availableVotes) (\v -> checkGEZero (v+voteCount))
. over (issues . melem iid . votes . melem vid2 . availableVotes) (\v -> checkGEZero (v-voteCount))
. over (voters . melem vid1 . liquidityTokens) (\v -> checkGEZero (v-tokens))
. over (voters . melem vid2 . liquidityTokens) (\v -> checkGEZero (v+tokens))
-- | Mark an issue as completed, and accumulate the voting results.
--
endIssue :: IssueId -> State -> (State,VoteResult)
endIssue iid state = (state',result)
where
issueState = view (issues . melem iid) state
state' = over issues (M.delete iid) state
result = VoteResult votesFor votesAgainst votesAbstained
allVotes = M.elems (view votes issueState)
votesFor = sum [votes | (VoterIssueState votes InFavour) <- allVotes]
votesAgainst = sum [votes | (VoterIssueState votes Against) <- allVotes]
votesAbstained = sum [votes | (VoterIssueState votes Abstained) <- allVotes]
-- | Distribute a new batch of liquidity tokens to voters.
--
-- Each voter will receive a fraction of the tokens according to
-- her/his fraction of the current voting pool
--
distributeLiquidity :: LiquidityTokens -> State -> State
distributeLiquidity toShare state = over voters (M.map updateVoter) state
where
nTotalVotes = sum [view votesPerIssue v | v <- M.elems (view voters state)]
updateVoter vs = over liquidityTokens ((+) (scale toShare ratio)) vs
where
ratio = mkRational (view votesPerIssue vs) nTotalVotes
type VoteAllocation = M.Map VoterId VoteTokens
-- | Given a graph of the delegation requests between voters, and
-- the votes available to each voter, calculate the final distribution
-- of votes.
--
-- Notes:
-- * The graph must be acyclic.
-- * Any residual, undelegated vote remains with the original
-- voter
--
delegateVotes :: DelGraph -> VoteAllocation -> VoteAllocation
delegateVotes g alloc
| graphSize g == 0 = alloc
| otherwise = let (g',alloc') = delegateStep g alloc
in if graphSize g' == graphSize g
then error "step made no progress - graph contains cycles"
else delegateVotes g' alloc'
where
-- Each step we process the "leaf" voters, ie those that don't
-- have any votes delegated to them. Assuming there are no cycles
-- in the graph, then each step will produce new leaf voters until
-- the graph is empty.
delegateStep :: DelGraph -> VoteAllocation -> (DelGraph,VoteAllocation)
delegateStep g alloc = (g',alloc')
where
leafVoters = S.difference (M.keysSet (delegationsTo g)) (M.keysSet (delegationsFrom g))
alloc' = foldr distributeVotes alloc (S.toList leafVoters)
g' = S.foldr graphRemove g leafVoters
distributeVotes :: VoterId -> VoteAllocation -> VoteAllocation
distributeVotes vid alloc = M.unionWith (+) toAdd $ M.unionWith (-) toRemove $ alloc
where
availableVotes :: VoteTokens
availableVotes = fromMaybe 0 (M.lookup vid alloc)
distribRatios :: M.Map VoterId Rational
distribRatios = fromMaybe M.empty (M.lookup vid (delegationsTo g))
toAdd,toRemove:: VoteAllocation
toAdd = M.map (scale availableVotes) distribRatios
toRemove = M.singleton vid (sum (M.elems toAdd))
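-- A minimal sketch (not part of the original module): voter 1 delegates her
-- whole vote to voter 2, so after delegation voter 2 should hold both vote
-- tokens and voter 1 none. The voter ids and token counts are made up.
delegationExample :: VoteAllocation
delegationExample = delegateVotes g alloc
  where
    g     = graphAdd (VoterId 1) (M.singleton (VoterId 2) (1 % 1)) emptyGraph
    alloc = M.fromList [(VoterId 1, 1), (VoterId 2, 1)]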
----------------------------------------------------------------------
-- Directed Acyclic Graph structure for representing delegated votes
--
-- Invariants:
-- * The keyset of the delegationsTo map is the set of valid voters.
-- ie the values in the delegationsTo map, and the keys and values of
-- the delegationsFrom map must all be valid.
-- * The values of the delegationsFrom map must never be empty.
-- * The graph must be acyclic
data DelGraph = DelGraph {
delegationsFrom :: M.Map VoterId (S.Set VoterId),
delegationsTo :: M.Map VoterId (M.Map VoterId Rational)
}
emptyGraph :: DelGraph
emptyGraph = DelGraph M.empty M.empty
graphAdd :: VoterId -> (M.Map VoterId Rational) -> DelGraph -> DelGraph
graphAdd vid delegation g@(DelGraph from to)
| M.member vid to = graphAdd vid delegation (graphRemove vid g)
| M.null delegation = g
| otherwise = DelGraph from' to'
where
    -- record, for each delegate, that 'vid' now delegates to them
    from' = foldr (\d -> M.insertWith S.union d (S.singleton vid)) from (M.keys delegation)
    to' = M.insert vid delegation to
graphRemove :: VoterId -> DelGraph -> DelGraph
graphRemove vid g@(DelGraph from to) =
case M.lookup vid to of
Nothing -> g
(Just delegations) ->
let from' = foldr (removeFrom vid) from (M.keys delegations)
to' = M.delete vid to
in DelGraph from' to'
where
removeFrom :: VoterId -> VoterId -> M.Map VoterId (S.Set VoterId) -> M.Map VoterId (S.Set VoterId)
removeFrom vid0 vid m = case M.lookup vid m of
Nothing -> m
(Just s) ->
let s' = S.delete vid0 s
in if S.null s' then M.delete vid m else M.insert vid s' m
graphSize :: DelGraph -> Int
graphSize g = M.size (delegationsTo g)
----------------------------------------------------------------------
-- Helper functions
nextKey :: (Enum k) => M.Map k a -> k
nextKey m | M.null m = toEnum 0
| otherwise = let (k,_) = M.findMax m in succ k
checkGEZero :: (Num a, Ord a) => a -> a
checkGEZero a | a < 0 = error "balance may not be negative"
| otherwise = a
melem :: (Ord k) => k -> Lens' (M.Map k v) v
melem k = lens get set
where
get m = case M.lookup k m of
Nothing -> error "missing map key"
(Just v) -> v
set m v = M.insert k v m
mkRational :: (Integral a) => a -> a -> Rational
mkRational v1 v2 = fromIntegral v1 % fromIntegral v2
scale :: Integral a => a-> Rational -> a
scale a rat = (a * fromIntegral (numerator rat)) `div` fromIntegral (denominator rat)
|
timbod7/flux-model
|
src/Flux.hs
|
bsd-3-clause
| 10,297 | 0 | 16 | 2,250 | 2,661 | 1,407 | 1,254 | 142 | 4 |
{-# LANGUAGE TemplateHaskell #-}
module Insomnia.SurfaceSyntax.SourcePos where
import Control.Applicative
import Control.Lens
import qualified Text.Parsec as P
data SourcePos = SourcePos {
_sourcePosName :: String
, _sourcePosLine :: !Int
, _sourcePosColumn :: !Int
}
data Positioned a = Positioned !SourcePos !a
instance Functor Positioned where
fmap f (Positioned p x) = Positioned p $ f x
instance Show a => Show (Positioned a) where
showsPrec _ (Positioned p x) = prettySourcePos p . showsPrec 10 x
prettySourcePos :: SourcePos -> ShowS
prettySourcePos (SourcePos f l c) =
showString f . showString ":"
. shows l . showString ":"
. shows c . showString ":"
makeLenses ''SourcePos
positioned :: Lens (Positioned a) (Positioned b) a b
positioned = lens getter setter
where
getter :: Positioned a -> a
getter (Positioned _pos x) = x
setter :: Positioned a -> b -> Positioned b
setter (Positioned pos _x) y = Positioned pos y
positionedSourcePos :: Lens' (Positioned a) SourcePos
positionedSourcePos = lens getter setter
where
getter (Positioned pos _x) = pos
setter (Positioned _pos x) pos' = (Positioned pos' x)
parsePositioned :: Monad m => P.ParsecT s u m a -> P.ParsecT s u m (Positioned a)
parsePositioned p =
Positioned <$> getPosition <*> p
where
getPosition = fmap pos2pos P.getPosition
pos2pos :: P.SourcePos -> SourcePos
pos2pos posn =
SourcePos (P.sourceName posn) (P.sourceLine posn) (P.sourceColumn posn)
|
lambdageek/insomnia
|
src/Insomnia/SurfaceSyntax/SourcePos.hs
|
bsd-3-clause
| 1,501 | 0 | 10 | 305 | 531 | 265 | 266 | 45 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Traversable
-- Copyright : Conor McBride and Ross Paterson 2005
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Class of data structures that can be traversed from left to right,
-- performing an action on each element.
--
-- See also
--
-- * \"Applicative Programming with Effects\",
-- by Conor McBride and Ross Paterson,
-- /Journal of Functional Programming/ 18:1 (2008) 1-13, online at
-- <http://www.soi.city.ac.uk/~ross/papers/Applicative.html>.
--
-- * \"The Essence of the Iterator Pattern\",
-- by Jeremy Gibbons and Bruno Oliveira,
-- in /Mathematically-Structured Functional Programming/, 2006, online at
-- <http://web.comlab.ox.ac.uk/oucl/work/jeremy.gibbons/publications/#iterator>.
--
-- * \"An Investigation of the Laws of Traversals\",
-- by Mauro Jaskelioff and Ondrej Rypacek,
-- in /Mathematically-Structured Functional Programming/, 2012, online at
-- <http://arxiv.org/pdf/1202.2919>.
--
-----------------------------------------------------------------------------
module Data.Traversable (
-- * The 'Traversable' class
Traversable(..),
-- * Utility functions
for,
forM,
mapAccumL,
mapAccumR,
-- * General definitions for superclass methods
fmapDefault,
foldMapDefault,
) where
import Control.Applicative ( Const(..) )
import Data.Either ( Either(..) )
import Data.Foldable ( Foldable )
import Data.Functor
import Data.Proxy ( Proxy(..) )
import GHC.Arr
import GHC.Base ( Applicative(..), Monad(..), Monoid, Maybe(..),
($), (.), id, flip )
import qualified GHC.Base as Monad ( mapM )
import qualified GHC.List as List ( foldr )
-- | Functors representing data structures that can be traversed from
-- left to right.
--
-- A definition of 'traverse' must satisfy the following laws:
--
-- [/naturality/]
-- @t . 'traverse' f = 'traverse' (t . f)@
-- for every applicative transformation @t@
--
-- [/identity/]
-- @'traverse' Identity = Identity@
--
-- [/composition/]
-- @'traverse' (Compose . 'fmap' g . f) = Compose . 'fmap' ('traverse' g) . 'traverse' f@
--
-- A definition of 'sequenceA' must satisfy the following laws:
--
-- [/naturality/]
-- @t . 'sequenceA' = 'sequenceA' . 'fmap' t@
-- for every applicative transformation @t@
--
-- [/identity/]
-- @'sequenceA' . 'fmap' Identity = Identity@
--
-- [/composition/]
-- @'sequenceA' . 'fmap' Compose = Compose . 'fmap' 'sequenceA' . 'sequenceA'@
--
-- where an /applicative transformation/ is a function
--
-- @t :: (Applicative f, Applicative g) => f a -> g a@
--
-- preserving the 'Applicative' operations, i.e.
--
-- * @t ('pure' x) = 'pure' x@
--
-- * @t (x '<*>' y) = t x '<*>' t y@
--
-- and the identity functor @Identity@ and composition of functors @Compose@
-- are defined as
--
-- > newtype Identity a = Identity a
-- >
-- > instance Functor Identity where
-- > fmap f (Identity x) = Identity (f x)
-- >
-- > instance Applicative Identity where
-- > pure x = Identity x
-- > Identity f <*> Identity x = Identity (f x)
-- >
-- > newtype Compose f g a = Compose (f (g a))
-- >
-- > instance (Functor f, Functor g) => Functor (Compose f g) where
-- > fmap f (Compose x) = Compose (fmap (fmap f) x)
-- >
-- > instance (Applicative f, Applicative g) => Applicative (Compose f g) where
-- > pure x = Compose (pure (pure x))
-- > Compose f <*> Compose x = Compose ((<*>) <$> f <*> x)
--
-- (The naturality law is implied by parametricity.)
--
-- Instances are similar to 'Functor', e.g. given a data type
--
-- > data Tree a = Empty | Leaf a | Node (Tree a) a (Tree a)
--
-- a suitable instance would be
--
-- > instance Traversable Tree where
-- > traverse f Empty = pure Empty
-- > traverse f (Leaf x) = Leaf <$> f x
-- > traverse f (Node l k r) = Node <$> traverse f l <*> f k <*> traverse f r
--
-- This is suitable even for abstract types, as the laws for '<*>'
-- imply a form of associativity.
--
-- The superclass instances should satisfy the following:
--
-- * In the 'Functor' instance, 'fmap' should be equivalent to traversal
-- with the identity applicative functor ('fmapDefault').
--
-- * In the 'Foldable' instance, 'Data.Foldable.foldMap' should be
-- equivalent to traversal with a constant applicative functor
-- ('foldMapDefault').
--
class (Functor t, Foldable t) => Traversable t where
{-# MINIMAL traverse | sequenceA #-}
-- | Map each element of a structure to an action, evaluate
-- these actions from left to right, and collect the results.
traverse :: Applicative f => (a -> f b) -> t a -> f (t b)
traverse f = sequenceA . fmap f
-- | Evaluate each action in the structure from left to right,
-- and collect the results.
sequenceA :: Applicative f => t (f a) -> f (t a)
sequenceA = traverse id
-- | Map each element of a structure to a monadic action, evaluate
-- these actions from left to right, and collect the results.
mapM :: Monad m => (a -> m b) -> t a -> m (t b)
mapM = traverse
-- | Evaluate each monadic action in the structure from left to right,
-- and collect the results.
sequence :: Monad m => t (m a) -> m (t a)
sequence = sequenceA
-- instances for Prelude types
instance Traversable Maybe where
traverse _ Nothing = pure Nothing
traverse f (Just x) = Just <$> f x
instance Traversable [] where
{-# INLINE traverse #-} -- so that traverse can fuse
traverse f = List.foldr cons_f (pure [])
where cons_f x ys = (:) <$> f x <*> ys
mapM = Monad.mapM
instance Traversable (Either a) where
traverse _ (Left x) = pure (Left x)
traverse f (Right y) = Right <$> f y
instance Traversable ((,) a) where
traverse f (x, y) = (,) x <$> f y
instance Ix i => Traversable (Array i) where
traverse f arr = listArray (bounds arr) `fmap` traverse f (elems arr)
instance Traversable Proxy where
traverse _ _ = pure Proxy
{-# INLINE traverse #-}
sequenceA _ = pure Proxy
{-# INLINE sequenceA #-}
mapM _ _ = return Proxy
{-# INLINE mapM #-}
sequence _ = return Proxy
{-# INLINE sequence #-}
instance Traversable (Const m) where
traverse _ (Const m) = pure $ Const m
-- general functions
-- | 'for' is 'traverse' with its arguments flipped.
for :: (Traversable t, Applicative f) => t a -> (a -> f b) -> f (t b)
{-# INLINE for #-}
for = flip traverse
-- | 'forM' is 'mapM' with its arguments flipped.
forM :: (Traversable t, Monad m) => t a -> (a -> m b) -> m (t b)
{-# INLINE forM #-}
forM = flip mapM
-- left-to-right state transformer
newtype StateL s a = StateL { runStateL :: s -> (s, a) }
instance Functor (StateL s) where
fmap f (StateL k) = StateL $ \ s -> let (s', v) = k s in (s', f v)
instance Applicative (StateL s) where
pure x = StateL (\ s -> (s, x))
StateL kf <*> StateL kv = StateL $ \ s ->
let (s', f) = kf s
(s'', v) = kv s'
in (s'', f v)
-- |The 'mapAccumL' function behaves like a combination of 'fmap'
-- and 'foldl'; it applies a function to each element of a structure,
-- passing an accumulating parameter from left to right, and returning
-- a final value of this accumulator together with the new structure.
mapAccumL :: Traversable t => (a -> b -> (a, c)) -> a -> t b -> (a, t c)
mapAccumL f s t = runStateL (traverse (StateL . flip f) t) s
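-- A small usage sketch (not part of the original module): pair each element
-- with the (most-recent-first) list of elements seen before it, threading
-- the accumulator from the left, e.g.
-- seenSoFar "abc" == ("cba", ["", "a", "ba"]).
seenSoFar :: Traversable t => t a -> ([a], t [a])
seenSoFar = mapAccumL (\seen x -> (x : seen, seen)) []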
-- right-to-left state transformer
newtype StateR s a = StateR { runStateR :: s -> (s, a) }
instance Functor (StateR s) where
fmap f (StateR k) = StateR $ \ s -> let (s', v) = k s in (s', f v)
instance Applicative (StateR s) where
pure x = StateR (\ s -> (s, x))
StateR kf <*> StateR kv = StateR $ \ s ->
let (s', v) = kv s
(s'', f) = kf s'
in (s'', f v)
-- |The 'mapAccumR' function behaves like a combination of 'fmap'
-- and 'foldr'; it applies a function to each element of a structure,
-- passing an accumulating parameter from right to left, and returning
-- a final value of this accumulator together with the new structure.
mapAccumR :: Traversable t => (a -> b -> (a, c)) -> a -> t b -> (a, t c)
mapAccumR f s t = runStateR (traverse (StateR . flip f) t) s
-- | This function may be used as a value for `fmap` in a `Functor`
-- instance, provided that 'traverse' is defined. (Using
-- `fmapDefault` with a `Traversable` instance defined only by
-- 'sequenceA' will result in infinite recursion.)
fmapDefault :: Traversable t => (a -> b) -> t a -> t b
{-# INLINE fmapDefault #-}
fmapDefault f = getId . traverse (Id . f)
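-- A small illustration (not part of the original module): with the Tree type
-- and its Traversable instance from the class documentation above, a Functor
-- instance can be given as
--
-- > instance Functor Tree where
-- >     fmap = fmapDefault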
-- | This function may be used as a value for `Data.Foldable.foldMap`
-- in a `Foldable` instance.
foldMapDefault :: (Traversable t, Monoid m) => (a -> m) -> t a -> m
foldMapDefault f = getConst . traverse (Const . f)
-- local instances
newtype Id a = Id { getId :: a }
instance Functor Id where
fmap f (Id x) = Id (f x)
instance Applicative Id where
pure = Id
Id f <*> Id x = Id (f x)
|
jstolarek/ghc
|
libraries/base/Data/Traversable.hs
|
bsd-3-clause
| 9,236 | 0 | 12 | 2,078 | 1,804 | 1,018 | 786 | 95 | 1 |
module Music.Score (
module Music.Score.Part,
module Music.Score.Pitch,
module Music.Score.Dynamics,
module Music.Score.Articulation,
module Music.Score.Slide,
module Music.Score.Tremolo,
module Music.Score.Text,
module Music.Score.Harmonics,
module Music.Score.Color,
module Music.Score.Ties,
module Music.Score.Phrases,
module Music.Score.Meta,
module Music.Score.Meta.Title,
module Music.Score.Meta.Attribution,
module Music.Score.Meta.RehearsalMark,
module Music.Score.Meta.Barline,
module Music.Score.Meta.Clef,
module Music.Score.Meta.Fermata,
module Music.Score.Meta.Key,
module Music.Score.Meta.Time,
module Music.Score.Meta.Tempo,
module Music.Score.Meta.Annotations,
module Music.Score.Import.Abc,
module Music.Score.Import.Lilypond,
module Music.Score.Import.Midi,
module Music.Score.Export.Backend,
module Music.Score.Export.NoteList,
module Music.Score.Export.Midi,
module Music.Score.Export.SuperCollider,
-- module Music.Score.Export.Lilypond,
-- module Music.Score.Export.MusicXml,
module Music.Time,
module Control.Lens.Operators,
module Control.Applicative,
module Control.Monad,
module Control.Monad.Plus,
module Data.Semigroup,
module Data.VectorSpace,
module Data.AffineSpace,
module Data.AffineSpace.Point,
from,
toListOf,
view,
-- to,
)
where
import Control.Applicative
import Control.Lens.Operators hiding ((<.>), (<|), (|>))
import Control.Lens (from, toListOf, view)
-- import Control.Lens hiding (Level, above, below,
-- inside, parts, reversed,
-- rewrite, simple, transform,
-- (<.>), (<|), (|>))
import Control.Monad hiding (mapM)
import Control.Monad.Plus
import Data.AffineSpace
import Data.AffineSpace.Point
import Data.Basis
import Data.Either
import Data.Foldable
import Data.Maybe
import Data.Ratio
import Data.Semigroup
import Data.Traversable
import Data.Typeable
import Data.VectorSpace hiding (Sum, getSum)
import Music.Time hiding (time)
import Music.Score.Articulation
import Music.Score.Color
import Music.Score.Dynamics
import Music.Score.Export.Backend
import Music.Score.Export.NoteList
import Music.Score.Export.Midi
import Music.Score.Export.SuperCollider
import Music.Score.Export.Abc
-- import Music.Score.Export.Lilypond
import Music.Score.Export.Midi
-- import Music.Score.Export.MusicXml
import Music.Score.Harmonics
import Music.Score.Import.Abc
import Music.Score.Import.Lilypond
import Music.Score.Import.Midi
import Music.Score.Internal.Instances
import Music.Score.Meta
import Music.Score.Meta.Annotations
import Music.Score.Meta.Attribution
import Music.Score.Meta.Barline
import Music.Score.Meta.Clef
import Music.Score.Meta.Fermata
import Music.Score.Meta.Key
import Music.Score.Meta.RehearsalMark
import Music.Score.Meta.Tempo
import Music.Score.Meta.Time
import Music.Score.Meta.Title
import Music.Score.Part
import Music.Score.Phrases
import Music.Score.Pitch
import Music.Score.Slide
import Music.Score.Text
import Music.Score.Ties
import Music.Score.Tremolo
|
music-suite/music-score
|
src/Music/Score.hs
|
bsd-3-clause
| 4,003 | 0 | 5 | 1,308 | 654 | 459 | 195 | 91 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-|
Module : Network.Spotify.Api.Types.Paging
Description : Spotify Paging object for pages of data.
Stability : experimental
-}
module Network.Spotify.Api.Types.Paging where
import Data.Aeson (FromJSON (parseJSON), ToJSON, defaultOptions,
genericParseJSON, genericToJSON, toJSON)
import Data.Text (Text)
import GHC.Generics (Generic)
import Servant ((:>), QueryParam)
-- | `limit` and `offset` query parameters for a request to paged Spotify Content
type PagedRequest = QueryParam "limit" Int :> QueryParam "offset" Int
-- | A request for a page of Spotify Content.
data PageRequest = PageRequest
{ requestLimit :: Maybe Int -- ^ The maximum number of items in the response
-- (as set in the query or by default).
, requestOffset :: Maybe Int -- ^ The offset of the items returned (as set in the
-- query or by default).
}
firstPage :: Int -> PageRequest
firstPage l = PageRequest (Just l) (Just 0)
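-- A minimal sketch (not part of the original module): request the second
-- page of results when pages hold 20 items each.
secondPageOf20 :: PageRequest
secondPageOf20 = PageRequest (Just 20) (Just 20)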
-- | A Spotify Paging object for pages of Spotify Content.
data Paging a = Paging
{ href :: Text -- ^ A link to the Web API endpoint returning the full
-- result of the request.
, items :: a -- ^ The requested data.
, limit :: Int -- ^ The maximum number of items in the response
-- (as set in the query or by default).
, next :: Maybe Text -- ^ URL to the next page of items. ('Nothing' if
-- none)
, offset :: Int -- ^ The offset of the items returned (as set in the
-- query or by default).
, previous :: Maybe Text -- ^ URL to the previous page of items. ('Nothing'
-- if none)
, total :: Int -- ^ The total number of items available to return.
} deriving (Generic)
instance FromJSON a => FromJSON (Paging a) where
parseJSON = genericParseJSON defaultOptions
instance ToJSON a => ToJSON (Paging a) where
toJSON = genericToJSON defaultOptions
|
chances/servant-spotify
|
src/Network/Spotify/Api/Types/Paging.hs
|
bsd-3-clause
| 2,257 | 0 | 9 | 708 | 299 | 181 | 118 | 29 | 1 |
module Util where
import Crypto.Hash
import Data.List
import qualified Data.ByteString.Lazy as LB
import qualified Data.ByteString.Lazy.Char8 as LC
type Freq = (Char, Int)
{- | Determine the frequency of each char in a string, ignoring '-' characters. -}
freq :: String -> [Freq]
freq = map charfreq . group . sort . filter (/= '-')
where
charfreq l | h : _ <- l = (h, length l)
charfreq _ = error "empty freq list?"
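{- | A small usage sketch (not part of the original module):
     freqExample == [('a',5),('b',2),('c',1),('d',1),('r',2)] -}
freqExample :: [Freq]
freqExample = freq "abracadabra"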
{- | Simple wrapper to get the MD5 hash of a String as a String -}
md5 :: String -> String
md5 = LC.unpack . LC.fromStrict . digestToHexByteString . md5' . LC.pack
where
md5' :: LB.ByteString -> Digest MD5
md5' = hashlazy
{- | Shorthand for @a `seq` a@. -}
seq' :: a -> a
seq' a' = a' `seq` a'
|
wfleming/advent-of-code-2016
|
2016/src/Util.hs
|
bsd-3-clause
| 717 | 0 | 11 | 160 | 211 | 120 | 91 | 16 | 2 |
module Example.DataSource
(
config
, connect
, defineTable
)
where
import Language.Haskell.TH (Q, Dec, TypeQ)
import Language.Haskell.TH.Name.CamelCase (ConName)
import Database.HDBC.Query.TH (defineTableFromDB)
import Database.HDBC.Schema.Driver (typeMap)
import Database.HDBC.Schema.MySQL (driverMySQL)
import Database.HDBC.MySQL ( Connection
, connectMySQL
, MySQLConnectInfo(..)
, defaultMySQLConnectInfo
)
config :: MySQLConnectInfo
config = defaultMySQLConnectInfo {
mysqlUser = "hrr-tester"
, mysqlPassword = ""
, mysqlDatabase = "TEST"
, mysqlHost = "127.0.0.1"
}
connect :: IO Connection
connect = connectMySQL config
defineTable :: [(String, TypeQ)] -> String -> String -> [ConName] -> Q [Dec]
defineTable tmap = defineTableFromDB connect (driverMySQL { typeMap = tmap })
|
krdlab/haskell-relational-record-driver-mysql
|
example/src/Example/DataSource.hs
|
bsd-3-clause
| 1,128 | 0 | 10 | 439 | 222 | 137 | 85 | 24 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Server.ChallengeT where
import Control.Lens (makeLenses)
import QCommon.NetAdrT
import Types
makeLenses ''ChallengeT
newChallengeT :: ChallengeT
newChallengeT = ChallengeT
{ _chAdr = newNetAdrT
, _chChallenge = 0
, _chTime = 0
}
|
ksaveljev/hake-2
|
src/Server/ChallengeT.hs
|
bsd-3-clause
| 332 | 0 | 6 | 98 | 63 | 38 | 25 | 11 | 1 |
{-# LANGUAGE CPP ,NoMonomorphismRestriction #-}
-- |Encoder and encoding primitives
module Flat.Encoder (
Encoding,
(<>),
NumBits,
encodersS,
mempty,
strictEncoder,
eTrueF,
eFalseF,
eFloat,
eDouble,
eInteger,
eNatural,
eWord16,
eWord32,
eWord64,
eWord8,
eBits,
eBits16,
eFiller,
eBool,
eTrue,
eFalse,
eBytes,
#if! defined(ghcjs_HOST_OS) && ! defined (ETA_VERSION)
eUTF16,
#endif
eLazyBytes,
eShortBytes,
eInt,
eInt8,
eInt16,
eInt32,
eInt64,
eWord,
eChar,
encodeArrayWith,
encodeListWith,
Size,
arrayBits,
sWord,
sWord8,
sWord16,
sWord32,
sWord64,
sInt,
sInt8,
sInt16,
sInt32,
sInt64,
sNatural,
sInteger,
sFloat,
sDouble,
sChar,
sBytes,
sLazyBytes,
sShortBytes,
#ifndef ghcjs_HOST_OS
sUTF16,
#endif
sFillerMax,
sBool,
sUTF8Max,
eUTF8,
#ifdef ETA_VERSION
trampolineEncoding,
#endif
) where
import Flat.Encoder.Prim ( eTrueF, eFalseF )
import Flat.Encoder.Size(arrayBits)
import Flat.Encoder.Strict
import Flat.Encoder.Types ( NumBits, Size )
#if ! MIN_VERSION_base(4,11,0)
import Data.Semigroup((<>))
#endif
|
tittoassini/flat
|
src/Flat/Encoder.hs
|
bsd-3-clause
| 1,291 | 0 | 5 | 396 | 261 | 177 | 84 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE Rank2Types #-}
module Sound.Fm
(
-- * Frequency modulation
-- * Arrow-style
afm
-- * Scan-style
, sfm
-- * Phase modulation
, pm
-- * Specialized
, ltpm
, rltpm
, gtfm
, rltfm
)
where
import Control.Arrow
import qualified Data.Vector.Unboxed as Vu
import Sound.Class
import Sound.InfList
import Sound.Int
import Sound.Hint
import Sound.Table
import Sound.Time
afm :: (Arrow a, Num n, Pair p) => StepSize n -> (Phase n -> Out u) -> a (p (Inp n) (Sta n)) (p (Out u) (Sta n))
afm d w = aint d >>^ mapFst w
{-# INLINE afm #-}
pm :: (Num n) => (Phase n -> Out u) -> Inp n -> Out u
pm = id
{-# INLINE pm #-}
{- | Frequency modulation wavetable synthesis.
The modulator output becomes the carrier frequency.
The integral of the modulator output becomes the carrier phase.
-}
sfm :: (RealFrac a, Vu.Unbox a, Functor f, Scan f) => StepSize a -> Carrier (Tab a) -> Modulator (f a) -> f a
sfm p carrier modulator =
fmap (tlookup carrier) (sint p modulator)
{-# INLINE sfm #-}
gtfm :: (RealFrac a, Vu.Unbox a, Functor f, Integrable f) => Precision r a -> Carrier (Tab a) -> Modulator (f a) -> f a
gtfm prec carrier modulator =
fmap f x
where
p = _prPeriod prec
f = tlookup carrier
x = int p modulator
{-# INLINE gtfm #-}
rltfm :: (RealFrac a, Vu.Unbox a) => Carrier (Tab a) -> Modulator (RL a) -> RL a
rltfm carrier modulator = rltpm carrier (rlint modulator)
{-# INLINE rltfm #-}
ltpm :: (RealFrac a, Vu.Unbox a) => Carrier (Tab a) -> Modulator (L a) -> L a
ltpm carrier = fmap (tlookup carrier)
{-# INLINE ltpm #-}
{- | Phase modulation wavetable synthesis.
The modulator output becomes the carrier phase.
-}
rltpm :: (RealFrac a, Vu.Unbox a) => Carrier (Tab a) -> Modulator (RL a) -> RL a
rltpm carrier modulator = rlmap (tlookup carrier) modulator
{-# INLINE rltpm #-}
|
edom/sound
|
src/Sound/Fm.hs
|
bsd-3-clause
| 1,903 | 0 | 12 | 434 | 680 | 351 | 329 | 45 | 1 |
{-# LANGUAGE RankNTypes #-}
module Lenses where
import Control.Lens (at, _Just, traverse, filtered, lens, view)
import Control.Lens.Operators
import Control.Lens.Type
import Types
atCoord :: (Int,Int) -> Traversal' Cave Room
atCoord (x,y) = rooms . at (x,y) . _Just
weaponRooms :: Traversal' Cave Room
weaponRooms = rooms . traverse . filtered (==Weapon)
playerCoord :: Lens' GameState (Int,Int)
playerCoord = player . playerPosition
playerHasWeapon :: Lens' GameState Bool
playerHasWeapon = player . playerWeapon
playerRoom :: Lens' GameState Room
playerRoom = lens getter setter
where getter g = g ^?! cave . atCoord (view playerCoord g)
setter g r = g & cave . atCoord (view playerCoord g) .~ r
|
markus1189/wumpus-cave
|
Lenses.hs
|
bsd-3-clause
| 715 | 0 | 11 | 123 | 252 | 138 | 114 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.SGIX.TextureMultiBuffer
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.SGIX.TextureMultiBuffer (
-- * Extension Support
glGetSGIXTextureMultiBuffer,
gl_SGIX_texture_multi_buffer,
-- * Enums
pattern GL_TEXTURE_MULTI_BUFFER_HINT_SGIX
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/SGIX/TextureMultiBuffer.hs | bsd-3-clause | 689 | 0 | 5 | 91 | 47 | 36 | 11 | 7 | 0 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
[lq| isEven :: Nat -> {v:Int | v = 0} -> Bool |]
[lq| Decrease isEven 1 2 |]
isEven :: Int -> Int -> Bool
isEven 0 _ = True
isEven n _ = isOdd (n-1) 1
[lq| isOdd :: Nat -> {v:Int | v = 1} -> Bool |]
[lq| Decrease isOdd 1 2 |]
isOdd :: Int -> Int -> Bool
isOdd n _ = not $ isEven n 0
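-- A worked evaluation of the mutual recursion above (illustration only; the
-- second argument appears to serve solely as the lexicographic termination
-- metric named in the Decrease annotations):
--   isEven 2 0 = isOdd 1 1 = not (isEven 1 0) = not (isOdd 0 1)
--              = not (not (isEven 0 0)) = not (not True) = True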
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/mutrec.hs | bsd-3-clause | 340 | 0 | 7 | 86 | 110 | 62 | 48 | 11 | 1 |
{-# LANGUAGE BangPatterns #-}
module Main where
import qualified Data.ByteString as BS
import System.IO
import Control.Monad
import Data.Bits
import Data.Word
import Data.Monoid
import Data.StrictPut
import Data.ByteString.Lazy.Builder -- requires bytestring-0.10.x
import Criterion.Main
import System.IO.Silently
bs = BS.replicate 64 0
builder = mconcat (replicate 1000000 (byteString bs))
buildPL = mconcat (replicate 64 (word8 0))
{-# INLINE buildPL #-}
buildMAC :: Word64 -> Builder
buildMAC i = word16BE (fromIntegral $ i `shiftR` 32) <> word32BE (fromIntegral $ i .&. 0xFFFFFFFF)
{-# INLINE buildMAC #-}
putMAC :: Word64 -> Put
putMAC i = putWord16be (fromIntegral $ i `shiftR` 32) >> putWord32be (fromIntegral $ i .&. 0xFFFFFFFF)
{-# INLINE putMAC #-}
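-- Worked example for the MAC split above (illustration, not in the original
-- source): for i = 0x0011223344556677,
--   i `shiftR` 32, truncated to Word16  = 0x2233
--   i .&. 0xFFFFFFFF                    = 0x44556677
-- so putMAC (and buildMAC) emit the low 48 bits big-endian:
-- 22 33 44 55 66 77.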
buildEth :: (Word64,Word64) -> Builder
buildEth (i,j) = mconcat [ buildMAC i, buildMAC j, word32BE 0, word16BE 0 -- Ethernet Frame header
, word32BE 0, word32BE 0, word32BE 0, word32BE 0 -- IP Header
, word32BE 0, word32BE 0, word32BE 0, word32BE 0 -- TCP Header
, byteString bs -- Payload
, word32BE 0 -- CRC32
]
{-# INLINE buildEth #-}
buildEth2 :: (Word64,Word64) -> Builder
buildEth2 (i,j) = mconcat [ buildMAC i, buildMAC j, word32BE 0, word16BE 0 -- Ethernet Frame header
, lazyByteString ipHeader -- IP Header
, lazyByteString tcpHeader -- TCP Header
, byteString bs -- Payload
, word32BE 0 -- CRC32
]
where ipHeader = toLazyByteString $ mconcat [word32BE 0, word32BE 0, word32BE 0, word32BE 0]
tcpHeader = toLazyByteString $ mconcat [word32BE 0, word32BE 0, word32BE 0, word32BE 0]
{-# NOINLINE buildEth2 #-}
putEth2 :: (Word64,Word64) -> Put
putEth2 (i,j) = do
putMAC i
putMAC j
putWord32be 0
putWord16be 0
putIpHeader
putTcpHeader
putByteString bs
putWord32be 0
where putIpHeader = do putWord32be 0 >> putWord32be 0 >> putWord32be 0 >> putWord32be 0
putTcpHeader = do putWord32be 0 >> putWord32be 0 >> putWord32be 0 >> putWord32be 0
{-# NOINLINE putEth2 #-}
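-- Size check (illustration, not in the original source): each frame written
-- by putEth2/buildEth is 6+6+4+2 (address and tag/type fields) + 16 (IP)
-- + 16 (TCP) + 64 (payload) + 4 (CRC) = 118 bytes, so the 4096- and
-- 32768-byte buffers used below hold many frames between flushes.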
buildAll = mconcat (map buildEth2 [(i,j)|i <- [1..100], j <- [1..100]])
{-# NOINLINE buildAll #-}
-- StrictPut w/o buffering
main1 = do
mapM_ (\x -> BS.hPutStr stdout (runPutToByteString 4096 (putEth2 x))) [(i,j) | i <- [1..100], j <- [1..100]]
-- StrictPut with buffering w/o reusing
main2 = do
b <- mkBuffer 32768
let go b' [] = BS.hPutStr stdout (extract b')
go b' i@(x:xs) =
if bufferSize b' > 16346
then BS.hPutStr stdout (extract b') >> mkBuffer 32768 >>= \b'' -> go b'' i
else do
b'' <- runPutToBuffer b' (putEth2 x)
go b'' xs
go b [(i,j) | i <- [1..100], j <- [1..100]]
-- StrictPut with buffering with reusing
main3 = do
b <- mkBuffer 32768
let go b' [] = BS.hPutStr stdout (extract b')
go b' i@(x:xs) =
if bufferSize b' > 16346
then BS.hPutStr stdout (extract b') >> go (reuse b') i
else do
b'' <- runPutToBuffer b' (putEth2 x)
go b'' xs
go b [(i,j) | i <- [1..100], j <- [1..100]]
-- bytestring
main4 = do
hPutBuilder stdout buildAll
main = defaultMain
[ bgroup "only generation"
[ bench "strict put w/o buffering" $ nf (map (\x -> runPutToByteString 4096 (putEth2 x))) [(i,j) | i <- [1..100], j <- [1..100]]
, bench "bytestring builder" $ nf (map (\x -> toLazyByteString (buildEth x))) [(i,j) | i <- [1..100], j <- [1..100]]
]
, bgroup "stdout output under silence"
[ bench "strict put w/buffering w/o reusing" $ silence main2
, bench "strict put w/buffering w/reusing" $ silence main3
, bench "bytestring-builder" $ silence main4
]
]
| qnikst/strictput | bench/SimpleOutput.hs | bsd-3-clause | 4,122 | 0 | 17 | 1,306 | 1,358 | 702 | 656 | 85 | 3 |
{-# LANGUAGE CPP #-}
module TcCanonical(
canonicalize,
unifyDerived,
makeSuperClasses, mkGivensWithSuperClasses,
StopOrContinue(..), stopWith, continueWith
) where
#include "HsVersions.h"
import TcRnTypes
import TcType
import Type
import TcFlatten
import TcSMonad
import TcEvidence
import Class
import TyCon
import TyCoRep -- cleverly decomposes types, good for completeness checking
import Coercion
import FamInstEnv ( FamInstEnvs )
import FamInst ( tcTopNormaliseNewTypeTF_maybe )
import Var
import Name( isSystemName )
import OccName( OccName )
import Outputable
import DynFlags( DynFlags )
import VarSet
import NameSet
import RdrName
import Pair
import Util
import Bag
import MonadUtils
import Control.Monad
import Data.List ( zip4, foldl' )
import BasicTypes
import Data.Bifunctor ( bimap )
{-
************************************************************************
* *
* The Canonicaliser *
* *
************************************************************************
Note [Canonicalization]
~~~~~~~~~~~~~~~~~~~~~~~
Canonicalization converts a simple constraint to a canonical form. It is
unary (i.e. treats individual constraints one at a time), does not do
any zonking, but lives in TcS monad because it needs to create fresh
variables (for flattening) and consult the inerts (for efficiency).
The execution plan for canonicalization is the following:
1) Decomposition of equalities happens as necessary until we reach a
variable or type family in one side. There is no decomposition step
for other forms of constraints.
2) If, when we decompose, we discover a variable on the head then we
look at inert_eqs from the current inert for a substitution for this
variable and continue decomposing. Hence we lazily apply the inert
substitution if it is needed.
3) If no more decomposition is possible, we deeply apply the substitution
from the inert_eqs and continue with flattening.
4) During flattening, we examine whether we have already flattened some
function application by looking at all the CTyFunEqs with the same
function in the inert set. The reason for deeply applying the inert
substitution at step (3) is to maximise our chances of matching an
already flattened family application in the inert.
The net result is that a constraint coming out of the canonicalization
phase cannot be rewritten any further from the inerts (but maybe /it/ can
rewrite an inert or still interact with an inert in a further phase in the
simplifier).
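A small worked example (an illustration added here, not part of the original
Note): canonicalising the non-canonical constraint
    [W] Maybe a ~ Maybe Int
decomposes (step 1) to [W] a ~ Int; if the inert set contains [G] a ~ b, the
inert substitution (steps 2-3) rewrites this to b ~ Int, and, with nothing
left to flatten, we finish with the canonical CTyEqCan  b ~ Int.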
Note [Caching for canonicals]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Our plan with pre-canonicalization is to be able to solve a constraint
really fast from existing bindings in TcEvBinds. So one may think that
the condition (isCNonCanonical) is not necessary. However consider
the following setup:
InertSet = { [W] d1 : Num t }
WorkList = { [W] d2 : Num t, [W] c : t ~ Int}
Now, we prioritize equalities, but in our concrete example
(should_run/mc17.hs) the first (d2) constraint is dealt with first,
because (t ~ Int) is an equality that only later appears in the
worklist since it is pulled out from a nested implication
constraint. So, let's examine what happens:
- We encounter work item (d2 : Num t)
- Nothing is yet in EvBinds, so we reach the interaction with inerts
and set:
d2 := d1
and we discard d2 from the worklist. The inert set remains unaffected.
- Now the equation ([W] c : t ~ Int) is encountered and kicks-out
(d1 : Num t) from the inerts. Then that equation gets
spontaneously solved, perhaps. We end up with:
InertSet : { [G] c : t ~ Int }
WorkList : { [W] d1 : Num t}
- Now we examine (d1), we observe that there is a binding for (Num
t) in the evidence binds and we set:
d1 := d2
and end up in a loop!
Now, the constraints that get kicked out from the inert set are always
Canonical, so by restricting the use of the pre-canonicalizer to
NonCanonical constraints we eliminate this danger. Moreover, for
canonical constraints we already have good caching mechanisms
(effectively the interaction solver) and we are interested in reducing
things like superclasses of the same non-canonical constraint being
generated hence I don't expect us to lose a lot by introducing the
(isCNonCanonical) restriction.
A similar situation can arise in TcSimplify, at the end of the
solve_wanteds function, where constraints from the inert set are
returned as new work -- our substCt ensures however that if they are
not rewritten by subst, they remain canonical and hence we will not
attempt to solve them from the EvBinds. If on the other hand they did
get rewritten and are now non-canonical they will still not match the
EvBinds, so we are again good.
-}
-- Top-level canonicalization
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
canonicalize :: Ct -> TcS (StopOrContinue Ct)
canonicalize ct@(CNonCanonical { cc_ev = ev })
= do { traceTcS "canonicalize (non-canonical)" (ppr ct)
; {-# SCC "canEvVar" #-}
canEvNC ev }
canonicalize (CDictCan { cc_ev = ev, cc_class = cls
, cc_tyargs = xis, cc_pend_sc = pend_sc })
= {-# SCC "canClass" #-}
canClass ev cls xis pend_sc
canonicalize (CTyEqCan { cc_ev = ev
, cc_tyvar = tv
, cc_rhs = xi
, cc_eq_rel = eq_rel })
= {-# SCC "canEqLeafTyVarEq" #-}
canEqNC ev eq_rel (mkTyVarTy tv) xi
-- NB: Don't use canEqTyVar because that expects flattened types,
-- and tv and xi may not be flat w.r.t. an updated inert set
canonicalize (CFunEqCan { cc_ev = ev
, cc_fun = fn
, cc_tyargs = xis1
, cc_fsk = fsk })
= {-# SCC "canEqLeafFunEq" #-}
canCFunEqCan ev fn xis1 fsk
canonicalize (CIrredEvCan { cc_ev = ev })
= canIrred ev
canonicalize (CHoleCan { cc_ev = ev, cc_occ = occ, cc_hole = hole })
= canHole ev occ hole
canEvNC :: CtEvidence -> TcS (StopOrContinue Ct)
-- Called only for non-canonical EvVars
canEvNC ev
= case classifyPredType (ctEvPred ev) of
ClassPred cls tys -> do traceTcS "canEvNC:cls" (ppr cls <+> ppr tys)
canClassNC ev cls tys
EqPred eq_rel ty1 ty2 -> do traceTcS "canEvNC:eq" (ppr ty1 $$ ppr ty2)
canEqNC ev eq_rel ty1 ty2
IrredPred {} -> do traceTcS "canEvNC:irred" (ppr (ctEvPred ev))
canIrred ev
{-
************************************************************************
* *
* Class Canonicalization
* *
************************************************************************
-}
canClassNC :: CtEvidence -> Class -> [Type] -> TcS (StopOrContinue Ct)
-- Precondition: EvVar is class evidence
canClassNC ev cls tys = canClass ev cls tys (has_scs cls)
where
has_scs cls = not (null (classSCTheta cls))
canClass :: CtEvidence -> Class -> [Type] -> Bool -> TcS (StopOrContinue Ct)
-- Precondition: EvVar is class evidence
canClass ev cls tys pend_sc
= -- all classes do *nominal* matching
ASSERT2( ctEvRole ev == Nominal, ppr ev $$ ppr cls $$ ppr tys )
do { (xis, cos) <- flattenManyNom ev tys
; let co = mkTcTyConAppCo Nominal (classTyCon cls) cos
xi = mkClassPred cls xis
mk_ct new_ev = CDictCan { cc_ev = new_ev
, cc_tyargs = xis
, cc_class = cls
, cc_pend_sc = pend_sc }
; mb <- rewriteEvidence ev xi co
; traceTcS "canClass" (vcat [ ppr ev
, ppr xi, ppr mb ])
; return (fmap mk_ct mb) }
{- Note [The superclass story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to add superclass constraints for two reasons:
* For givens, they give us a route to a proof. E.g.
f :: Ord a => a -> Bool
f x = x == x
We get a Wanted (Eq a), which can only be solved from the superclass
of the Given (Ord a).
* For wanteds, they may give useful functional dependencies. E.g.
class C a b | a -> b where ...
class C a b => D a b where ...
Now a Wanted constraint (D Int beta) has (C Int beta) as a superclass
and that might tell us about beta, via C's fundeps. We can get this
by generating a Derived (C Int beta) constraint. It's derived because
we don't actually have to cough up any evidence for it; it's only there
to generate fundep equalities.
See Note [Why adding superclasses can help].
For these reasons we want to generate superclass constraints for both
Givens and Wanteds. But:
* (Minor) they are often not needed, so generating them aggressively
is a waste of time.
* (Major) if we want recursive superclasses, there would be an infinite
number of them. Here is a real-life example (Trac #10318);
class (Frac (Frac a) ~ Frac a,
Fractional (Frac a),
IntegralDomain (Frac a))
=> IntegralDomain a where
type Frac a :: *
Notice that IntegralDomain has an associated type Frac, and one
of IntegralDomain's superclasses is another IntegralDomain constraint.
So here's the plan:
1. Generate superclasses for given (but not wanted) constraints;
see Note [Aggressively expand given superclasses]. However
stop if you encounter the same class twice. That is, expand
eagerly, but have a conservative termination condition: see
Note [Expanding superclasses] in TcType.
2. Solve the wanteds as usual, but do /no/ expansion of superclasses
in solveSimpleGivens or solveSimpleWanteds.
See Note [Danger of adding superclasses during solving]
3. If we have any remaining unsolved wanteds, try harder:
take both the Givens and Wanteds, and expand superclasses again.
This may succeed in generating (a finite number of) extra Givens,
and extra Deriveds. Both may help the proof.
This is done in TcSimplify.expandSuperClasses.
4. Go round to (2) again. This loop (2,3,4) is implemented
in TcSimplify.simpl_loop.
We try to terminate the loop by flagging which class constraints
(given or wanted) are potentially un-expanded. This is what the
cc_pend_sc flag is for in CDictCan. So in Step 3 we only expand
superclasses for constraints with cc_pend_sc set to true (i.e.
isPendingScDict holds).
When we take a CNonCanonical or CIrredCan, but end up classifying it
as a CDictCan, we set the cc_pend_sc flag to False.
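A concrete illustration of the plan (added here, not part of the original
Note): with the standard  class Eq a => Ord a,  a Given (Ord a) is expanded
in step 1 to the extra Given (Eq a); a Wanted (Ord alpha) is left alone by
step 2 but carries cc_pend_sc = True, so step 3 can still generate the
Derived (Eq alpha) if the wanteds get stuck.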
Note [Aggressively expand given superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In step (1) of Note [The superclass story], why do we aggressively
expand Given superclasses by one layer? Mainly because of some very
obscure cases like this:
instance Bad a => Eq (T a)
f :: (Ord (T a)) => blah
f x = ....needs Eq (T a), Ord (T a)....
Here if we can't satisfy (Eq (T a)) from the givens we'll use the
instance declaration; but then we are stuck with (Bad a). Sigh.
This is really a case of non-confluent proofs, but to stop our users
complaining we expand one layer in advance.
Note [Why adding superclasses can help]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Examples of how adding superclasses can help:
--- Example 1
class C a b | a -> b
Suppose we want to solve
[G] C a b
[W] C a beta
Then adding [D] beta~b will let us solve it.
-- Example 2 (similar but using a type-equality superclass)
class (F a ~ b) => C a b
And try to solve:
[G] C a b
[W] C a beta
Follow the superclass rules to add
[G] F a ~ b
[D] F a ~ beta
Now we get [D] beta ~ b, and can solve that.
-- Example (tcfail138)
class L a b | a -> b
class (G a, L a b) => C a b
instance C a b' => G (Maybe a)
instance C a b => C (Maybe a) a
instance L (Maybe a) a
When solving the superclasses of the (C (Maybe a) a) instance, we get
[G] C a b, and hence by superclasses, [G] G a, [G] L a b
[W] G (Maybe a)
Use the instance decl to get
[W] C a beta
Generate its derived superclass
[D] L a beta. Now using fundeps, combine with [G] L a b to get
[D] beta ~ b
which is what we want.
Note [Danger of adding superclasses during solving]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's a serious, but now out-dated example, from Trac #4497:
class Num (RealOf t) => Normed t
type family RealOf x
Assume the generated wanted constraint is:
[W] RealOf e ~ e
[W] Normed e
If we were to be adding the superclasses during simplification we'd get:
[W] RealOf e ~ e
[W] Normed e
[D] RealOf e ~ fuv
[D] Num fuv
==>
e := fuv, Num fuv, Normed fuv, RealOf fuv ~ fuv
This looks exactly like our original constraint. If we add the
superclass of (Normed fuv) again we'd loop. By adding superclasses
definitely only once, during canonicalisation, this situation can't
happen.
Mind you, now that Wanteds cannot rewrite Derived, I think this particular
situation can't happen.
-}
mkGivensWithSuperClasses :: CtLoc -> [EvId] -> TcS [Ct]
-- From a given EvId, make its Ct, plus the Ct's of its superclasses
-- See Note [The superclass story]
-- The loop-breaking here follows Note [Expanding superclasses] in TcType
mkGivensWithSuperClasses loc ev_ids = concatMapM go ev_ids
where
go ev_id = mk_superclasses emptyNameSet $
CtGiven { ctev_evar = ev_id
, ctev_pred = evVarPred ev_id
, ctev_loc = loc }
makeSuperClasses :: [Ct] -> TcS [Ct]
-- Returns strict superclasses, transitively; see Note [The superclass story]
-- The loop-breaking here follows Note [Expanding superclasses] in TcType
makeSuperClasses cts = concatMapM go cts
where
go (CDictCan { cc_ev = ev, cc_class = cls, cc_tyargs = tys })
= mk_strict_superclasses emptyNameSet ev cls tys
go ct = pprPanic "makeSuperClasses" (ppr ct)
mk_superclasses :: NameSet -> CtEvidence -> TcS [Ct]
-- Return this constraint, plus its superclasses, if any
mk_superclasses rec_clss ev
| ClassPred cls tys <- classifyPredType (ctEvPred ev)
= mk_superclasses_of rec_clss ev cls tys
| otherwise -- Superclass is not a class predicate
= return [mkNonCanonical ev]
mk_superclasses_of :: NameSet -> CtEvidence -> Class -> [Type] -> TcS [Ct]
-- Return this class constraint, plus its superclasses
mk_superclasses_of rec_clss ev cls tys
| loop_found
= return [this_ct]
| otherwise
= do { sc_cts <- mk_strict_superclasses rec_clss' ev cls tys
; return (this_ct : sc_cts) }
where
cls_nm = className cls
loop_found = cls_nm `elemNameSet` rec_clss
rec_clss' | isCTupleClass cls = rec_clss -- Never contribute to recursion
| otherwise = rec_clss `extendNameSet` cls_nm
this_ct = CDictCan { cc_ev = ev, cc_class = cls, cc_tyargs = tys
, cc_pend_sc = loop_found }
mk_strict_superclasses :: NameSet -> CtEvidence -> Class -> [Type] -> TcS [Ct]
mk_strict_superclasses rec_clss ev cls tys
| CtGiven { ctev_evar = evar, ctev_loc = loc } <- ev
= do { sc_evs <- newGivenEvVars (mk_given_loc loc)
(mkEvScSelectors (EvId evar) cls tys)
; concatMapM (mk_superclasses rec_clss) sc_evs }
| isEmptyVarSet (tyCoVarsOfTypes tys)
= return [] -- Wanteds with no variables yield no deriveds.
-- See Note [Improvement from Ground Wanteds]
| otherwise -- Wanted/Derived case, just add those SC that can lead to improvement.
= do { let loc = ctEvLoc ev
; sc_evs <- mapM (newDerivedNC loc) (immSuperClasses cls tys)
; concatMapM (mk_superclasses rec_clss) sc_evs }
where
size = sizeTypes tys
mk_given_loc loc
| isCTupleClass cls
= loc -- For tuple predicates, just take them apart, without
-- adding their (large) size into the chain. When we
-- get down to a base predicate, we'll include its size.
-- Trac #10335
| GivenOrigin skol_info <- ctLocOrigin loc
-- See Note [Solving superclass constraints] in TcInstDcls
-- for an explanation of this transformation for givens
= case skol_info of
InstSkol -> loc { ctl_origin = GivenOrigin (InstSC size) }
InstSC n -> loc { ctl_origin = GivenOrigin (InstSC (n `max` size)) }
_ -> loc
| otherwise -- Probably doesn't happen, since this function
= loc -- is only used for Givens, but does no harm
{-
************************************************************************
* *
* Irreducibles canonicalization
* *
************************************************************************
-}
canIrred :: CtEvidence -> TcS (StopOrContinue Ct)
-- Precondition: ty not a tuple and no other evidence form
canIrred old_ev
= do { let old_ty = ctEvPred old_ev
; traceTcS "can_pred" (text "IrredPred = " <+> ppr old_ty)
; (xi,co) <- flatten FM_FlattenAll old_ev old_ty -- co :: xi ~ old_ty
; rewriteEvidence old_ev xi co `andWhenContinue` \ new_ev ->
do { -- Re-classify, in case flattening has improved its shape
; case classifyPredType (ctEvPred new_ev) of
ClassPred cls tys -> canClassNC new_ev cls tys
EqPred eq_rel ty1 ty2 -> canEqNC new_ev eq_rel ty1 ty2
_ -> continueWith $
CIrredEvCan { cc_ev = new_ev } } }
canHole :: CtEvidence -> OccName -> HoleSort -> TcS (StopOrContinue Ct)
canHole ev occ hole_sort
= do { let ty = ctEvPred ev
; (xi,co) <- flatten FM_SubstOnly ev ty -- co :: xi ~ ty
; rewriteEvidence ev xi co `andWhenContinue` \ new_ev ->
do { emitInsoluble (CHoleCan { cc_ev = new_ev
, cc_occ = occ
, cc_hole = hole_sort })
; stopWith new_ev "Emit insoluble hole" } }
{-
************************************************************************
* *
* Equalities
* *
************************************************************************
Note [Canonicalising equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In order to canonicalise an equality, we look at the structure of the
two types at hand, looking for similarities. A difficulty is that the
types may look dissimilar before flattening but similar after flattening.
However, we don't just want to jump in and flatten right away, because
this might be wasted effort. So, after looking for similarities and failing,
we flatten and then try again. Of course, we don't want to loop, so we
track whether or not we've already flattened.
It is conceivable to do a better job at tracking whether or not a type
is flattened, but this is left as future work. (Mar '15)
-}
canEqNC :: CtEvidence -> EqRel -> Type -> Type -> TcS (StopOrContinue Ct)
canEqNC ev eq_rel ty1 ty2
= do { result <- zonk_eq_types ty1 ty2
; case result of
Left (Pair ty1' ty2') -> can_eq_nc False ev eq_rel ty1' ty1 ty2' ty2
Right ty -> canEqReflexive ev eq_rel ty }
can_eq_nc
:: Bool -- True => both types are flat
-> CtEvidence
-> EqRel
-> Type -> Type -- LHS, after and before type-synonym expansion, resp
-> Type -> Type -- RHS, after and before type-synonym expansion, resp
-> TcS (StopOrContinue Ct)
can_eq_nc flat ev eq_rel ty1 ps_ty1 ty2 ps_ty2
= do { traceTcS "can_eq_nc" $
vcat [ ppr ev, ppr eq_rel, ppr ty1, ppr ps_ty1, ppr ty2, ppr ps_ty2 ]
; rdr_env <- getGlobalRdrEnvTcS
; fam_insts <- getFamInstEnvs
; can_eq_nc' flat rdr_env fam_insts ev eq_rel ty1 ps_ty1 ty2 ps_ty2 }
can_eq_nc'
:: Bool -- True => both input types are flattened
-> GlobalRdrEnv -- needed to see which newtypes are in scope
-> FamInstEnvs -- needed to unwrap data instances
-> CtEvidence
-> EqRel
-> Type -> Type -- LHS, after and before type-synonym expansion, resp
-> Type -> Type -- RHS, after and before type-synonym expansion, resp
-> TcS (StopOrContinue Ct)
-- Expand synonyms first; see Note [Type synonyms and canonicalization]
can_eq_nc' flat _rdr_env _envs ev eq_rel ty1 ps_ty1 ty2 ps_ty2
| Just ty1' <- coreView ty1 = can_eq_nc flat ev eq_rel ty1' ps_ty1 ty2 ps_ty2
| Just ty2' <- coreView ty2 = can_eq_nc flat ev eq_rel ty1 ps_ty1 ty2' ps_ty2
-- need to check for reflexivity in the ReprEq case.
-- See Note [Eager reflexivity check]
-- Check only when flat because the zonk_eq_types check in canEqNC takes
-- care of the non-flat case.
can_eq_nc' True _rdr_env _envs ev ReprEq ty1 _ ty2 _
| ty1 `eqType` ty2
= canEqReflexive ev ReprEq ty1
-- When working with ReprEq, unwrap newtypes.
can_eq_nc' _flat rdr_env envs ev ReprEq ty1 _ ty2 ps_ty2
| Just (co, ty1') <- tcTopNormaliseNewTypeTF_maybe envs rdr_env ty1
= can_eq_newtype_nc rdr_env ev NotSwapped co ty1 ty1' ty2 ps_ty2
can_eq_nc' _flat rdr_env envs ev ReprEq ty1 ps_ty1 ty2 _
| Just (co, ty2') <- tcTopNormaliseNewTypeTF_maybe envs rdr_env ty2
= can_eq_newtype_nc rdr_env ev IsSwapped co ty2 ty2' ty1 ps_ty1
-- Then, get rid of casts
can_eq_nc' flat _rdr_env _envs ev eq_rel (CastTy ty1 co1) _ ty2 ps_ty2
= canEqCast flat ev eq_rel NotSwapped ty1 co1 ty2 ps_ty2
can_eq_nc' flat _rdr_env _envs ev eq_rel ty1 ps_ty1 (CastTy ty2 co2) _
= canEqCast flat ev eq_rel IsSwapped ty2 co2 ty1 ps_ty1
----------------------
-- Otherwise try to decompose
----------------------
-- Literals
can_eq_nc' _flat _rdr_env _envs ev eq_rel ty1@(LitTy l1) _ (LitTy l2) _
| l1 == l2
= do { setEqIfWanted ev (mkReflCo (eqRelRole eq_rel) ty1)
; stopWith ev "Equal LitTy" }
-- Try to decompose type constructor applications
-- Including FunTy (s -> t)
can_eq_nc' _flat _rdr_env _envs ev eq_rel ty1 _ ty2 _
| Just (tc1, tys1) <- tcRepSplitTyConApp_maybe ty1
, Just (tc2, tys2) <- tcRepSplitTyConApp_maybe ty2
, not (isTypeFamilyTyCon tc1)
, not (isTypeFamilyTyCon tc2)
= canTyConApp ev eq_rel tc1 tys1 tc2 tys2
can_eq_nc' _flat _rdr_env _envs ev eq_rel
s1@(ForAllTy (Named {}) _) _ s2@(ForAllTy (Named {}) _) _
| CtWanted { ctev_loc = loc, ctev_dest = orig_dest } <- ev
= do { let (bndrs1,body1) = tcSplitNamedPiTys s1
(bndrs2,body2) = tcSplitNamedPiTys s2
; if not (equalLength bndrs1 bndrs2)
|| not (map binderVisibility bndrs1 == map binderVisibility bndrs2)
then canEqHardFailure ev s1 s2
else
do { traceTcS "Creating implication for polytype equality" $ ppr ev
; kind_cos <- zipWithM (unifyWanted loc Nominal)
(map binderType bndrs1) (map binderType bndrs2)
; all_co <- deferTcSForAllEq (eqRelRole eq_rel) loc
kind_cos (bndrs1,body1) (bndrs2,body2)
; setWantedEq orig_dest all_co
; stopWith ev "Deferred polytype equality" } }
| otherwise
= do { traceTcS "Omitting decomposition of given polytype equality" $
pprEq s1 s2 -- See Note [Do not decompose given polytype equalities]
; stopWith ev "Discard given polytype equality" }
-- See Note [Canonicalising type applications] about why we require flat types
can_eq_nc' True _rdr_env _envs ev eq_rel (AppTy t1 s1) _ ty2 _
| Just (t2, s2) <- tcSplitAppTy_maybe ty2
= can_eq_app ev eq_rel t1 s1 t2 s2
can_eq_nc' True _rdr_env _envs ev eq_rel ty1 _ (AppTy t2 s2) _
| Just (t1, s1) <- tcSplitAppTy_maybe ty1
= can_eq_app ev eq_rel t1 s1 t2 s2
-- No similarity in type structure detected. Flatten and try again.
can_eq_nc' False rdr_env envs ev eq_rel _ ps_ty1 _ ps_ty2
= do { (xi1, co1) <- flatten FM_FlattenAll ev ps_ty1
; (xi2, co2) <- flatten FM_FlattenAll ev ps_ty2
; rewriteEqEvidence ev NotSwapped xi1 xi2 co1 co2
`andWhenContinue` \ new_ev ->
can_eq_nc' True rdr_env envs new_ev eq_rel xi1 xi1 xi2 xi2 }
-- Type variable on LHS or RHS are last. We want only flat types sent
-- to canEqTyVar.
-- See also Note [No top-level newtypes on RHS of representational equalities]
can_eq_nc' True _rdr_env _envs ev eq_rel (TyVarTy tv1) _ _ ps_ty2
= canEqTyVar ev eq_rel NotSwapped tv1 ps_ty2
can_eq_nc' True _rdr_env _envs ev eq_rel _ ps_ty1 (TyVarTy tv2) _
= canEqTyVar ev eq_rel IsSwapped tv2 ps_ty1
-- We've flattened and the types don't match. Give up.
can_eq_nc' True _rdr_env _envs ev _eq_rel _ ps_ty1 _ ps_ty2
= do { traceTcS "can_eq_nc' catch-all case" (ppr ps_ty1 $$ ppr ps_ty2)
; canEqHardFailure ev ps_ty1 ps_ty2 }
---------------------------------
-- | Compare types for equality, while zonking as necessary. Gives up
-- as soon as it finds that two types are not equal.
-- This is quite handy when some unification has made two
-- types in an inert wanted to be equal. We can discover the equality without
-- flattening, which is sometimes very expensive (in the case of type functions).
-- In particular, this function makes a ~20% improvement in test case
-- perf/compiler/T5030.
--
-- Returns either the (partially zonked) types in the case of
-- inequality, or the one type in the case of equality. canEqReflexive is
-- a good next step in the 'Right' case. Returning 'Left' is always safe.
--
-- NB: This does *not* look through type synonyms. In fact, it treats type
-- synonyms as rigid constructors. In the future, it might be convenient
-- to look at only those arguments of type synonyms that actually appear
-- in the synonym RHS. But we're not there yet.
zonk_eq_types :: TcType -> TcType -> TcS (Either (Pair TcType) TcType)
zonk_eq_types = go
where
go (TyVarTy tv1) (TyVarTy tv2) = tyvar_tyvar tv1 tv2
go (TyVarTy tv1) ty2 = tyvar NotSwapped tv1 ty2
go ty1 (TyVarTy tv2) = tyvar IsSwapped tv2 ty1
go ty1 ty2
| Just (tc1, tys1) <- tcRepSplitTyConApp_maybe ty1
, Just (tc2, tys2) <- tcRepSplitTyConApp_maybe ty2
, tc1 == tc2
= tycon tc1 tys1 tys2
go ty1 ty2
| Just (ty1a, ty1b) <- tcRepSplitAppTy_maybe ty1
, Just (ty2a, ty2b) <- tcRepSplitAppTy_maybe ty2
= do { res_a <- go ty1a ty2a
; res_b <- go ty1b ty2b
; return $ combine_rev mkAppTy res_b res_a }
go ty1@(LitTy lit1) (LitTy lit2)
| lit1 == lit2
= return (Right ty1)
go ty1 ty2 = return $ Left (Pair ty1 ty2)
-- we don't handle more complex forms here
tyvar :: SwapFlag -> TcTyVar -> TcType
-> TcS (Either (Pair TcType) TcType)
-- try to do as little as possible, as anything we do here is redundant
-- with flattening. In particular, no need to zonk kinds. That's why
-- we don't use the already-defined zonking functions
tyvar swapped tv ty
= case tcTyVarDetails tv of
MetaTv { mtv_ref = ref }
-> do { cts <- readTcRef ref
; case cts of
Flexi -> give_up
Indirect ty' -> unSwap swapped go ty' ty }
_ -> give_up
where
give_up = return $ Left $ unSwap swapped Pair (mkTyVarTy tv) ty
tyvar_tyvar tv1 tv2
| tv1 == tv2 = return (Right (mkTyVarTy tv1))
| otherwise = do { (ty1', progress1) <- quick_zonk tv1
; (ty2', progress2) <- quick_zonk tv2
; if progress1 || progress2
then go ty1' ty2'
else return $ Left (Pair (TyVarTy tv1) (TyVarTy tv2)) }
quick_zonk tv = case tcTyVarDetails tv of
MetaTv { mtv_ref = ref }
-> do { cts <- readTcRef ref
; case cts of
Flexi -> return (TyVarTy tv, False)
Indirect ty' -> return (ty', True) }
_ -> return (TyVarTy tv, False)
-- This happens for type families, too. But recall that failure
-- here just means to try harder, so it's OK if the type function
-- isn't injective.
tycon :: TyCon -> [TcType] -> [TcType]
-> TcS (Either (Pair TcType) TcType)
tycon tc tys1 tys2
= do { results <- zipWithM go tys1 tys2
; return $ case combine_results results of
Left tys -> Left (mkTyConApp tc <$> tys)
Right tys -> Right (mkTyConApp tc tys) }
combine_results :: [Either (Pair TcType) TcType]
-> Either (Pair [TcType]) [TcType]
combine_results = bimap (fmap reverse) reverse .
foldl' (combine_rev (:)) (Right [])
-- combine (in reverse) a new result onto an already-combined result
combine_rev :: (a -> b -> c)
-> Either (Pair b) b
-> Either (Pair a) a
-> Either (Pair c) c
combine_rev f (Left list) (Left elt) = Left (f <$> elt <*> list)
combine_rev f (Left list) (Right ty) = Left (f <$> pure ty <*> list)
combine_rev f (Right tys) (Left elt) = Left (f <$> elt <*> pure tys)
combine_rev f (Right tys) (Right ty) = Right (f ty tys)
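-- A worked example of the helpers above (illustration only, not from the
-- original source): with
--   results = [Right Int, Left (Pair a b), Right Char]
-- we get
--   combine_results results = Left (Pair [Int, a, Char] [Int, b, Char])
-- i.e. a single mismatching argument turns the whole spine into Left, while
-- the matching positions are kept, in order, on both sides of the Pair.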
{-
Note [Newtypes can blow the stack]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
newtype X = MkX (Int -> X)
newtype Y = MkY (Int -> Y)
and now wish to prove
[W] X ~R Y
This Wanted will loop, expanding out the newtypes ever deeper looking
for a solid match or a solid discrepancy. Indeed, there is something
appropriate to this looping, because X and Y *do* have the same representation,
in the limit -- they're both (Fix ((->) Int)). However, no finitely-sized
coercion will ever witness it. This loop won't actually cause GHC to hang,
though, because we check our depth when unwrapping newtypes.
Note [Eager reflexivity check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
newtype X = MkX (Int -> X)
and
[W] X ~R X
Naively, we would start unwrapping X and end up in a loop. Instead,
we do this eager reflexivity check. This is necessary only for representational
equality because the flattener technology deals with the similar case
(recursive type families) for nominal equality.
Note that this check does not catch all cases, but it will catch the cases
we're most worried about, types like X above that are actually inhabited.
Here's another place where this reflexivity check is key:
Consider trying to prove (f a) ~R (f a). The AppTys in there can't
be decomposed, because representational equality isn't congruent with respect
to AppTy. So, when canonicalising the equality above, we get stuck and
would normally produce a CIrredEvCan. However, we really do want to
be able to solve (f a) ~R (f a). So, in the representational case only,
we do a reflexivity check.
(This would be sound in the nominal case, but unnecessary, and I [Richard
E.] am worried that it would slow down the common case.)
-}
------------------------
-- | We're able to unwrap a newtype. Update the bits accordingly.
can_eq_newtype_nc :: GlobalRdrEnv
-> CtEvidence -- ^ :: ty1 ~ ty2
-> SwapFlag
-> TcCoercion -- ^ :: ty1 ~ ty1'
-> TcType -- ^ ty1
-> TcType -- ^ ty1'
-> TcType -- ^ ty2
-> TcType -- ^ ty2, with type synonyms
-> TcS (StopOrContinue Ct)
can_eq_newtype_nc rdr_env ev swapped co ty1 ty1' ty2 ps_ty2
= do { traceTcS "can_eq_newtype_nc" $
vcat [ ppr ev, ppr swapped, ppr co, ppr ty1', ppr ty2 ]
-- check for blowing our stack:
-- See Note [Newtypes can blow the stack]
; checkReductionDepth (ctEvLoc ev) ty1
; addUsedDataCons rdr_env (tyConAppTyCon ty1)
-- we have actually used the newtype constructor here, so
-- make sure we don't warn about importing it!
; rewriteEqEvidence ev swapped ty1' ps_ty2
(mkTcSymCo co) (mkTcReflCo Representational ps_ty2)
`andWhenContinue` \ new_ev ->
can_eq_nc False new_ev ReprEq ty1' ty1' ty2 ps_ty2 }
---------
-- ^ Decompose a type application.
-- All input types must be flat. See Note [Canonicalising type applications]
can_eq_app :: CtEvidence -- :: s1 t1 ~r s2 t2
-> EqRel -- r
-> Xi -> Xi -- s1 t1
-> Xi -> Xi -- s2 t2
-> TcS (StopOrContinue Ct)
-- AppTys only decompose for nominal equality, so this case just leads
-- to an irreducible constraint; see typecheck/should_compile/T10494
-- See Note [Decomposing equality], note {4}
can_eq_app ev ReprEq _ _ _ _
= do { traceTcS "failing to decompose representational AppTy equality" (ppr ev)
; continueWith (CIrredEvCan { cc_ev = ev }) }
-- no need to call canEqFailure, because that flattens, and the
-- types involved here are already flat
can_eq_app ev NomEq s1 t1 s2 t2
| CtDerived { ctev_loc = loc } <- ev
= do { unifyDeriveds loc [Nominal, Nominal] [s1, t1] [s2, t2]
; stopWith ev "Decomposed [D] AppTy" }
| CtWanted { ctev_dest = dest, ctev_loc = loc } <- ev
= do { co_s <- unifyWanted loc Nominal s1 s2
; co_t <- unifyWanted loc Nominal t1 t2
; let co = mkAppCo co_s co_t
; setWantedEq dest co
; stopWith ev "Decomposed [W] AppTy" }
| CtGiven { ctev_evar = evar, ctev_loc = loc } <- ev
= do { let co = mkTcCoVarCo evar
co_s = mkTcLRCo CLeft co
co_t = mkTcLRCo CRight co
; evar_s <- newGivenEvVar loc ( mkTcEqPredLikeEv ev s1 s2
, EvCoercion co_s )
; evar_t <- newGivenEvVar loc ( mkTcEqPredLikeEv ev t1 t2
, EvCoercion co_t )
; emitWorkNC [evar_t]
; canEqNC evar_s NomEq s1 s2 }
| otherwise -- Can't happen
= error "can_eq_app"
-----------------------
-- | Break apart an equality over a casted type
canEqCast :: Bool -- are both types flat?
-> CtEvidence
-> EqRel
-> SwapFlag
-> TcType -> Coercion -- LHS (res. RHS), the casted type
-> TcType -> TcType -- RHS (res. LHS), both normal and pretty
-> TcS (StopOrContinue Ct)
canEqCast flat ev eq_rel swapped ty1 co1 ty2 ps_ty2
= do { traceTcS "Decomposing cast" (vcat [ ppr ev
, ppr ty1 <+> text "|>" <+> ppr co1
, ppr ps_ty2 ])
; rewriteEqEvidence ev swapped ty1 ps_ty2
(mkTcReflCo role ty1
`mkTcCoherenceRightCo` co1)
(mkTcReflCo role ps_ty2)
`andWhenContinue` \ new_ev ->
can_eq_nc flat new_ev eq_rel ty1 ty1 ty2 ps_ty2 }
where
role = eqRelRole eq_rel
------------------------
canTyConApp :: CtEvidence -> EqRel
-> TyCon -> [TcType]
-> TyCon -> [TcType]
-> TcS (StopOrContinue Ct)
-- See Note [Decomposing TyConApps]
canTyConApp ev eq_rel tc1 tys1 tc2 tys2
| tc1 == tc2
, length tys1 == length tys2
= do { inerts <- getTcSInerts
; if can_decompose inerts
then do { traceTcS "canTyConApp"
(ppr ev $$ ppr eq_rel $$ ppr tc1 $$ ppr tys1 $$ ppr tys2)
; canDecomposableTyConAppOK ev eq_rel tc1 tys1 tys2
; stopWith ev "Decomposed TyConApp" }
else canEqFailure ev eq_rel ty1 ty2 }
-- Fail straight away for better error messages
-- See Note [Use canEqFailure in canDecomposableTyConApp]
| eq_rel == ReprEq && not (isGenerativeTyCon tc1 Representational &&
isGenerativeTyCon tc2 Representational)
= canEqFailure ev eq_rel ty1 ty2
| otherwise
= canEqHardFailure ev ty1 ty2
where
ty1 = mkTyConApp tc1 tys1
ty2 = mkTyConApp tc2 tys2
loc = ctEvLoc ev
pred = ctEvPred ev
-- See Note [Decomposing equality]
can_decompose inerts
= isInjectiveTyCon tc1 (eqRelRole eq_rel)
|| (ctEvFlavour ev /= Given && isEmptyBag (matchableGivens loc pred inerts))
{-
Note [Use canEqFailure in canDecomposableTyConApp]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must use canEqFailure, not canEqHardFailure here, because there is
the possibility of success if working with a representational equality.
Here is one case:
type family TF a where TF Char = Bool
data family DF a
newtype instance DF Bool = MkDF Int
Suppose we are canonicalising (Int ~R DF (TF a)), where we don't yet
know `a`. This is *not* a hard failure, because we might soon learn
that `a` is, in fact, Char, and then the equality succeeds.
Here is another case:
[G] Age ~R Int
where Age's constructor is not in scope. We don't want to report
an "inaccessible code" error in the context of this Given!
For example, see typecheck/should_compile/T10493, repeated here:
import Data.Ord (Down) -- no constructor
foo :: Coercible (Down Int) Int => Down Int -> Int
foo = coerce
That should compile, but only because we use canEqFailure and not
canEqHardFailure.
Note [Decomposing equality]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have a constraint (of any flavour and role) that looks like
T tys1 ~ T tys2, what can we conclude about tys1 and tys2? The answer,
of course, is "it depends". This Note spells it all out.
In this Note, "decomposition" refers to taking the constraint
[fl] (T tys1 ~X T tys2)
(for some flavour fl and some role X) and replacing it with
[fls'] (tys1 ~Xs' tys2)
where that notation indicates a list of new constraints, where the
new constraints may have different flavours and different roles.
The key property to consider is injectivity. When decomposing a Given the
decomposition is sound if and only if T is injective in all of its type
arguments. When decomposing a Wanted, the decomposition is sound (assuming the
correct roles in the produced equality constraints), but it may be a guess --
that is, an unforced decision by the constraint solver. Decomposing Wanteds
over injective TyCons does not entail guessing. But sometimes we want to
decompose a Wanted even when the TyCon involved is not injective! (See below.)
So, in broad strokes, we want this rule:
(*) Decompose a constraint (T tys1 ~X T tys2) if and only if T is injective
at role X.
Pursuing the details requires exploring three axes:
* Flavour: Given vs. Derived vs. Wanted
* Role: Nominal vs. Representational
* TyCon species: datatype vs. newtype vs. data family vs. type family vs. type variable
(So a type variable isn't a TyCon, but it's convenient to put the AppTy case
in the same table.)
Right away, we can say that Derived behaves just as Wanted for the purposes
of decomposition. The difference between Derived and Wanted is the handling of
evidence. Since decomposition in these cases isn't a matter of soundness but of
guessing, we want the same behavior regardless of evidence.
Here is a table (discussion following) detailing where decomposition of
(T s1 ... sn) ~r (T t1 .. tn)
is allowed. The first four lines (Data types ... type family) refer
to TyConApps with various TyCons T; the last line is for AppTy, where
there is presumably a type variable at the head, so it's actually
(s s1 ... sn) ~r (t t1 .. tn)
NOMINAL GIVEN WANTED
Datatype YES YES
Newtype YES YES
Data family YES YES
Type family YES, in injective args{1} YES, in injective args{1}
Type variable YES YES
REPRESENTATIONAL GIVEN WANTED
Datatype YES YES
Newtype NO{2} MAYBE{2}
Data family NO{3} MAYBE{3}
Type family NO NO
Type variable NO{4} NO{4}
{1}: Type families can be injective in some, but not all, of their arguments,
so we want to do partial decomposition. This is quite different than the way
other decomposition is done, where the decomposed equalities replace the original
one. We thus proceed much like we do with superclasses: emitting new Givens
when "decomposing" a partially-injective type family Given and new Deriveds
when "decomposing" a partially-injective type family Wanted. (As of the time of
writing, 13 June 2015, the implementation of injective type families has not
been merged, but it should be soon. Please delete this parenthetical if the
implementation is indeed merged.)
{2}: See Note [Decomposing newtypes at representational role]
{3}: Because of the possibility of newtype instances, we must treat
data families like newtypes. See also Note [Decomposing newtypes at
representational role]. See #10534 and test case
typecheck/should_fail/T10534.
{4}: Because type variables can stand in for newtypes, we conservatively do not
decompose AppTys over representational equality.
In the implementation of can_eq_nc and friends, we don't directly pattern
match using lines like in the tables above, as those tables don't cover
all cases (what about PrimTyCon? tuples?). Instead we just ask about injectivity,
boiling the tables above down to rule (*). The exceptions to rule (*) are for
injective type families, which are handled separately from other decompositions,
and the MAYBE entries above.
Note [Decomposing newtypes at representational role]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This note discusses the 'newtype' line in the REPRESENTATIONAL table
in Note [Decomposing equality]. (At nominal role, newtypes are fully
decomposable.)
Here is a representative example of why representational equality over
newtypes is tricky:
newtype Nt a = Mk Bool -- NB: a is not used in the RHS,
type role Nt representational -- but the user gives it an R role anyway
If we have [W] Nt alpha ~R Nt beta, we *don't* want to decompose to
[W] alpha ~R beta, because it's possible that alpha and beta aren't
representationally equal. Here's another example.
newtype Nt a = MkNt (Id a)
type family Id a where Id a = a
[W] Nt Int ~R Nt Age
Because of its use of a type family, Nt's parameter will get inferred to have
a nominal role. Thus, decomposing the wanted will yield [W] Int ~N Age, which
is unsatisfiable. Unwrapping, though, leads to a solution.
Conclusion:
* Unwrap newtypes before attempting to decompose them.
This is done in can_eq_nc'.
It all comes from the fact that newtypes aren't necessarily injective
w.r.t. representational equality.
Furthermore, as explained in Note [NthCo and newtypes] in TyCoRep, we can't use
NthCo on representational coercions over newtypes. NthCo comes into play
only when decomposing givens.
Conclusion:
* Do not decompose [G] N s ~R N t
Is it sensible to decompose *Wanted* constraints over newtypes? Yes!
It's the only way we could ever prove (IO Int ~R IO Age), recalling
that IO is a newtype.
However we must be careful. Consider
type role Nt representational
[G] Nt a ~R Nt b (1)
[W] Nt alpha ~R Nt b (2)
[W] alpha ~ a (3)
If we focus on (3) first, we'll substitute in (2), and now it's
identical to the given (1), so we succeed. But if we focus on (2)
first, and decompose it, we'll get (alpha ~R b), which is not soluble.
This is exactly like the question of overlapping Givens for class
constraints: see Note [Instance and Given overlap] in TcInteract.
Conclusion:
* Decompose [W] N s ~R N t iff there is no given constraint that could
later solve it.
-}
canDecomposableTyConAppOK :: CtEvidence -> EqRel
-> TyCon -> [TcType] -> [TcType]
-> TcS ()
-- Precondition: tys1 and tys2 are the same length, hence "OK"
canDecomposableTyConAppOK ev eq_rel tc tys1 tys2
= case ev of
CtDerived {}
-> unifyDeriveds loc tc_roles tys1 tys2
CtWanted { ctev_dest = dest }
-> do { cos <- zipWith4M unifyWanted new_locs tc_roles tys1 tys2
; setWantedEq dest (mkTyConAppCo role tc cos) }
CtGiven { ctev_evar = evar }
-> do { let ev_co = mkCoVarCo evar
; given_evs <- newGivenEvVars loc $
[ ( mkPrimEqPredRole r ty1 ty2
, EvCoercion (mkNthCo i ev_co) )
| (r, ty1, ty2, i) <- zip4 tc_roles tys1 tys2 [0..]
, r /= Phantom
, not (isCoercionTy ty1) && not (isCoercionTy ty2) ]
; emitWorkNC given_evs }
where
loc = ctEvLoc ev
role = eqRelRole eq_rel
tc_roles = tyConRolesX role tc
-- the following makes a better distinction between "kind" and "type"
-- in error messages
(bndrs, _) = splitPiTys (tyConKind tc)
kind_loc = toKindLoc loc
is_kinds = map isNamedBinder bndrs
new_locs | Just KindLevel <- ctLocTypeOrKind_maybe loc
= repeat loc
| otherwise
= map (\is_kind -> if is_kind then kind_loc else loc) is_kinds
-- | Call when canonicalizing an equality fails, but if the equality is
-- representational, there is some hope for the future.
-- Examples in Note [Use canEqFailure in canDecomposableTyConApp]
canEqFailure :: CtEvidence -> EqRel
-> TcType -> TcType -> TcS (StopOrContinue Ct)
canEqFailure ev NomEq ty1 ty2
= canEqHardFailure ev ty1 ty2
canEqFailure ev ReprEq ty1 ty2
= do { (xi1, co1) <- flatten FM_FlattenAll ev ty1
; (xi2, co2) <- flatten FM_FlattenAll ev ty2
-- We must flatten the types before putting them in the
-- inert set, so that we are sure to kick them out when
-- new equalities become available
; traceTcS "canEqFailure with ReprEq" $
vcat [ ppr ev, ppr ty1, ppr ty2, ppr xi1, ppr xi2 ]
; rewriteEqEvidence ev NotSwapped xi1 xi2 co1 co2
`andWhenContinue` \ new_ev ->
continueWith (CIrredEvCan { cc_ev = new_ev }) }
-- | Call when canonicalizing an equality fails with utterly no hope.
canEqHardFailure :: CtEvidence
-> TcType -> TcType -> TcS (StopOrContinue Ct)
-- See Note [Make sure that insolubles are fully rewritten]
canEqHardFailure ev ty1 ty2
= do { (s1, co1) <- flatten FM_SubstOnly ev ty1
; (s2, co2) <- flatten FM_SubstOnly ev ty2
; rewriteEqEvidence ev NotSwapped s1 s2 co1 co2
`andWhenContinue` \ new_ev ->
do { emitInsoluble (mkNonCanonical new_ev)
; stopWith new_ev "Definitely not equal" }}
{-
Note [Decomposing TyConApps]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we see (T s1 t1 ~ T s2 t2), then we can just decompose to
(s1 ~ s2, t1 ~ t2)
and push those back into the work list. But if
s1 = K k1 s2 = K k2
then we will just decompose s1~s2, and it might be better to
do so on the spot. An important special case is where s1=s2,
and we get just Refl.
So canDecomposableTyCon is a fast-path decomposition that uses
unifyWanted etc to short-cut that work.
Note [Canonicalising type applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given (s1 t1) ~ ty2, how should we proceed?
The simple thing is to see if ty2 is of the form (s2 t2), and
decompose. By this time s1 and s2 can't be saturated type
function applications, because those have been dealt with
by an earlier equation in can_eq_nc, so it is always sound to
decompose.
However, over-eager decomposition gives bad error messages
for things like
a b ~ Maybe c
e f ~ p -> q
Suppose (in the first example) we already know a~Array. Then if we
decompose the application eagerly, yielding
a ~ Maybe
b ~ c
we get an error "Can't match Array ~ Maybe",
but we'd prefer to get "Can't match Array b ~ Maybe c".
So instead can_eq_wanted_app flattens the LHS and RHS, in the hope of
replacing (a b) by (Array b), before using try_decompose_app to
decompose it.
Note [Make sure that insolubles are fully rewritten]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When an equality fails, we still want to rewrite the equality
all the way down, so that it accurately reflects
(a) the mutable reference substitution in force at start of solving
(b) any ty-binds in force at this point in solving
See Note [Kick out insolubles] in TcSMonad.
And if we don't do this there is a bad danger that
TcSimplify.applyTyVarDefaulting will find a variable
that has in fact been substituted.
Note [Do not decompose Given polytype equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider [G] (forall a. t1 ~ forall a. t2). Can we decompose this?
No -- what would the evidence look like? So instead we simply discard
this given evidence.
Note [Combining insoluble constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
At this point we have an insoluble constraint, like Int~Bool.
* If it is Wanted, delete it from the cache, so that subsequent
Int~Bool constraints give rise to separate error messages
* But if it is Derived, DO NOT delete from cache. A class constraint
may get kicked out of the inert set, and then have its functional
dependency Derived constraints generated a second time. In that
case we don't want to get two (or more) error messages by
generating two (or more) insoluble fundep constraints from the same
class constraint.
Note [No top-level newtypes on RHS of representational equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we're in this situation:
work item: [W] c1 : a ~R b
inert: [G] c2 : b ~R Id a
where
newtype Id a = Id a
We want to make sure canEqTyVar sees [W] a ~R a, after b is flattened
and the Id newtype is unwrapped. This is assured by requiring only flat
types in canEqTyVar *and* having the newtype-unwrapping check above
the tyvar check in can_eq_nc.
Note [Occurs check error]
~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an occurs check error, are we necessarily hosed? Say our
tyvar is tv1 and the type it appears in is xi2. Because xi2 is function
free, if we're computing w.r.t. nominal equality then, yes, we're
hosed. Nothing good can come from (a ~ [a]). If we're computing w.r.t.
representational equality, this is a little subtler. Once again, (a ~R [a])
is a bad thing, but (a ~R N a) for a newtype N might be just fine. This
means also that (a ~ b a) might be fine, because `b` might become a newtype.
So, we must check: does tv1 appear in xi2 under any type constructor that
is generative w.r.t. representational equality? That's what isTyVarUnderDatatype
does. (The other name I considered, isTyVarUnderTyConGenerativeWrtReprEq was
a bit verbose. And the shorter name gets the point across.)
See also #10715, which induced this addition.
Note [No derived kind equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we're working with a heterogeneous derived equality
[D] (t1 :: k1) ~ (t2 :: k2)
we want to homogenise to establish the kind invariant on CTyEqCans.
But we can't emit [D] k1 ~ k2 because we wouldn't then be able to
use the evidence in the homogenised types. So we emit a wanted
constraint, because we do really need the evidence here.
Thus: no derived kind equalities.
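As a sketch of what happens instead (an illustration added here, not part of
the original Note): from [D] (t1 :: k1) ~ (t2 :: k2), homogeniseRhsKind emits
a Wanted kind equality [W] k1 ~ k2 with coercion kco, and continues with the
homogeneous [D] t1 ~ (t2 |> sym kco).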
-}
canCFunEqCan :: CtEvidence
-> TyCon -> [TcType] -- LHS
-> TcTyVar -- RHS
-> TcS (StopOrContinue Ct)
-- ^ Canonicalise a CFunEqCan. We know that
-- the arg types are already flat,
-- and the RHS is a fsk, which we must *not* substitute.
-- So just substitute in the LHS
canCFunEqCan ev fn tys fsk
= do { (tys', cos) <- flattenManyNom ev tys
-- cos :: tys' ~ tys
; let lhs_co = mkTcTyConAppCo Nominal fn cos
-- :: F tys' ~ F tys
new_lhs = mkTyConApp fn tys'
fsk_ty = mkTyVarTy fsk
; rewriteEqEvidence ev NotSwapped new_lhs fsk_ty
lhs_co (mkTcNomReflCo fsk_ty)
`andWhenContinue` \ ev' ->
do { extendFlatCache fn tys' (ctEvCoercion ev', fsk_ty, ctEvFlavour ev')
; continueWith (CFunEqCan { cc_ev = ev', cc_fun = fn
, cc_tyargs = tys', cc_fsk = fsk }) } }
---------------------
canEqTyVar :: CtEvidence -> EqRel -> SwapFlag
-> TcTyVar -- already flat
-> TcType -- already flat
-> TcS (StopOrContinue Ct)
-- A TyVar on LHS, but so far un-zonked
canEqTyVar ev eq_rel swapped tv1 ps_ty2 -- ev :: tv ~ s2
= do { traceTcS "canEqTyVar" (ppr tv1 $$ ppr ps_ty2 $$ ppr swapped)
-- FM_Avoid commented out: see Note [Lazy flattening] in TcFlatten
-- let fmode = FE { fe_ev = ev, fe_mode = FM_Avoid tv1' True }
-- Flatten the RHS less vigorously, to avoid gratuitous flattening
-- True <=> xi2 should not itself be a type-function application
; dflags <- getDynFlags
; canEqTyVar2 dflags ev eq_rel swapped tv1 ps_ty2 }
canEqTyVar2 :: DynFlags
-> CtEvidence -- lhs ~ rhs (or, if swapped, orhs ~ olhs)
-> EqRel
-> SwapFlag
-> TcTyVar -- lhs, flat
-> TcType -- rhs, flat
-> TcS (StopOrContinue Ct)
-- LHS is an inert type variable,
-- and RHS is fully rewritten, but with type synonyms
-- preserved as much as possible
canEqTyVar2 dflags ev eq_rel swapped tv1 xi2
| Just (tv2, kco2) <- getCastedTyVar_maybe xi2
= canEqTyVarTyVar ev eq_rel swapped tv1 tv2 kco2
| OC_OK xi2' <- occurCheckExpand dflags tv1 xi2 -- No occurs check
-- We use xi2' on the RHS of the new CTyEqCan, a ~ xi2'
-- to establish the invariant that a does not appear in the
-- rhs of the CTyEqCan. This is guaranteed by occurCheckExpand;
-- see Note [Occurs check expansion] in TcType
= rewriteEqEvidence ev swapped xi1 xi2' co1 (mkTcReflCo role xi2')
`andWhenContinue` \ new_ev ->
homogeniseRhsKind new_ev eq_rel xi1 xi2' $ \new_new_ev xi2'' ->
CTyEqCan { cc_ev = new_new_ev, cc_tyvar = tv1
, cc_rhs = xi2'', cc_eq_rel = eq_rel }
| otherwise -- Occurs check error
= do { traceTcS "canEqTyVar2 occurs check error" (ppr tv1 $$ ppr xi2)
; rewriteEqEvidence ev swapped xi1 xi2 co1 co2
`andWhenContinue` \ new_ev ->
if eq_rel == NomEq || isTyVarUnderDatatype tv1 xi2
then do { emitInsoluble (mkNonCanonical new_ev)
-- If we have a ~ [a], it is not canonical, and in particular
-- we don't want to rewrite existing inerts with it, otherwise
-- we'd risk divergence in the constraint solver
; stopWith new_ev "Occurs check" }
-- A representational equality with an occurs-check problem isn't
-- insoluble! For example:
-- a ~R b a
-- We might learn that b is the newtype Id.
-- But, the occurs-check certainly prevents the equality from being
-- canonical, and we might loop if we were to use it in rewriting.
else do { traceTcS "Occurs-check in representational equality"
(ppr xi1 $$ ppr xi2)
; continueWith (CIrredEvCan { cc_ev = new_ev }) } }
where
role = eqRelRole eq_rel
xi1 = mkTyVarTy tv1
co1 = mkTcReflCo role xi1
co2 = mkTcReflCo role xi2
canEqTyVarTyVar :: CtEvidence -- tv1 ~ rhs (or rhs ~ tv1, if swapped)
-> EqRel
-> SwapFlag
-> TcTyVar -> TcTyVar -- tv1, tv2
-> Coercion -- the co in (rhs = tv2 |> co)
-> TcS (StopOrContinue Ct)
-- Both LHS and RHS rewrote to a type variable
-- See Note [Canonical orientation for tyvar/tyvar equality constraints]
canEqTyVarTyVar ev eq_rel swapped tv1 tv2 kco2
| tv1 == tv2
= do { let mk_coh = case swapped of IsSwapped -> mkTcCoherenceLeftCo
NotSwapped -> mkTcCoherenceRightCo
; setEvBindIfWanted ev (EvCoercion $ mkTcReflCo role xi1 `mk_coh` kco2)
; stopWith ev "Equal tyvars" }
-- We don't do this any more
-- See Note [Orientation of equalities with fmvs] in TcFlatten
-- | isFmvTyVar tv1 = do_fmv swapped tv1 xi1 xi2 co1 co2
-- | isFmvTyVar tv2 = do_fmv (flipSwap swapped) tv2 xi2 xi1 co2 co1
| swap_over = do_swap
| otherwise = no_swap
where
role = eqRelRole eq_rel
xi1 = mkTyVarTy tv1
co1 = mkTcReflCo role xi1
xi2 = mkTyVarTy tv2
co2 = mkTcReflCo role xi2 `mkTcCoherenceRightCo` kco2
no_swap = canon_eq swapped tv1 xi1 xi2 co1 co2
do_swap = canon_eq (flipSwap swapped) tv2 xi2 xi1 co2 co1
canon_eq swapped tv1 ty1 ty2 co1 co2
-- ev : tv1 ~ orhs (not swapped) or orhs ~ tv1 (swapped)
-- co1 : xi1 ~ tv1
-- co2 : xi2 ~ tv2
= do { traceTcS "canEqTyVarTyVar"
(vcat [ ppr swapped
, ppr tv1 <+> dcolon <+> ppr (tyVarKind tv1)
, ppr ty1 <+> dcolon <+> ppr (typeKind ty1)
, ppr ty2 <+> dcolon <+> ppr (typeKind ty2)
, ppr co1 <+> dcolon <+> ppr (tcCoercionKind co1)
, ppr co2 <+> dcolon <+> ppr (tcCoercionKind co2) ])
; rewriteEqEvidence ev swapped ty1 ty2 co1 co2
`andWhenContinue` \ new_ev ->
homogeniseRhsKind new_ev eq_rel ty1 ty2 $ \new_new_ev ty2' ->
CTyEqCan { cc_ev = new_new_ev, cc_tyvar = tv1
, cc_rhs = ty2', cc_eq_rel = eq_rel } }
{- We don't do this any more
See Note [Orientation of equalities with fmvs] in TcFlatten
-- tv1 is the flatten meta-var
do_fmv swapped tv1 xi1 xi2 co1 co2
| same_kind
= canon_eq swapped tv1 xi1 xi2 co1 co2
| otherwise -- Presumably tv1 :: *, since it is a flatten meta-var,
-- at a kind that has some interesting sub-kind structure.
-- Since the two kinds are not the same, we must have
-- tv1 `subKind` tv2, which is the wrong way round
-- e.g. (fmv::*) ~ (a::OpenKind)
-- So make a new meta-var and use that:
-- fmv ~ (beta::*)
-- (a::OpenKind) ~ (beta::*)
= ASSERT2( k1_sub_k2,
ppr tv1 <+> dcolon <+> ppr (tyVarKind tv1) $$
ppr xi2 <+> dcolon <+> ppr (typeKind xi2) )
ASSERT2( isWanted ev, ppr ev ) -- Only wanteds have flatten meta-vars
do { tv_ty <- newFlexiTcSTy (tyVarKind tv1)
; new_ev <- newWantedEvVarNC (ctEvLoc ev)
(mkPrimEqPredRole (eqRelRole eq_rel)
                                                      tv_ty xi2)
; emitWorkNC [new_ev]
; canon_eq swapped tv1 xi1 tv_ty co1 (ctEvCoercion new_ev) }
-}
swap_over
-- If tv1 is touchable, swap only if tv2 is also
-- touchable and it's strictly better to update the latter
-- But see Note [Avoid unnecessary swaps]
| Just lvl1 <- metaTyVarTcLevel_maybe tv1
= case metaTyVarTcLevel_maybe tv2 of
Nothing -> False
Just lvl2 | lvl2 `strictlyDeeperThan` lvl1 -> True
| lvl1 `strictlyDeeperThan` lvl2 -> False
| otherwise -> nicer_to_update_tv2
-- So tv1 is not a meta tyvar
-- If only one is a meta tyvar, put it on the left
-- This is not because it'll be solved; but because
-- the floating step looks for meta tyvars on the left
| isMetaTyVar tv2 = True
-- So neither is a meta tyvar (including FlatMetaTv)
-- If only one is a flatten skolem, put it on the left
-- See Note [Eliminate flat-skols]
| not (isFlattenTyVar tv1), isFlattenTyVar tv2 = True
| otherwise = False
nicer_to_update_tv2
= (isSigTyVar tv1 && not (isSigTyVar tv2))
|| (isSystemName (Var.varName tv2) && not (isSystemName (Var.varName tv1)))
-- | Solve a reflexive equality constraint
canEqReflexive :: CtEvidence -- ty ~ ty
-> EqRel
-> TcType -- ty
-> TcS (StopOrContinue Ct) -- always Stop
canEqReflexive ev eq_rel ty
= do { setEvBindIfWanted ev (EvCoercion $
mkTcReflCo (eqRelRole eq_rel) ty)
; stopWith ev "Solved by reflexivity" }
-- See Note [Equalities with incompatible kinds]
homogeniseRhsKind :: CtEvidence -- ^ the evidence to homogenise
-> EqRel
-> TcType -- ^ original LHS
-> Xi -- ^ original RHS
-> (CtEvidence -> Xi -> Ct)
-- ^ how to build the homogenised constraint;
-- the 'Xi' is the new RHS
-> TcS (StopOrContinue Ct)
homogeniseRhsKind ev eq_rel lhs rhs build_ct
| k1 `eqType` k2
= continueWith (build_ct ev rhs)
| CtGiven { ctev_evar = evar } <- ev
-- tm :: (lhs :: k1) ~ (rhs :: k2)
= do { kind_ev_id <- newBoundEvVarId kind_pty
(EvCoercion $
mkTcKindCo $ mkTcCoVarCo evar)
-- kind_ev_id :: (k1 :: *) ~# (k2 :: *)
; let kind_ev = CtGiven { ctev_pred = kind_pty
, ctev_evar = kind_ev_id
, ctev_loc = kind_loc }
homo_co = mkSymCo $ mkCoVarCo kind_ev_id
rhs' = mkCastTy rhs homo_co
; traceTcS "Hetero equality gives rise to given kind equality"
(ppr kind_ev_id <+> dcolon <+> ppr kind_pty)
; emitWorkNC [kind_ev]
; type_ev <- newGivenEvVar loc
( mkTcEqPredLikeEv ev lhs rhs'
, EvCoercion $
mkTcCoherenceRightCo (mkTcCoVarCo evar) homo_co )
-- type_ev :: (lhs :: k1) ~ ((rhs |> sym kind_ev_id) :: k1)
; continueWith (build_ct type_ev rhs') }
| otherwise -- Wanted and Derived. See Note [No derived kind equalities]
-- evar :: (lhs :: k1) ~ (rhs :: k2)
= do { (kind_ev, kind_co) <- newWantedEq kind_loc Nominal k1 k2
-- kind_ev :: (k1 :: *) ~ (k2 :: *)
; traceTcS "Hetero equality gives rise to wanted kind equality" $
ppr (kind_ev)
; emitWorkNC [kind_ev]
; let homo_co = mkSymCo kind_co
-- homo_co :: k2 ~ k1
rhs' = mkCastTy rhs homo_co
; case ev of
CtGiven {} -> panic "homogeniseRhsKind"
CtDerived {} -> continueWith (build_ct (ev { ctev_pred = homo_pred })
rhs')
where homo_pred = mkTcEqPredLikeEv ev lhs rhs'
CtWanted { ctev_dest = dest } -> do
{ (type_ev, hole_co) <- newWantedEq loc role lhs rhs'
-- type_ev :: (lhs :: k1) ~ (rhs |> sym kind_ev :: k1)
; setWantedEq dest
(hole_co `mkTransCo`
(mkReflCo role rhs
`mkCoherenceLeftCo` homo_co))
-- dest := hole ; <rhs> |> homo_co :: (lhs :: k1) ~ (rhs :: k2)
; continueWith (build_ct type_ev rhs') }}
where
k1 = typeKind lhs
k2 = typeKind rhs
kind_pty = mkHeteroPrimEqPred liftedTypeKind liftedTypeKind k1 k2
kind_loc = mkKindLoc lhs rhs loc
loc = ctev_loc ev
role = eqRelRole eq_rel
{-
Note [Canonical orientation for tyvar/tyvar equality constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have a ~ b where both 'a' and 'b' are TcTyVars, which way
round should be oriented in the CTyEqCan? The rules, implemented by
canEqTyVarTyVar, are these
* If either is a flatten-meta-variable, it goes on the left.
* If one is a strict sub-kind of the other e.g.
(alpha::?) ~ (beta::*)
orient them so RHS is a subkind of LHS. That way we will replace
'a' with 'b', correctly narrowing the kind.
This establishes the subkind invariant of CTyEqCan.
* Put a meta-tyvar on the left if possible
alpha[3] ~ r
* If both are meta-tyvars, put the more touchable one (deepest level
number) on the left, so there is the best chance of unifying it
alpha[3] ~ beta[2]
* If both are meta-tyvars and both at the same level, put a SigTv
on the right if possible
alpha[2] ~ beta[2](sig-tv)
That way, when we unify alpha := beta, we don't lose the SigTv flag.
* Put a meta-tv with a System Name on the left if possible so it
gets eliminated (improves error messages)
* If one is a flatten-skolem, put it on the left so that it is
substituted out; see Note [Eliminate flat-skols]
fsk ~ a
Note [Avoid unnecessary swaps]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we swap without actually improving matters, we can get an infinite loop.
Consider
work item: a ~ b
inert item: b ~ c
We canonicalise the work-item to (a ~ c). If we then swap it before
adding it to the inert set, we'll add (c ~ a), and therefore kick out the
inert guy, so we get
new work item: b ~ c
inert item: c ~ a
And now the cycle just repeats
Note [Eliminate flat-skols]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have [G] Num (F [a])
then we flatten to
[G] Num fsk
[G] F [a] ~ fsk
where fsk is a flatten-skolem (FlatSkol). Suppose we have
type instance F [a] = a
then we'll reduce the second constraint to
[G] a ~ fsk
and then replace all uses of 'a' with fsk. That's bad because
in error messages instead of saying 'a' we'll say (F [a]). In all
places, including those where the programmer wrote 'a' in the first
place. Very confusing! See Trac #7862.
Solution: re-orient a~fsk to fsk~a, so that we preferentially eliminate
the fsk.
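For concreteness, a small (purely illustrative) program that produces this
situation:
  type family F a
  type instance F [a] = a
  g :: Num (F [a]) => a -> a
  g x = x + 1
Typechecking g's body under the given Num (F [a]) yields exactly the
flattened givens above, and re-orienting a ~ fsk to fsk ~ a keeps error
messages talking about 'a' rather than (F [a]).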
Note [Equalities with incompatible kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
canEqLeaf is about to make a CTyEqCan or CFunEqCan; but both have the
invariant that LHS and RHS satisfy the kind invariants for CTyEqCan,
CFunEqCan. What if we try to unify two things with incompatible
kinds?
eg a ~ b where a::*, b::*->*
or a ~ b where a::*, b::k, k is a kind variable
The CTyEqCan compatKind invariant is important. If we make a CTyEqCan
for a~b, then we might well *substitute* 'b' for 'a', and that might make
a well-kinded type ill-kinded; and that is bad (eg typeKind can crash, see
Trac #7696).
So instead for these ill-kinded equalities we homogenise the RHS of the
equality, emitting new constraints as necessary.
Note [Type synonyms and canonicalization]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We treat type synonym applications as xi types, that is, they do not
count as type function applications. However, we do need to be a bit
careful with type synonyms: like type functions they may not be
generative or injective. However, unlike type functions, they are
parametric, so there is no problem in expanding them whenever we see
them, since we do not need to know anything about their arguments in
order to expand them; this is what justifies not having to treat them
as specially as type function applications. The thing that causes
some subtleties is that we prefer to leave type synonym applications
*unexpanded* whenever possible, in order to generate better error
messages.
If we encounter an equality constraint with type synonym applications
on both sides, or a type synonym application on one side and some sort
of type application on the other, we simply must expand out the type
synonyms in order to continue decomposing the equality constraint into
primitive equality constraints. For example, suppose we have
type F a = [Int]
and we encounter the equality
F a ~ [b]
In order to continue we must expand F a into [Int], giving us the
equality
[Int] ~ [b]
which we can then decompose into the more primitive equality
constraint
Int ~ b.
However, if we encounter an equality constraint with a type synonym
application on one side and a variable on the other side, we should
NOT (necessarily) expand the type synonym, since for the purpose of
good error messages we want to leave type synonyms unexpanded as much
as possible. Hence the ps_ty1, ps_ty2 argument passed to canEqTyVar.
-}
{-
************************************************************************
* *
Evidence transformation
* *
************************************************************************
-}
data StopOrContinue a
= ContinueWith a -- The constraint was not solved, although it may have
-- been rewritten
| Stop CtEvidence -- The (rewritten) constraint was solved
SDoc -- Tells how it was solved
-- Any new sub-goals have been put on the work list
instance Functor StopOrContinue where
fmap f (ContinueWith x) = ContinueWith (f x)
fmap _ (Stop ev s) = Stop ev s
instance Outputable a => Outputable (StopOrContinue a) where
ppr (Stop ev s) = text "Stop" <> parens s <+> ppr ev
ppr (ContinueWith w) = text "ContinueWith" <+> ppr w
continueWith :: a -> TcS (StopOrContinue a)
continueWith = return . ContinueWith
stopWith :: CtEvidence -> String -> TcS (StopOrContinue a)
stopWith ev s = return (Stop ev (text s))
andWhenContinue :: TcS (StopOrContinue a)
-> (a -> TcS (StopOrContinue b))
-> TcS (StopOrContinue b)
andWhenContinue tcs1 tcs2
= do { r <- tcs1
; case r of
Stop ev s -> return (Stop ev s)
ContinueWith ct -> tcs2 ct }
infixr 0 `andWhenContinue` -- allow chaining with ($)
rewriteEvidence :: CtEvidence -- old evidence
-> TcPredType -- new predicate
-> TcCoercion -- Of type :: new predicate ~ <type of old evidence>
-> TcS (StopOrContinue CtEvidence)
-- Returns Just new_ev iff either (i) 'co' is reflexivity
-- or (ii) 'co' is not reflexivity, and 'new_pred' not cached
-- In either case, there is nothing new to do with new_ev
{-
rewriteEvidence old_ev new_pred co
Main purpose: create new evidence for new_pred;
unless new_pred is cached already
* Returns a new_ev : new_pred, with same wanted/given/derived flag as old_ev
* If old_ev was wanted, create a binding for old_ev, in terms of new_ev
* If old_ev was given, AND not cached, create a binding for new_ev, in terms of old_ev
* Returns Nothing if new_ev is already cached
Old evidence    New predicate is              Return new evidence
flavour                                       of same flavor
-------------------------------------------------------------------
Wanted          Already solved or in inert    Nothing
or Derived      Not                           Just new_evidence
Given           Already in inert              Nothing
                Not                           Just new_evidence
Note [Rewriting with Refl]
~~~~~~~~~~~~~~~~~~~~~~~~~~
If the coercion is just reflexivity then you may re-use the same
variable. But be careful! Although the coercion is Refl, new_pred
may reflect the result of unification alpha := ty, so new_pred might
not _look_ the same as old_pred, and it's vital to proceed from now on
using new_pred.
The flattener preserves type synonyms, so they should appear in new_pred
as well as in old_pred; that is important for good error messages.
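A concrete (illustrative) instance: suppose old_pred is (alpha ~ [b]) and,
while solving other constraints, we unified alpha := [Int]. Flattening then
rewrites alpha to [Int] with a Refl coercion (filling a meta-variable is a
definitional equality, not evidence), so 'co' is Refl but new_pred is
([Int] ~ [b]); all later processing must use new_pred, not old_pred.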
-}
rewriteEvidence old_ev@(CtDerived {}) new_pred _co
= -- If derived, don't even look at the coercion.
-- This is very important, DO NOT re-order the equations for
-- rewriteEvidence to put the isTcReflCo test first!
-- Why? Because for *Derived* constraints, c, the coercion, which
-- was produced by flattening, may contain suspended calls to
-- (ctEvTerm c), which fails for Derived constraints.
-- (Getting this wrong caused Trac #7384.)
continueWith (old_ev { ctev_pred = new_pred })
rewriteEvidence old_ev new_pred co
| isTcReflCo co -- See Note [Rewriting with Refl]
= continueWith (old_ev { ctev_pred = new_pred })
rewriteEvidence ev@(CtGiven { ctev_evar = old_evar , ctev_loc = loc }) new_pred co
= do { new_ev <- newGivenEvVar loc (new_pred, new_tm)
; continueWith new_ev }
where
-- mkEvCast optimises ReflCo
new_tm = mkEvCast (EvId old_evar) (tcDowngradeRole Representational
(ctEvRole ev)
(mkTcSymCo co))
rewriteEvidence ev@(CtWanted { ctev_dest = dest
, ctev_loc = loc }) new_pred co
= do { mb_new_ev <- newWanted loc new_pred
; MASSERT( tcCoercionRole co == ctEvRole ev )
; setWantedEvTerm dest
(mkEvCast (getEvTerm mb_new_ev)
(tcDowngradeRole Representational (ctEvRole ev) co))
; case mb_new_ev of
Fresh new_ev -> continueWith new_ev
Cached _ -> stopWith ev "Cached wanted" }
rewriteEqEvidence :: CtEvidence -- Old evidence :: olhs ~ orhs (not swapped)
-- or orhs ~ olhs (swapped)
-> SwapFlag
-> TcType -> TcType -- New predicate nlhs ~ nrhs
-- Should be zonked, because we use typeKind on nlhs/nrhs
-> TcCoercion -- lhs_co, of type :: nlhs ~ olhs
-> TcCoercion -- rhs_co, of type :: nrhs ~ orhs
-> TcS (StopOrContinue CtEvidence) -- Of type nlhs ~ nrhs
-- For (rewriteEqEvidence (Given g olhs orhs) False nlhs nrhs lhs_co rhs_co)
-- we generate
-- If not swapped
-- g1 : nlhs ~ nrhs = lhs_co ; g ; sym rhs_co
-- If 'swapped'
-- g1 : nlhs ~ nrhs = lhs_co ; Sym g ; sym rhs_co
--
-- For (Wanted w) we do the dual thing.
-- New w1 : nlhs ~ nrhs
-- If not swapped
-- w : olhs ~ orhs = sym lhs_co ; w1 ; rhs_co
-- If swapped
-- w : orhs ~ olhs = sym rhs_co ; sym w1 ; lhs_co
--
-- It's all a form of rewriteEvidence, specialised for equalities
rewriteEqEvidence old_ev swapped nlhs nrhs lhs_co rhs_co
| CtDerived {} <- old_ev -- Don't force the evidence for a Derived
= continueWith (old_ev { ctev_pred = new_pred })
| NotSwapped <- swapped
, isTcReflCo lhs_co -- See Note [Rewriting with Refl]
, isTcReflCo rhs_co
= continueWith (old_ev { ctev_pred = new_pred })
| CtGiven { ctev_evar = old_evar } <- old_ev
= do { let new_tm = EvCoercion (lhs_co
`mkTcTransCo` maybeSym swapped (mkTcCoVarCo old_evar)
`mkTcTransCo` mkTcSymCo rhs_co)
; new_ev <- newGivenEvVar loc' (new_pred, new_tm)
; continueWith new_ev }
| CtWanted { ctev_dest = dest } <- old_ev
= do { (new_ev, hole_co) <- newWantedEq loc' (ctEvRole old_ev) nlhs nrhs
; let co = maybeSym swapped $
mkSymCo lhs_co
`mkTransCo` hole_co
`mkTransCo` rhs_co
; setWantedEq dest co
; traceTcS "rewriteEqEvidence" (vcat [ppr old_ev, ppr nlhs, ppr nrhs, ppr co])
; continueWith new_ev }
| otherwise
= panic "rewriteEvidence"
where
new_pred = mkTcEqPredLikeEv old_ev nlhs nrhs
-- equality is like a type class. Bumping the depth is necessary because
-- of recursive newtypes, where "reducing" a newtype can actually make
-- it bigger. See Note [Newtypes can blow the stack].
loc = ctEvLoc old_ev
loc' = bumpCtLocDepth loc
{- Note [unifyWanted and unifyDerived]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When decomposing equalities we often create new wanted constraints for
(s ~ t). But what if s=t? Then it'd be faster to return Refl right away.
Similar remarks apply for Derived.
Rather than making an equality test (which traverses the structure of the
type, perhaps fruitlessly), unifyWanted traverses the common structure, and
bales out when it finds a difference by creating a new Wanted constraint.
But where it succeeds in finding common structure, it just builds a coercion
to reflect it.
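For example (illustrative): unifyWanted on (Maybe Int) ~ (Maybe Int) simply
returns a reflexive coercion and emits nothing, whereas unifyWanted on
(Maybe Int) ~ (Maybe beta), for an as-yet-unfilled beta, decomposes the
Maybe and emits the single, smaller Wanted (Int ~ beta).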
-}
unifyWanted :: CtLoc -> Role
-> TcType -> TcType -> TcS Coercion
-- Return coercion witnessing the equality of the two types,
-- emitting new work equalities where necessary to achieve that
-- Very good short-cut when the two types are equal, or nearly so
-- See Note [unifyWanted and unifyDerived]
-- The returned coercion's role matches the input parameter
unifyWanted loc Phantom ty1 ty2
= do { kind_co <- unifyWanted loc Nominal (typeKind ty1) (typeKind ty2)
; return (mkPhantomCo kind_co ty1 ty2) }
unifyWanted loc role orig_ty1 orig_ty2
= go orig_ty1 orig_ty2
where
go ty1 ty2 | Just ty1' <- coreView ty1 = go ty1' ty2
go ty1 ty2 | Just ty2' <- coreView ty2 = go ty1 ty2'
go (ForAllTy (Anon s1) t1) (ForAllTy (Anon s2) t2)
= do { co_s <- unifyWanted loc role s1 s2
; co_t <- unifyWanted loc role t1 t2
; return (mkTyConAppCo role funTyCon [co_s,co_t]) }
go (TyConApp tc1 tys1) (TyConApp tc2 tys2)
| tc1 == tc2, tys1 `equalLength` tys2
, isInjectiveTyCon tc1 role -- don't look under newtypes at Rep equality
= do { cos <- zipWith3M (unifyWanted loc)
(tyConRolesX role tc1) tys1 tys2
; return (mkTyConAppCo role tc1 cos) }
go (TyVarTy tv) ty2
= do { mb_ty <- isFilledMetaTyVar_maybe tv
; case mb_ty of
Just ty1' -> go ty1' ty2
Nothing -> bale_out }
go ty1 (TyVarTy tv)
= do { mb_ty <- isFilledMetaTyVar_maybe tv
; case mb_ty of
Just ty2' -> go ty1 ty2'
Nothing -> bale_out }
go ty1@(CoercionTy {}) (CoercionTy {})
= return (mkReflCo role ty1) -- we just don't care about coercions!
go _ _ = bale_out
bale_out = do { (new_ev, co) <- newWantedEq loc role orig_ty1 orig_ty2
; emitWorkNC [new_ev]
; return co }
unifyDeriveds :: CtLoc -> [Role] -> [TcType] -> [TcType] -> TcS ()
-- See Note [unifyWanted and unifyDerived]
unifyDeriveds loc roles tys1 tys2 = zipWith3M_ (unify_derived loc) roles tys1 tys2
unifyDerived :: CtLoc -> Role -> Pair TcType -> TcS ()
-- See Note [unifyWanted and unifyDerived]
unifyDerived loc role (Pair ty1 ty2) = unify_derived loc role ty1 ty2
unify_derived :: CtLoc -> Role -> TcType -> TcType -> TcS ()
-- Create new Derived and put it in the work list
-- Should do nothing if the two types are equal
-- See Note [unifyWanted and unifyDerived]
unify_derived _ Phantom _ _ = return ()
unify_derived loc role orig_ty1 orig_ty2
= go orig_ty1 orig_ty2
where
go ty1 ty2 | Just ty1' <- coreView ty1 = go ty1' ty2
go ty1 ty2 | Just ty2' <- coreView ty2 = go ty1 ty2'
go (ForAllTy (Anon s1) t1) (ForAllTy (Anon s2) t2)
= do { unify_derived loc role s1 s2
; unify_derived loc role t1 t2 }
go (TyConApp tc1 tys1) (TyConApp tc2 tys2)
| tc1 == tc2, tys1 `equalLength` tys2
, isInjectiveTyCon tc1 role
= unifyDeriveds loc (tyConRolesX role tc1) tys1 tys2
go (TyVarTy tv) ty2
= do { mb_ty <- isFilledMetaTyVar_maybe tv
; case mb_ty of
Just ty1' -> go ty1' ty2
Nothing -> bale_out }
go ty1 (TyVarTy tv)
= do { mb_ty <- isFilledMetaTyVar_maybe tv
; case mb_ty of
Just ty2' -> go ty1 ty2'
Nothing -> bale_out }
go _ _ = bale_out
-- no point in having *boxed* deriveds.
bale_out = emitNewDerivedEq loc role orig_ty1 orig_ty2
maybeSym :: SwapFlag -> TcCoercion -> TcCoercion
maybeSym IsSwapped co = mkTcSymCo co
maybeSym NotSwapped co = co
|
gridaphobe/ghc
|
compiler/typecheck/TcCanonical.hs
|
bsd-3-clause
| 80,800 | 118 | 26 | 22,783 | 10,255 | 5,423 | 4,832 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Database.Redis.Types (
RedisT,
Value(..),Score, SortedSet,
Request(..), Reply(..),
) where
import Control.Applicative
import Data.Binary
import qualified Data.ByteString as S
import qualified Data.HashMap.Strict as HMS
import qualified Data.HashSet as HS
import Data.Int
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import Database.Curry
type RedisT m = DBMT Value m
data Value
= VString {-# UNPACK #-} !S.ByteString
| VList !(Seq.Seq S.ByteString)
| VSet !(HS.HashSet S.ByteString)
| VHash !(HMS.HashMap S.ByteString S.ByteString)
| VSortedSet {-# UNPACK #-} !SortedSet
type Score = Int32
type SortedSet = (Set.Set (Score, S.ByteString), HMS.HashMap S.ByteString Score)
data Request
= Request [S.ByteString]
deriving (Show)
data Reply
= StatusReply {-# UNPACK #-} !S.ByteString
| ErrorReply {-# UNPACK #-} !S.ByteString
| IntReply {-# UNPACK #-} !Int
| BulkReply !(Maybe S.ByteString)
| MultiBulkReply !(Maybe [Maybe S.ByteString])
deriving (Show)
instance Binary Value where
put (VString bs) = put (0 :: Word8) >> put bs
put (VList ls) = put (1 :: Word8) >> put ls
put (VSet ss) = put (2 :: Word8) >> put (HS.toList ss)
put (VHash ha) = put (3 :: Word8) >> put ha
put (VSortedSet ss) = put (4 :: Word8) >> put ss
get = get >>= \tag -> case (tag :: Word8) of
0 -> VString <$> get
1 -> VList <$> get
2 -> VSet . HS.fromList <$> get
3 -> VHash <$> get
4 -> VSortedSet <$> get
_ -> fail "data corrupted"
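-- A minimal sketch (not part of the original module) of how the 'Binary'
-- instance above is typically exercised; 'roundTripValue' is a name made up
-- here purely for illustration:
--
-- > roundTripValue :: Value -> Value
-- > roundTripValue = decode . encode
--
-- 'put' writes a one-byte tag (0..4) selecting the constructor and 'get'
-- dispatches on that same tag, so decoding an encoded 'Value' rebuilds the
-- original constructor and payload.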
|
tanakh/CurryDB
|
Database/Redis/Types.hs
|
bsd-3-clause
| 1,796 | 0 | 13 | 560 | 572 | 317 | 255 | 56 | 0 |
{-# LANGUAGE MagicHash, UnboxedTuples #-}
{-# LANGUAGE GHCForeignImportPrim #-}
{-# LANGUAGE UnliftedFFITypes #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Geometry.Prim.FloatX3
-- Copyright : Copyright (C) 2015 Artem M. Chirkin <[email protected]>
-- License : BSD3
--
-- Maintainer : Artem M. Chirkin <[email protected]>
-- Stability : Experimental
-- Portability :
--
-- Internal package containing only vector primops
--
-----------------------------------------------------------------------------
module Data.Geometry.Prim.FloatX3
( FloatX3#
-- GHC.Prim
, packFloatX3#
, unpackFloatX3#
, broadcastFloatX3#
, plusFloatX3#
, minusFloatX3#
, timesFloatX3#
, divideFloatX3#
, negateFloatX3#
-- Eq
, eqFloatX3#
-- Ord
, ltFloatX3#
, gtFloatX3#
, leFloatX3#
, geFloatX3#
, maxFloatX3#
, minFloatX3#
-- Num
, absFloatX3#
, signumFloatX3#
-- Fractional
, recipFloatX3#
, inverseMFloatX3#
-- Floating
, sqrtFloatX3#
-- Misc
, dotFloatX3#
, transposeMFloatX3#
, detMFloatX3#
, prodMMFloatX3#
, prodMVFloatX3#
) where
import GHC.Exts
type FloatX3# = FloatX4#
-- GHC.Prim
{-# INLINE packFloatX3# #-}
packFloatX3# :: (#Float#, Float#, Float#, Float# #) -> FloatX3#
packFloatX3# = packFloatX4#
{-# INLINE unpackFloatX3# #-}
unpackFloatX3# :: FloatX3# -> (#Float#, Float#, Float#, Float# #)
unpackFloatX3# = unpackFloatX4#
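-- A hedged usage sketch (illustration only): since 'FloatX3#' is really a
-- 4-wide 'FloatX4#', a 3-vector is packed with one padding lane that the
-- caller supplies and then ignores. Requires a GHC/LLVM setup with SIMD
-- primops enabled.
--
-- > case unpackFloatX3# (packFloatX3# (# 1.0#, 2.0#, 3.0#, 0.0# #)) of
-- >   (# x, _y, _z, _pad #) -> F# x   -- 1.0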
{-# INLINE broadcastFloatX3# #-}
broadcastFloatX3# :: Float# -> FloatX3#
broadcastFloatX3# = broadcastFloatX4#
{-# INLINE plusFloatX3# #-}
plusFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
plusFloatX3# = plusFloatX4#
{-# INLINE minusFloatX3# #-}
minusFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
minusFloatX3# = minusFloatX4#
{-# INLINE timesFloatX3# #-}
timesFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
timesFloatX3# = timesFloatX4#
{-# INLINE divideFloatX3# #-}
divideFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
divideFloatX3# = divideFloatX4#
{-# INLINE negateFloatX3# #-}
negateFloatX3# :: FloatX3# -> FloatX3#
negateFloatX3# = negateFloatX4#
-- Eq
{-# INLINE eqFloatX3# #-}
foreign import prim "eqFloatX3"
eqFloatX3# :: FloatX3# -> FloatX3# -> Int#
-- Ord
{-# INLINE ltFloatX3# #-}
foreign import prim "ltFloatX3"
ltFloatX3# :: FloatX3# -> FloatX3# -> Int#
{-# INLINE gtFloatX3# #-}
foreign import prim "gtFloatX3"
gtFloatX3# :: FloatX3# -> FloatX3# -> Int#
{-# INLINE leFloatX3# #-}
foreign import prim "leFloatX3"
leFloatX3# :: FloatX3# -> FloatX3# -> Int#
{-# INLINE geFloatX3# #-}
foreign import prim "geFloatX3"
geFloatX3# :: FloatX3# -> FloatX3# -> Int#
{-# INLINE maxFloatX3# #-}
foreign import prim "maxFloatX3"
maxFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
{-# INLINE minFloatX3# #-}
foreign import prim "minFloatX3"
minFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
-- Num
{-# INLINE absFloatX3# #-}
foreign import prim "absFloatX3"
absFloatX3# :: FloatX3# -> FloatX3#
{-# INLINE signumFloatX3# #-}
foreign import prim "signumFloatX3"
signumFloatX3# :: FloatX3# -> FloatX3#
-- Fractional
{-# INLINE recipFloatX3# #-}
foreign import prim "recipFloatX3"
recipFloatX3# :: FloatX3# -> FloatX3#
{-# INLINE inverseMFloatX3# #-}
foreign import prim "inverseMFloatX3"
inverseMFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
-> (# FloatX3#, FloatX3#, FloatX3# #)
-- Floating
{-# INLINE sqrtFloatX3# #-}
foreign import prim "sqrtFloatX3"
sqrtFloatX3# :: FloatX3# -> FloatX3#
-- Misc
{-# INLINE dotFloatX3# #-}
foreign import prim "dotFloatX3"
dotFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
{-# INLINE transposeMFloatX3# #-}
foreign import prim "transposeMFloatX3"
transposeMFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
-> (# FloatX3#, FloatX3#, FloatX3# #)
{-# INLINE detMFloatX3# #-}
foreign import prim "detMFloatX3"
detMFloatX3# :: FloatX3# -> FloatX3# -> FloatX3# -> FloatX3#
{-# INLINE prodMMFloatX3# #-}
foreign import prim "prodMMFloatX3"
prodMMFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
-> FloatX3# -> FloatX3# -> FloatX3#
-> (# FloatX3#, FloatX3#, FloatX3# #)
{-# INLINE prodMVFloatX3# #-}
foreign import prim "prodMVFloatX3"
prodMVFloatX3# :: FloatX3# -> FloatX3# -> FloatX3#
-> FloatX3#
-> FloatX3#
|
achirkin/fastvec
|
src/Data/Geometry/Prim/FloatX3.hs
|
bsd-3-clause
| 4,513 | 4 | 12 | 932 | 699 | 426 | 273 | -1 | -1 |
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE RecordWildCards #-}
module Sector where
import Prelude hiding (any, floor, ceiling, (.), id)
import Control.Applicative
import Data.Ord (comparing)
import Control.Category
import Control.Lens hiding (indices)
import Data.Foldable (any)
import Data.Int (Int32)
import Data.Monoid ((<>))
import Foreign (Storable(..), castPtr, nullPtr, plusPtr)
import Foreign.C (CFloat)
import Graphics.Rendering.OpenGL (($=))
import Linear as L
import qualified Data.IntMap.Strict as IM
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import qualified Graphics.Rendering.OpenGL as GL
import Geometry
import Material
import Shader
data Vertex =
Vertex {vPos :: {-# UNPACK #-} !(V3 CFloat)
,vNorm :: {-# UNPACK #-} !(V3 CFloat)
,vTangent :: {-# UNPACK #-} !(V3 CFloat)
,vBitangent :: {-# UNPACK #-} !(V3 CFloat)
,vUV :: {-# UNPACK #-} !(V2 CFloat)}
deriving (Show)
instance Storable Vertex where
sizeOf ~(Vertex p n t bn uv) = sizeOf p + sizeOf n + sizeOf t + sizeOf bn +
sizeOf uv
alignment _ = 0
peek ptr =
Vertex <$>
peek (castPtr ptr) <*>
peek (castPtr $ ptr `plusPtr`
sizeOf (vPos undefined)) <*>
peek (castPtr $ ptr `plusPtr`
sizeOf (vPos undefined) `plusPtr`
sizeOf (vNorm undefined)) <*>
peek (castPtr $ ptr `plusPtr`
sizeOf (vPos undefined) `plusPtr`
sizeOf (vNorm undefined) `plusPtr`
sizeOf (vTangent undefined)) <*>
peek (castPtr $ ptr `plusPtr`
sizeOf (vPos undefined) `plusPtr`
sizeOf (vNorm undefined) `plusPtr`
sizeOf (vTangent undefined) `plusPtr`
sizeOf (vBitangent undefined))
poke ptr (Vertex p n t bn uv) =
do poke (castPtr $ ptr) p
poke (castPtr $ ptr `plusPtr` sizeOf p) n
poke (castPtr $ ptr `plusPtr` sizeOf p `plusPtr` sizeOf n) t
poke (castPtr $ ptr `plusPtr` sizeOf p `plusPtr` sizeOf n `plusPtr`
sizeOf t)
bn
poke (castPtr $ ptr `plusPtr` sizeOf p `plusPtr` sizeOf n `plusPtr`
sizeOf t `plusPtr` sizeOf bn)
uv
data Blueprint =
Blueprint {blueprintVertices :: IM.IntMap (V2 CFloat)
,blueprintWalls :: V.Vector (Int,Int)
,blueprintFloor :: CFloat
,blueprintCeiling :: CFloat
,blueprintFloorMaterial :: Material
,blueprintCeilingMaterial :: Material
,blueprintWallMaterial :: Material}
data Sector =
Sector {sectorDrawWalls :: IO ()
,sectorDrawFloor :: IO ()
,sectorDrawCeiling :: IO ()
,sectorFloorMaterial :: Material
,sectorCeilingMaterial :: Material
,sectorWallMaterial :: Material}
rayLineIntersection :: (Epsilon a,Fractional a,Ord a)
=> V2 a -> V2 a -> V2 a -> V2 a -> Maybe (V2 a)
rayLineIntersection p r q q' =
let s = q' - q
cross (V2 a b) (V2 x y) = a * y - b * x
pToQ = q - p
tNum = pToQ `cross` s
uNum = pToQ `cross` r
in case r `cross` s of
denom
| nearZero denom -> Nothing
| otherwise ->
let u = uNum / denom
t = tNum / denom
in if 0 <= u && u <= 1
then Just (p + r ^* t)
else Nothing
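-- A worked example (illustration only): a ray from the origin along the +x
-- axis against the vertical segment from (1,-1) to (1,1) hits at (1,0):
--
-- > rayLineIntersection (V2 0 0) (V2 1 0) (V2 1 (-1)) (V2 1 1)
-- >   == Just (V2 1 0)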
makeSimple :: (Epsilon a,Fractional a,Ord a)
=> V.Vector (V2 a) -> V.Vector (V2 a) -> V.Vector (V2 a)
makeSimple inner outer =
let xMost = comparing (view _x)
m = V.maximumBy xMost inner
mIndex = V.maxIndexBy xMost inner
edges = V.zip outer (V.tail outer <> outer)
intersections =
V.map (\(start,end) ->
((rayLineIntersection m
(V2 1 0)
start
end)
,start
,end))
edges
(Just i,start,end) =
V.minimumBy
(\(x,_,_) (y,_,_) ->
case (x,y) of
(Nothing,Nothing) -> EQ
(Just _,Nothing) -> LT
(Nothing,Just _) -> GT
(Just a,Just b) ->
comparing (qd m) a b)
intersections
p =
V.maximumBy xMost
[start,end]
containing =
V.filter (pointInTriangle m i p .
snd) $
V.filter (not . nearZero .
(subtract p) .
snd) $
V.imap (,) outer
isReflex _ = True
angleAgainstM =
dot (V2 1 0) .
subtract m
(minimalReflex,_) =
V.minimumBy (comparing (angleAgainstM . snd))
(V.filter (isReflex . snd) containing)
in if V.null containing
then undefined
else case V.splitAt minimalReflex outer of
(before,after) ->
before <>
V.take 1 after <>
V.take (succ (V.length inner))
(V.drop mIndex inner <>
inner) <>
after
triangulate :: (Epsilon a, Fractional a, Ord a) => V.Vector (V2 a) -> V.Vector Int
triangulate = collapseAndTriangulate
where collapseAndTriangulate vs = go $ addIndices vs
takeFirst f =
V.take 1 .
V.filter f
isEar ((_,a),(_,b),(_,c),otherVertices) =
let area = triangleArea a b c
containsOther =
any (pointInTriangle a b c .
snd)
otherVertices
in area > 0 && not containsOther
go s
| V.length s < 3 = empty
| otherwise =
do (v0@(n0,_),(n1,_),v2@(n2,_),others) <- takeFirst isEar (separate s)
[n0,n2,n1] <>
go (v0 `V.cons`
(v2 `V.cons` others))
addIndices vertices =
V.zip [0 .. V.length vertices] vertices
separate vertices =
let n = V.length vertices
doubleVerts = vertices <> vertices
in V.zip4 vertices
(V.drop 1 doubleVerts)
(V.drop 2 doubleVerts)
(V.imap (\i _ ->
V.take (n - 3) $
V.drop (i + 3) $
doubleVerts)
vertices)
-- collapse vs =
-- V.map (\i ->
-- let v = vs V.! i
-- in fst $ V.head $ V.filter (nearZero . (v -) . snd) $ V.imap (,) vs)
buildSector :: Blueprint -> IO Sector
buildSector Blueprint{..} =
do vao <- initializeVAO
initializeVBO
configureVertexAttributes
initializeIBO
return $
Sector {sectorDrawWalls =
do GL.bindVertexArrayObject $=
Just vao
GL.drawElements GL.Triangles
(fromIntegral $ V.length wallIndices)
GL.UnsignedInt
nullPtr
,sectorDrawFloor =
do GL.bindVertexArrayObject $=
Just vao
GL.drawElements
GL.Triangles
(fromIntegral $ V.length floorIndices)
GL.UnsignedInt
(nullPtr `plusPtr`
fromIntegral
(sizeOf (0 :: Int32) *
V.length wallIndices))
,sectorDrawCeiling =
do GL.bindVertexArrayObject $=
Just vao
GL.drawElements
GL.Triangles
(fromIntegral $ V.length ceilingIndices)
GL.UnsignedInt
(nullPtr `plusPtr`
fromIntegral
(sizeOf (0 :: Int32) *
(V.length wallIndices + V.length floorIndices)))
,sectorWallMaterial = blueprintWallMaterial
,sectorFloorMaterial = blueprintFloorMaterial
,sectorCeilingMaterial = blueprintCeilingMaterial}
where initializeVAO =
do vao <- GL.genObjectName :: IO (GL.VertexArrayObject)
GL.bindVertexArrayObject $=
Just vao
return vao
initializeVBO =
do vbo <- GL.genObjectName
GL.bindBuffer GL.ArrayBuffer $=
Just vbo
let vertices = wallVertices <> floorVertices <> ceilingVertices
SV.unsafeWith (V.convert vertices) $
\verticesPtr ->
GL.bufferData GL.ArrayBuffer $=
(fromIntegral
(V.length vertices *
sizeOf (undefined :: Vertex))
,verticesPtr
,GL.StaticDraw)
configureVertexAttributes =
do let stride =
fromIntegral $
sizeOf (undefined :: Vertex)
normalOffset =
fromIntegral $
sizeOf (0 :: V3 CFloat)
tangentOffset =
normalOffset +
fromIntegral (sizeOf (0 :: V3 CFloat))
bitangentOffset =
tangentOffset +
fromIntegral (sizeOf (0 :: V3 CFloat))
uvOffset =
bitangentOffset +
fromIntegral (sizeOf (0 :: V3 CFloat))
GL.vertexAttribPointer positionAttribute $=
(GL.ToFloat,GL.VertexArrayDescriptor 3 GL.Float stride nullPtr)
GL.vertexAttribArray positionAttribute $= GL.Enabled
GL.vertexAttribPointer normalAttribute $=
(GL.ToFloat
,GL.VertexArrayDescriptor 3
GL.Float
stride
(nullPtr `plusPtr` normalOffset))
GL.vertexAttribArray normalAttribute $= GL.Enabled
GL.vertexAttribPointer tangentAttribute $=
(GL.ToFloat
,GL.VertexArrayDescriptor 3
GL.Float
stride
(nullPtr `plusPtr` tangentOffset))
GL.vertexAttribArray tangentAttribute $= GL.Enabled
GL.vertexAttribPointer bitangentAttribute $=
(GL.ToFloat
,GL.VertexArrayDescriptor 3
GL.Float
stride
(nullPtr `plusPtr` bitangentOffset))
GL.vertexAttribArray bitangentAttribute $= GL.Enabled
GL.vertexAttribPointer uvAttribute $=
(GL.ToFloat
,GL.VertexArrayDescriptor 2
GL.Float
stride
(nullPtr `plusPtr` uvOffset))
GL.vertexAttribArray uvAttribute $= GL.Enabled
textureScaleFactor = 8.0e-2
wallVertices =
V.concatMap
(\(s,e) ->
expandEdge (blueprintVertices IM.! s)
(blueprintVertices IM.! e))
blueprintWalls
where expandEdge start@(V2 x1 y1) end@(V2 x2 y2) =
let wallV = end ^-^ start
wallLen = norm wallV
scaledLen = wallLen * textureScaleFactor
n =
case perp (wallV ^* recip wallLen) of
V2 x y -> V3 x 0 y
v =
(blueprintCeiling - blueprintFloor) *
textureScaleFactor
in V.fromList $ getZipList $ Vertex <$>
ZipList [V3 x1 blueprintFloor y1
,V3 x1 blueprintCeiling y1
,V3 x2 blueprintFloor y2
,V3 x2 blueprintCeiling y2] <*>
ZipList (repeat n) <*>
ZipList (repeat $
case n of
V3 x 0 y ->
V3 y 0 x) <*>
ZipList (repeat $
V3 0 (-1) 0) <*>
ZipList [V2 0 0,V2 0 v,V2 scaledLen 0,V2 scaledLen v]
wallIndices =
V.concatMap id $
V.imap (\m _ ->
let n = m * 4
in V.map fromIntegral [n,n + 2,n + 1,n + 1,n + 2,n + 3])
blueprintWalls
floorVertices =
V.map (\(V2 x y) ->
Vertex (V3 x blueprintFloor y)
(V3 0 1 0)
(V3 1 0 0)
(V3 0 0 1)
(V2 x y ^*
textureScaleFactor))
(V.fromList $ IM.elems blueprintVertices)
ceilingVertices =
V.map (\(Vertex p n t bn uv) ->
Vertex (p & _y .~ blueprintCeiling)
(negate n)
t
bn
uv)
floorVertices
floorIndices =
let n = fromIntegral $ V.length wallVertices
in fmap (fromIntegral . (+ n)) $
triangulate (V.fromList $ IM.elems blueprintVertices)
ceilingIndices =
let reverseTriangles v =
case V.splitAt 3 v of
(h,t)
| V.length h == 3 ->
[h V.! 0,h V.! 2,h V.! 1] V.++
reverseTriangles t
_ -> []
in V.map (+ (fromIntegral $ V.length floorVertices))
(reverseTriangles floorIndices)
initializeIBO =
do let indices :: V.Vector Int32
indices = wallIndices <> floorIndices <> ceilingIndices
ibo <- GL.genObjectName
GL.bindBuffer GL.ElementArrayBuffer $=
Just ibo
SV.unsafeWith (V.convert indices) $
\indicesPtr ->
GL.bufferData GL.ElementArrayBuffer $=
(fromIntegral
(V.length indices *
sizeOf (0 :: Int32))
,indicesPtr
,GL.StaticDraw)
drawSectorTextured :: Sector -> IO ()
drawSectorTextured Sector{..} =
do activateMaterial sectorWallMaterial
sectorDrawWalls
activateMaterial sectorFloorMaterial
sectorDrawFloor
activateMaterial sectorCeilingMaterial
sectorDrawCeiling
|
ekmett/hadoom
|
Sector.hs
|
bsd-3-clause
| 14,848 | 0 | 21 | 6,683 | 4,025 | 2,090 | 1,935 | 371 | 5 |
{-# LANGUAGE GADTs, RankNTypes #-}
module Text.Derp
( -- * Data Types
Parser, Token(..)
, -- * Parser construction
(<|>), (<~>), (==>), nul, ter, eps, emp
, -- * Parser computation steps
derive, compact, parseNull
, -- * Full parsing and result extraction
defaultCompactSteps, compactNum, deriveStepNum, runParseNum
, runParseStagesNum, runParseStages
, runParseLongestMatchNum, runParseLongestMatch
, deriveStep, runParse
, -- * Demos
xsR, xsL, xsIn, parens, parensIn, amb, ambIn, sexp, sexpIn
, someStuff, someStuffG
) where
import Data.Maybe
import Control.Monad
import Data.Char
import Data.Function
import Data.IORef
import Data.List
import Data.Map (Map)
import System.IO.Unsafe
import System.Mem.StableName
import Text.Printf
import Unsafe.Coerce
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
-- | Represents both a formal context-free language and the
-- reduction of a member of that language to a value of type `a'.
-- Languages range over `Token' values.
data Parser t a = Parser
{ parserRec :: ParserRec Parser t a
, parserNullable :: FPValue Bool
, parserEmpty :: FPValue Bool
, parserDerive :: Token t -> Parser t a
, parserCompact :: Parser t a
}
data ParserRec p t a where
Alt :: (Ord t, Ord a) => p t a -> p t a -> ParserRec p t a
Con :: (Ord t, Ord a, Ord b) => p t a -> p t b -> ParserRec p t (a, b)
Red :: (Ord t, Ord a, Ord b) => (Set a -> Set b) -> p t a -> ParserRec p t b
Nul :: (Ord t, Ord a) => p t a -> ParserRec p t a
Zip :: (Ord t, Ord a, Ord b) => p t a -> ContextR p t a b -> ParserRec p t b
Ter :: (Ord t) => Set t -> ParserRec p t String
Eps :: (Ord t, Ord a) => Set a -> ParserRec p t a
Emp :: (Ord t, Ord a) => ParserRec p t a
data ContextR p t a b where
ConContext :: (Ord t, Ord a, Ord b) => p t b -> ContextR p t (a, b) c -> ContextR p t a c
RedContext :: (Ord t, Ord a, Ord b) => (Set a -> Set b) -> ContextR p t b c -> ContextR p t a c
TopContext :: (Ord t, Ord a) => ContextR p t a a
type Context t a b = ContextR Parser t a b
data Token t = Token { tokenClass :: t, tokenValue :: String }
deriving (Eq, Ord, Show)
-- | The input type for parsing. For example the parser:
--
-- > ter "x"
--
-- will parse:
--
-- > Token "x" "foo"
--
-- into:
--
-- > eps "foo"
parser :: (Ord t, Ord a) => ParserRec Parser t a -> FPValue Bool -> FPValue Bool -> Parser t a
parser p n e = fix $ \ self -> Parser p n e (memoFun (deriveImp self)) (compactImp self)
-- | Alternation.
(<|>) :: (Ord t, Ord a) => Parser t a -> Parser t a -> Parser t a
(<|>) a b = parser (Alt a b) FPUndecided FPUndecided
-- | Concatenation.
(<~>) :: (Ord t, Ord a, Ord b) => Parser t a -> Parser t b -> Parser t (a, b)
(<~>) a b = parser (Con a b) FPUndecided FPUndecided
-- | Reduction.
(==>) :: (Ord t, Ord a, Ord b) => Parser t a -> (a -> b) -> Parser t b
(==>) p f = p ==>| Set.map f
-- | Set generalized version of `==>'.
(==>|) :: (Ord t, Ord a, Ord b) => Parser t a -> (Set a -> Set b) -> Parser t b
(==>|) p f = parser (Red f p) FPUndecided FPUndecided
-- | Null-parse extraction.
nul :: (Ord t, Ord a) => Parser t a -> Parser t a
nul p = parser (Nul p) FPUndecided FPUndecided
-- | One-hole-context focus.
pzip :: (Ord t, Ord a, Ord b) => Parser t a -> Context t a b -> Parser t b
pzip p c = parser (Zip p c) (FPDecided False) (FPDecided False)
-- | Terminal.
ter :: (Ord t) => t -> Parser t String
ter = terM . Set.singleton
-- | Set generalized version of `ter'.
terM :: (Ord t) => Set t -> Parser t String
terM tM = parser (Ter tM) (FPDecided False) (FPDecided False)
-- | Epsilon/empty-string.
eps :: (Ord t, Ord a) => a -> Parser t a
eps = epsM . Set.singleton
-- | Set generalized version of `eps'.
epsM :: (Ord t, Ord a) => Set a -> Parser t a
epsM e = parser (Eps e) (FPDecided True) (FPDecided False)
-- | The empty language.
emp :: (Ord t, Ord a) => Parser t a
emp = parser Emp (FPDecided False) (FPDecided True)
infixr 3 <~>
infixr 1 <|>
infix 2 ==>, ==>|
-- | Kleene Star
star :: (Ord t, Ord a) => Parser t a -> Parser t [a]
star p = r
where
r = eps [] <|> p <~> r ==> uncurry (:)
star1 :: (Ord t, Ord a) => Parser t a -> Parser t [a]
star1 p = p <~> star p ==> uncurry (:)
option :: (Ord t, Ord a) => Parser t a -> Parser t (Maybe a)
option p = r
where
r = eps Nothing <|> p ==> Just
terS :: (Ord t) => [t] -> Parser t String
terS ts = m ts ==> concat
where
m [] = eps []
m (a:as) = ter a <~> m as ==> uncurry (:)
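-- An illustrative use of the combinators above (not part of the original
-- documentation):
--
-- > runParse (star (ter "x") ==> concat)
-- >          [Token "x" "x", Token "x" "x", Token "x" "x"]
--
-- evaluates to:
--
-- > Set.fromList ["xxx"]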
-- | The main derivative function.
derive :: Parser t a -> Token t -> Parser t a
derive = parserDerive
deriveImp :: Parser t a -> Token t -> Parser t a
deriveImp p' x' = deriveImpRec (parserRec p') x'
where
deriveImpRec (Alt a b) x = derive a x <|> derive b x
deriveImpRec (Con a b) x = derive a x <~> b <|> nul a <~> derive b x
deriveImpRec (Red f a) x = derive a x ==>| f
deriveImpRec (Nul _) _ = emp
deriveImpRec (Zip p c) t = pzip (derive p t) c
deriveImpRec (Ter c) (Token x t) | x `Set.member` c = eps t | otherwise = emp
deriveImpRec (Eps _) _ = emp
deriveImpRec Emp _ = emp
-- | The optimization step of the algorithm.
compact :: Parser t a -> Parser t a
compact = parserCompact
compactImp :: (Ord t, Ord a) => Parser t a -> Parser t a
compactImp p = compactImpRec $ parserRec p
where
compactImpRec (Alt (Parser Emp _ _ _ _) (Parser Emp _ _ _ _)) = emp
compactImpRec (Alt (Parser Emp _ _ _ _) b) = compact b
compactImpRec (Alt a (Parser Emp _ _ _ _)) = compact a
compactImpRec (Alt (Parser (Eps sM) _ _ _ _) (Parser (Eps tM) _ _ _ _)) = epsM (sM `Set.union` tM)
compactImpRec (Alt a b) = (compact a <|> compact b)
{ parserNullable = parserNullable a <||> parserNullable b
, parserEmpty = parserEmpty a <&&> parserEmpty b
}
compactImpRec (Con (Parser Emp _ _ _ _) _) = emp
compactImpRec (Con _ (Parser Emp _ _ _ _)) = emp
compactImpRec (Con (Parser (Eps sM) _ _ _ _) b) = compact b ==>| (\ xM -> Set.fromList [ (s, x) | s <- Set.toList sM, x <- Set.toList xM ])
compactImpRec (Con a (Parser (Eps sM) _ _ _ _)) = compact a ==>| (\ xM -> Set.fromList [ (x, s) | x <- Set.toList xM, s <- Set.toList sM ])
compactImpRec (Con a b)
| parserNullable a == FPDecided False && parserNullable b == FPDecided False
&& parserEmpty a == FPDecided False && parserEmpty b == FPDecided False =
pzip (compact a) (ConContext (compact b) TopContext)
compactImpRec (Con a b) = (compact a <~> compact b)
{ parserNullable = parserNullable a <&&> parserNullable b
, parserEmpty = parserEmpty a <||> parserEmpty b
}
compactImpRec (Red _ (Parser Emp _ _ _ _)) = emp
compactImpRec (Red f (Parser (Eps sM) _ _ _ _)) = epsM (f sM)
compactImpRec (Red f (Parser (Red g a) _ _ _ _)) = compact a ==>| f . g
compactImpRec (Red f a) = (compact a ==>| f)
{ parserNullable = parserNullable a
, parserEmpty = parserEmpty a
}
compactImpRec (Nul (Parser (Con a b) _ _ _ _)) = nul (compact a) <~> nul (compact b)
compactImpRec (Nul (Parser (Alt a b) _ _ _ _)) = nul (compact a) <|> nul (compact b)
compactImpRec (Nul (Parser (Red f a) _ _ _ _)) = nul (compact a) ==>| f
compactImpRec (Nul (Parser (Zip a c) _ _ _ _)) = pzip (nul a) (nulContext c)
compactImpRec (Nul a@(Parser (Nul _) _ _ _ _)) = compact a
compactImpRec (Nul (Parser (Eps sM) _ _ _ _)) = epsM sM
compactImpRec (Nul (Parser (Ter _) _ _ _ _)) = emp
compactImpRec (Nul (Parser Emp _ _ _ _)) = emp
compactImpRec (Zip a TopContext) = compact a
compactImpRec (Zip (Parser Emp _ _ _ _) _) = emp
compactImpRec (Zip a c) | parserNullable a /= FPDecided False = unfoldOne (compact a) c
compactImpRec (Zip (Parser (Zip a c) _ _ _ _) d) = pzip (compact a) (thread c d)
compactImpRec (Zip (Parser (Red f a) _ _ _ _) c) = pzip (compact a) (RedContext f c)
compactImpRec (Zip a c) = pzip (compact a) c
compactImpRec (Ter _) = p
compactImpRec (Eps sM) | sM == Set.empty = emp
compactImpRec (Eps _) = p
compactImpRec Emp = p
nulContext :: Context t a b -> Context t a b
nulContext (ConContext a c) = ConContext (nul a) (nulContext c)
nulContext (RedContext f c) = RedContext f (nulContext c)
nulContext TopContext = TopContext
thread :: (Ord t, Ord a, Ord b, Ord c) => Context t a b -> Context t b c -> Context t a c
thread TopContext d = d
thread (RedContext f c) d = RedContext f (thread c d)
thread (ConContext a c) d = ConContext a (thread c d)
unfoldOne :: (Ord t, Ord a, Ord b) => Parser t a -> Context t a b -> Parser t b
unfoldOne a (ConContext b c) = pzip (a <~> b) c
unfoldOne a (RedContext f c) = unfoldOne (a ==>| f) c
unfoldOne _ TopContext = error "cannot unfold top"
-- | Extract the parse-null set of a parser.
parseNull :: (Ord t, Ord a) => Parser t a -> Set a
parseNull p = work $ nul p
where
work (Parser (Eps sM) _ _ _ _) = sM
work (Parser Emp _ _ _ _) = Set.empty
work other = work $ compact other
-- running parsers
-- | A specified number of compactions.
compactNum :: Int -> Parser t a -> Parser t a
compactNum 0 p = p
compactNum n p = compactNum (n - 1) (compact p)
-- | Derivation followed by a specified number of compactions.
deriveStepNum :: Int -> Parser t a -> Token t -> Parser t a
deriveStepNum n p i = compactNum n $ derive p i
-- | Parse using a specified number of intermediate compactions.
runParseNum :: (Ord t, Ord a) => Int -> Parser t a -> [Token t] -> Set a
runParseNum _ (Parser Emp _ _ _ _) _ = Set.empty
runParseNum _ p [] = parseNull p
runParseNum n p (i:is) = runParseNum n (deriveStepNum n p i) is
-- | The number of compact steps that usually keeps a parser constant in size
-- while parsing.
defaultCompactSteps :: Int
defaultCompactSteps = 10
-- | Derivation followed by the default number of compactions.
deriveStep :: Parser t a -> Token t -> Parser t a
deriveStep = deriveStepNum defaultCompactSteps
-- | Parse using the default number of intermediate compactions. This is the
-- main parsing function. Examples:
--
-- > let e = ter "num"
-- > <|> e <~> ter "+" <~> e ==> (\(x1,(o,x2)) -> "(" ++ x1 ++ o ++ x2 ++ ")")
-- > in runParse e [Token "num" "1", Token "+" "+", Token "num" "3", Token "+" "+", Token "num" "5"]
--
-- evaluates to:
--
-- > Set.fromList ["((1+3)+5)", "(1+(3+5))"]
--
-- > let e = ter "num" ==> read
-- > <|> e <~> ter "+" <~> e ==> (\(x1,(_,x2)) -> x1 + x2)
-- > in runParse e [Token "num" "1", Token "+" "+", Token "num" "3", Token "+" "+", Token "num" "5"]
--
-- evaluates to:
--
-- > Set.fromList [9]
--
runParse :: (Ord t, Ord a) => Parser t a -> [Token t] -> Set a
runParse = runParseNum defaultCompactSteps
runParseStagesNum :: (Ord t, Ord a) => Int -> Parser t a -> [Token t] -> [(Parser t a, Set a, [Token t])]
runParseStagesNum n p input = ((p, parseNull p, input) :) $
case input of
[] -> []
(i:is) -> runParseStagesNum n (deriveStepNum n p i) is
runParseStages :: (Ord t, Ord a) => Parser t a -> [Token t] -> [(Parser t a, Set a, [Token t])]
runParseStages = runParseStagesNum defaultCompactSteps
runParseLongestMatchNum :: (Ord t, Ord a) => Int -> Parser t a -> [Token t] -> Maybe (Int, Set a, [Token t])
runParseLongestMatchNum n p input = findLongestMatch 0 $ runParseStagesNum n p input
where
findLongestMatch _ [] = Nothing
findLongestMatch _ ((Parser Emp _ _ _ _, _, _):_) = Nothing
findLongestMatch l ((_, np, ts):others) = case findLongestMatch (l + 1) others of
(Just result) -> Just result
Nothing
| np == Set.empty -> Nothing
| otherwise -> Just (l, np, ts)
runParseLongestMatch :: (Ord t, Ord a) => Parser t a -> [Token t] -> Maybe (Int, Set a, [Token t])
runParseLongestMatch = runParseLongestMatchNum defaultCompactSteps
-- inspecting parsers
parserChildren :: Parser t a -> [GenParser]
parserChildren = parserRecChildren . parserRec
where
parserRecChildren (Con a b) = [genParser a, genParser b]
parserRecChildren (Alt a b) = [genParser a, genParser b]
parserRecChildren (Red _ a) = [genParser a]
parserRecChildren (Nul a) = [genParser a]
parserRecChildren (Zip a _) = [genParser a]
parserRecChildren (Ter _) = []
parserRecChildren (Eps _) = []
parserRecChildren Emp = []
foldlParserChildrenM :: (forall t b. c -> Parser t b -> IO c) -> c -> Parser t2 a -> IO c
foldlParserChildrenM f i p = foldM g i $ parserChildren p
where
g t (GenParser h) = h (f t)
newtype GenParser = GenParser { unGenParser :: forall c. (forall t b. Parser t b -> c) -> c }
genParser :: Parser t a -> GenParser
genParser p = GenParser $ \ f -> f p
runGenParser :: (forall t b. Parser t b -> c) -> GenParser -> c
runGenParser f g = unGenParser g f
data ParserRecType = ConType | AltType | RedType | NulType | ZipType | TerType | EpsType | EmpType
deriving (Eq, Ord, Show)
parserType :: Parser t a -> ParserRecType
parserType = parserRecType . parserRec
where
parserRecType (Con _ _) = ConType
parserRecType (Alt _ _) = AltType
parserRecType (Red _ _) = RedType
parserRecType (Nul _) = NulType
parserRecType (Zip _ _) = ZipType
parserRecType (Ter _) = TerType
parserRecType (Eps _) = EpsType
parserRecType Emp = EmpType
type ParserInspect b = (forall t a. Parser t a -> IO Integer)
-> (forall t a. Parser t a -> IO Bool)
-> (forall t a. Parser t a -> IO b)
inspectParser :: ParserInspect b -> Parser t a -> b
inspectParser f p = unsafePerformIO $ do
reifiedPt <- newIORef Map.empty
seenPt <- newIORef Map.empty
uidPt <- newIORef 1
f (lookupId reifiedPt uidPt) (seenId seenPt) p
lookupId :: IORef (Map Int [(StableName (), Integer)])
-> IORef Integer
-> Parser t a
-> IO Integer
lookupId reifiedPt uidPt p
| p `seq` True = do
stblName <- genericStableName p
let stblNameHashed = hashStableName stblName
lookupValM <- liftM (extraLookup stblNameHashed stblName) $ readIORef reifiedPt
case lookupValM of
(Just lookupVal) -> return lookupVal
Nothing -> do
thisId <- readIORef uidPt
modifyIORef uidPt (+ 1)
modifyIORef reifiedPt $ Map.insertWith (++) stblNameHashed [(stblName, thisId)]
return thisId
| otherwise = error "seq failed"
seenId :: IORef (Map Int [(StableName (), ())]) -> Parser t a -> IO Bool
seenId seenPt p
| p `seq` True = do
stblName <- genericStableName p
let stblNameHashed = hashStableName stblName
lookupValM <- liftM (extraLookup stblNameHashed stblName) $ readIORef seenPt
case lookupValM of
(Just ()) -> return True
Nothing -> do
modifyIORef seenPt $ Map.insertWith (++) stblNameHashed [(stblName, ())]
return False
| otherwise = error "seq failed"
genericStableName :: a -> IO (StableName ())
genericStableName = liftM unsafeCoerce . makeStableName
extraLookup :: Int -> StableName () -> Map Int [(StableName (), a)] -> Maybe a
extraLookup hashed key m = process $ Map.lookup hashed m
where
process x = case x of
(Just []) -> Nothing
(Just ((key', reified):xs)) | key == key' -> Just reified
| otherwise -> process (Just xs)
Nothing -> Nothing
type ParserFoldL b = forall t a. b -> Parser t a -> Integer -> Integer -> [Integer] -> b
parserDeepFoldL :: ParserFoldL b -> b -> Parser t a -> b
parserDeepFoldL f i = inspectParser $ inspectf f i
inspectf :: ParserFoldL t -> t -> ParserInspect t
inspectf f i uidM isSeenM p = do
isSeen <- isSeenM p
if isSeen then return i else do
uid <- uidM p
cuids <- mapM (runGenParser uidM) $ parserChildren p
let pid = hashStableName (unsafePerformIO (genericStableName p))
let next = f i p uid (fromIntegral pid) cuids
foldlParserChildrenM (\t p' -> inspectf f t uidM isSeenM p') next p
data ParserInfo = ParserInfo Integer -- uid
Integer -- pid
ParserRecType -- type
(FPValue Bool) -- nullable
[Integer] -- children
parserToGraph :: Parser t a -> [ParserInfo]
parserToGraph = reverse . parserDeepFoldL f []
where
f :: ParserFoldL [ParserInfo]
f others p uid pid childrenids = ParserInfo uid
pid
(parserType p)
(parserNullable p)
childrenids
: others
showParserGraph :: [ParserInfo] -> String
showParserGraph ps = printf "SIZE: %s \n" (show (length ps)) ++ intercalate "\n" (map showParserGraphSingle ps)
where
showParserGraphSingle :: ParserInfo -> String
showParserGraphSingle (ParserInfo uid pid ptype n children) =
printf "%-6s%-6s%-10s%-10s%-10s"
(show uid)
(show pid)
(show ptype)
(showFPBool n)
(show children)
parserSize :: Parser t a -> Int
parserSize = parserDeepFoldL f 0
where
f :: ParserFoldL Int
f n _ _ _ _ = n + 1
instance Show (Parser t a) where
show = showParserGraph . parserToGraph
-- FPValue
data FPValue a = FPDecided a | FPUndecided
deriving (Eq, Ord, Show)
showFPBool :: FPValue Bool -> String
showFPBool (FPDecided True) = "True"
showFPBool (FPDecided False) = "False"
showFPBool FPUndecided = "Undecided"
(<&&>) :: FPValue Bool -> FPValue Bool -> FPValue Bool
(<&&>) (FPDecided False) _ = FPDecided False
(<&&>) _ (FPDecided False) = FPDecided False
(<&&>) FPUndecided _ = FPUndecided
(<&&>) _ FPUndecided = FPUndecided
(<&&>) (FPDecided x) (FPDecided y) = FPDecided (x && y)
(<||>) :: FPValue Bool -> FPValue Bool -> FPValue Bool
(<||>) (FPDecided True) _ = FPDecided True
(<||>) _ (FPDecided True) = FPDecided True
(<||>) FPUndecided _ = FPUndecided
(<||>) _ FPUndecided = FPUndecided
(<||>) (FPDecided x) (FPDecided y) = FPDecided (x || y)
-- util
memoFun :: (Ord a) => (a -> b) -> a -> b
memoFun f = unsafePerformIO $ do
mapRef <- newIORef Map.empty
return $ \a -> unsafePerformIO $ do
currMap <- readIORef mapRef
let vM = Map.lookup a currMap
case vM of
Just b -> return b
Nothing -> do
let b = f a
writeIORef mapRef $ Map.insert a b currMap
return b
-- demos
xsR :: () -> Parser String String
xsR () = p
where
p = eps "" <|> ter "x" <~> p ==> uncurry (++)
xsL :: () -> Parser String String
xsL () = p
where
p = eps "" <|> p <~> ter "x" ==> uncurry (++)
xsIn :: [Token String]
xsIn = replicate 60 (Token "x" "x")
parens :: () -> Parser String String
parens () = p
where
p = eps "" <|> ter "(" <~> p <~> ter ")" ==> (\(s1,(s2,s3)) -> s1 ++ s2 ++ s3)
parensIn :: [Token String]
parensIn = replicate 80 (Token "(" "(") ++ replicate 80 (Token ")" ")")
amb :: () -> Parser String String
amb () = p
where
p = ter "1" <|> p <~> ter "+" <~> p ==> (\(s1,(s2,s3)) -> "(" ++ s1 ++ s2 ++ s3 ++ ")")
ambIn :: [Token String]
ambIn = intersperse (Token "+" "+") (replicate 7 (Token "1" "1"))
sexp :: () -> Parser String String
sexp () = p
where
p = ter "(" <~> pl <~> ter ")" ==> (\(s1,(s2,s3)) -> s1 ++ s2 ++ s3) <|> ter "s"
pl = pl <~> p ==> uncurry (++) <|> eps ""
sexpIn :: [Token String]
sexpIn = map (\x -> Token x x) $ words "( s ( s ( s s ( s s s ( s s s ( s ) ( s s ) s s ) s s ) s ) s ) )"
makeSExpIn :: Int -> [Token String]
makeSExpIn n = map (\x -> Token x x) . words $ "( " ++ build n "s" ++ " )"
where
build 0 x = x
build n s = build (n - 1) s'
where
s' = "s ( " ++ s ++ " )"
someStuff :: [Token String]
someStuff = map (\x -> Token x x) $ words "x x x x y y y x x"
someStuffG :: () -> Parser String String
someStuffG () = p
where
p = eps "" <|> p <~> ter "x" ==> uncurry (++)
nilsE :: () -> Parser String ()
nilsE () = expr
where
expr = op <|> atom
op = expr <~> internal ==> const ()
atom = ter "x" ==> const ()
internal = ter "[" <~> expr <~> ter "]" ==> const ()
exprIn :: Int -> [String]
exprIn n =
foldr (.) id
(replicate n (\s -> ("x" :) . ("[" :) . s . ("]" :)))
("x" :)
[]
exprIn2 :: [String]
exprIn2 = words "x [ x ] [ x ]"
-- lexing
stepParsers :: (Ord t, Ord a) => [Parser t a] -> [Token t] -> [(Int, Set a, [Token t])]
stepParsers ps ts = catMaybes $ map (flip runParseLongestMatch ts) ps
longestFirstMatch :: [(Int, Set a, [Token t])] -> Maybe (a, [Token t])
longestFirstMatch rs = fmap extract $ foldl pick Nothing rs
where
pick Nothing s = Just s
pick tM@(Just (tlen, _, _)) c@(clen, _, _) | clen > tlen = Just c
| otherwise = tM
extract (_, res, con) = (Set.toList res !! 0, con)
fullLex :: (Show t, Ord t, Ord a) => [Parser t a] -> [Token t] -> Either String [a]
fullLex ps [] = Right []
fullLex ps ts = case longestFirstMatch (stepParsers ps ts) of
Nothing -> Left $ printf "cannot parse: %s" (show ts)
Just (r, ts') -> fmap (r :) $ fullLex ps ts'
charToken :: Char -> Token Char
charToken c = Token c [c]
-- sizes
reportSizes :: Parser t a -> [Token t] -> String
reportSizes = reportSizesN 0
reportSizesN :: Int -> Parser t a -> [Token t] -> String
reportSizesN _ _ [] = ""
reportSizesN n p (i:is) = printf "%3s :: %s\n" (show n) (show size) ++ reportSizesN (n + 1) p' is
where
p' = deriveStep p i
size = parserSize p'
|
taktoa/derp
|
src/Text/Derp.hs
|
bsd-3-clause
| 21,719 | 0 | 21 | 5,789 | 9,194 | 4,686 | 4,508 | -1 | -1 |
--------------------------------------------------------------------------------
-- | Top-level module exporting all modules that are interesting for the user
{-# LANGUAGE CPP #-}
module Hakyll
( module Hakyll.Core.Compiler
, module Hakyll.Core.Configuration
, module Hakyll.Core.File
, module Hakyll.Core.Identifier
, module Hakyll.Core.Identifier.Pattern
, module Hakyll.Core.Item
, module Hakyll.Core.Metadata
, module Hakyll.Core.Routes
, module Hakyll.Core.Rules
, module Hakyll.Core.UnixFilter
, module Hakyll.Core.Util.File
, module Hakyll.Core.Util.String
, module Hakyll.Core.Writable
, module Hakyll.Main
, module Hakyll.Web.CompressCss
, module Hakyll.Web.Feed
, module Hakyll.Web.Html
, module Hakyll.Web.Html.RelativizeUrls
, module Hakyll.Web.Pandoc
, module Hakyll.Web.Pandoc.Biblio
, module Hakyll.Web.Pandoc.FileType
, module Hakyll.Web.Tags
, module Hakyll.Web.Template
, module Hakyll.Web.Template.Context
, module Hakyll.Web.Template.List
, module Hakyll.Web.Template.Read
) where
--------------------------------------------------------------------------------
import Hakyll.Core.Compiler
import Hakyll.Core.Configuration
import Hakyll.Core.File
import Hakyll.Core.Identifier
import Hakyll.Core.Identifier.Pattern
import Hakyll.Core.Item
import Hakyll.Core.Metadata
import Hakyll.Core.Routes
import Hakyll.Core.Rules
import Hakyll.Core.UnixFilter
import Hakyll.Core.Util.File
import Hakyll.Core.Util.String
import Hakyll.Core.Writable
import Hakyll.Main
import Hakyll.Web.CompressCss
import Hakyll.Web.Feed
import Hakyll.Web.Html
import Hakyll.Web.Html.RelativizeUrls
import Hakyll.Web.Pandoc
import Hakyll.Web.Pandoc.Biblio
import Hakyll.Web.Pandoc.FileType
import Hakyll.Web.Tags
import Hakyll.Web.Template
import Hakyll.Web.Template.Context
import Hakyll.Web.Template.List
import Hakyll.Web.Template.Read
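-- A minimal usage sketch (illustration only; assumes OverloadedStrings for
-- the "posts/*" pattern literal). A site executable typically imports this
-- module wholesale:
--
-- > import Hakyll
-- >
-- > main :: IO ()
-- > main = hakyll $
-- >   match "posts/*" $ do
-- >     route   (setExtension "html")
-- >     compile (pandocCompiler >>= relativizeUrls)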
|
bergmark/hakyll
|
src/Hakyll.hs
|
bsd-3-clause
| 2,199 | 0 | 5 | 523 | 364 | 258 | 106 | 54 | 0 |
{-# LANGUAGE TupleSections, OverloadedStrings, QuasiQuotes, TemplateHaskell, TypeFamilies, RecordWildCards,
DeriveGeneric ,MultiParamTypeClasses ,FlexibleInstances #-}
module Protocol.ROC.PointTypes.PointType86 where
import GHC.Generics
import Data.Word
import Data.Binary
import Protocol.ROC.Utils
data PointType86 = PointType86 {
pointType86MaxNumExtHistPnts :: !PointType86MaxNumExtHistPnts
,pointType86SampleLogInterval :: !PointType86SampleLogInterval
,pointType86PointTagIDTLP1 :: !PointType86PointTagIDTLP1
,pointType86ExtHistLogPnt1 :: !PointType86ExtHistLogPnt1
,pointType86ArchiveType1 :: !PointType86ArchiveType1
,pointType86AveragingorRateType1 :: !PointType86AveragingorRateType1
,pointType86PointTagIDTLP2 :: !PointType86PointTagIDTLP2
,pointType86ExtHistLogPnt2 :: !PointType86ExtHistLogPnt2
,pointType86ArchiveType2 :: !PointType86ArchiveType2
,pointType86AveragingorRateType2 :: !PointType86AveragingorRateType2
,pointType86PointTagIDTLP3 :: !PointType86PointTagIDTLP3
,pointType86ExtHistLogPnt3 :: !PointType86ExtHistLogPnt3
,pointType86ArchiveType3 :: !PointType86ArchiveType3
,pointType86AveragingorRateType3 :: !PointType86AveragingorRateType3
,pointType86PointTagIDTLP4 :: !PointType86PointTagIDTLP4
,pointType86ExtHistLogPnt4 :: !PointType86ExtHistLogPnt4
,pointType86ArchiveType4 :: !PointType86ArchiveType4
,pointType86AveragingorRateType4 :: !PointType86AveragingorRateType4
,pointType86PointTagIDTLP5 :: !PointType86PointTagIDTLP5
,pointType86ExtHistLogPnt5 :: !PointType86ExtHistLogPnt5
,pointType86ArchiveType5 :: !PointType86ArchiveType5
,pointType86AveragingorRateType5 :: !PointType86AveragingorRateType5
,pointType86PointTagIDTLP6 :: !PointType86PointTagIDTLP6
,pointType86ExtHistLogPnt6 :: !PointType86ExtHistLogPnt6
,pointType86ArchiveType6 :: !PointType86ArchiveType6
,pointType86AveragingorRateType6 :: !PointType86AveragingorRateType6
,pointType86PointTagIDTLP7 :: !PointType86PointTagIDTLP7
,pointType86ExtHistLogPnt7 :: !PointType86ExtHistLogPnt7
,pointType86ArchiveType7 :: !PointType86ArchiveType7
,pointType86AveragingorRateType7 :: !PointType86AveragingorRateType7
,pointType86PointTagIDTLP8 :: !PointType86PointTagIDTLP8
,pointType86ExtHistLogPnt8 :: !PointType86ExtHistLogPnt8
,pointType86ArchiveType8 :: !PointType86ArchiveType8
,pointType86AveragingorRateType8 :: !PointType86AveragingorRateType8
,pointType86PointTagIDTLP9 :: !PointType86PointTagIDTLP9
,pointType86ExtHistLogPnt9 :: !PointType86ExtHistLogPnt9
,pointType86ArchiveType9 :: !PointType86ArchiveType9
,pointType86AveragingorRateType9 :: !PointType86AveragingorRateType9
,pointType86PointTagIDTLP10 :: !PointType86PointTagIDTLP10
,pointType86ExtHistLogPnt10 :: !PointType86ExtHistLogPnt10
,pointType86ArchiveType10 :: !PointType86ArchiveType10
,pointType86AveragingorRateType10 :: !PointType86AveragingorRateType10
,pointType86PointTagIDTLP11 :: !PointType86PointTagIDTLP11
,pointType86ExtHistLogPnt11 :: !PointType86ExtHistLogPnt11
,pointType86ArchiveType11 :: !PointType86ArchiveType11
,pointType86AveragingorRateType11 :: !PointType86AveragingorRateType11
,pointType86PointTagIDTLP12 :: !PointType86PointTagIDTLP12
,pointType86ExtHistLogPnt12 :: !PointType86ExtHistLogPnt12
,pointType86ArchiveType12 :: !PointType86ArchiveType12
,pointType86AveragingorRateType12 :: !PointType86AveragingorRateType12
,pointType86PointTagIDTLP13 :: !PointType86PointTagIDTLP13
,pointType86ExtHistLogPnt13 :: !PointType86ExtHistLogPnt13
,pointType86ArchiveType13 :: !PointType86ArchiveType13
,pointType86AveragingorRateType13 :: !PointType86AveragingorRateType13
,pointType86PointTagIDTLP14 :: !PointType86PointTagIDTLP14
,pointType86ExtHistLogPnt14 :: !PointType86ExtHistLogPnt14
,pointType86ArchiveType14 :: !PointType86ArchiveType14
,pointType86AveragingorRateType14 :: !PointType86AveragingorRateType14
,pointType86PointTagIDTLP15 :: !PointType86PointTagIDTLP15
,pointType86ExtHistLogPnt15 :: !PointType86ExtHistLogPnt15
,pointType86ArchiveType15 :: !PointType86ArchiveType15
,pointType86AveragingorRateType15 :: !PointType86AveragingorRateType15
,pointType86PointTagIDTLP16 :: !PointType86PointTagIDTLP16
,pointType86ExtHistLogPnt16 :: !PointType86ExtHistLogPnt16
,pointType86ArchiveType16 :: !PointType86ArchiveType16
,pointType86AveragingorRateType16 :: !PointType86AveragingorRateType16
,pointType86PointTagIDTLP17 :: !PointType86PointTagIDTLP17
,pointType86ExtHistLogPnt17 :: !PointType86ExtHistLogPnt17
,pointType86ArchiveType17 :: !PointType86ArchiveType17
,pointType86AveragingorRateType17 :: !PointType86AveragingorRateType17
,pointType86PointTagIDTLP18 :: !PointType86PointTagIDTLP18
,pointType86ExtHistLogPnt18 :: !PointType86ExtHistLogPnt18
,pointType86ArchiveType18 :: !PointType86ArchiveType18
,pointType86AveragingorRateType18 :: !PointType86AveragingorRateType18
,pointType86PointTagIDTLP19 :: !PointType86PointTagIDTLP19
,pointType86ExtHistLogPnt19 :: !PointType86ExtHistLogPnt19
,pointType86ArchiveType19 :: !PointType86ArchiveType19
,pointType86AveragingorRateType19 :: !PointType86AveragingorRateType19
,pointType86PointTagIDTLP20 :: !PointType86PointTagIDTLP20
,pointType86ExtHistLogPnt20 :: !PointType86ExtHistLogPnt20
,pointType86ArchiveType20 :: !PointType86ArchiveType20
,pointType86AveragingorRateType20 :: !PointType86AveragingorRateType20
,pointType86PointTagIDTLP21 :: !PointType86PointTagIDTLP21
,pointType86ExtHistLogPnt21 :: !PointType86ExtHistLogPnt21
,pointType86ArchiveType21 :: !PointType86ArchiveType21
,pointType86AveragingorRateType21 :: !PointType86AveragingorRateType21
,pointType86PointTagIDTLP22 :: !PointType86PointTagIDTLP22
,pointType86ExtHistLogPnt22 :: !PointType86ExtHistLogPnt22
,pointType86ArchiveType22 :: !PointType86ArchiveType22
,pointType86AveragingorRateType22 :: !PointType86AveragingorRateType22
,pointType86PointTagIDTLP23 :: !PointType86PointTagIDTLP23
,pointType86ExtHistLogPnt23 :: !PointType86ExtHistLogPnt23
,pointType86ArchiveType23 :: !PointType86ArchiveType23
,pointType86AveragingorRateType23 :: !PointType86AveragingorRateType23
,pointType86PointTagIDTLP24 :: !PointType86PointTagIDTLP24
,pointType86ExtHistLogPnt24 :: !PointType86ExtHistLogPnt24
,pointType86ArchiveType24 :: !PointType86ArchiveType24
,pointType86AveragingorRateType24 :: !PointType86AveragingorRateType24
,pointType86PointTagIDTLP25 :: !PointType86PointTagIDTLP25
,pointType86ExtHistLogPnt25 :: !PointType86ExtHistLogPnt25
,pointType86ArchiveType25 :: !PointType86ArchiveType25
,pointType86AveragingorRateType25 :: !PointType86AveragingorRateType25
,pointType86PointTagIDTLP26 :: !PointType86PointTagIDTLP26
,pointType86ExtHistLogPnt26 :: !PointType86ExtHistLogPnt26
,pointType86ArchiveType26 :: !PointType86ArchiveType26
,pointType86AveragingorRateType26 :: !PointType86AveragingorRateType26
,pointType86PointTagIDTLP27 :: !PointType86PointTagIDTLP27
,pointType86ExtHistLogPnt27 :: !PointType86ExtHistLogPnt27
,pointType86ArchiveType27 :: !PointType86ArchiveType27
,pointType86AveragingorRateType27 :: !PointType86AveragingorRateType27
,pointType86PointTagIDTLP28 :: !PointType86PointTagIDTLP28
,pointType86ExtHistLogPnt28 :: !PointType86ExtHistLogPnt28
,pointType86ArchiveType28 :: !PointType86ArchiveType28
,pointType86AveragingorRateType28 :: !PointType86AveragingorRateType28
,pointType86PointTagIDTLP29 :: !PointType86PointTagIDTLP29
,pointType86ExtHistLogPnt29 :: !PointType86ExtHistLogPnt29
,pointType86ArchiveType29 :: !PointType86ArchiveType29
,pointType86AveragingorRateType29 :: !PointType86AveragingorRateType29
,pointType86PointTagIDTLP30 :: !PointType86PointTagIDTLP30
,pointType86ExtHistLogPnt30 :: !PointType86ExtHistLogPnt30
,pointType86ArchiveType30 :: !PointType86ArchiveType30
,pointType86AveragingorRateType30 :: !PointType86AveragingorRateType30
,pointType86PointTagIDTLP31 :: !PointType86PointTagIDTLP31
,pointType86ExtHistLogPnt31 :: !PointType86ExtHistLogPnt31
,pointType86ArchiveType31 :: !PointType86ArchiveType31
,pointType86AveragingorRateType31 :: !PointType86AveragingorRateType31
,pointType86PointTagIDTLP32 :: !PointType86PointTagIDTLP32
,pointType86ExtHistLogPnt32 :: !PointType86ExtHistLogPnt32
,pointType86ArchiveType32 :: !PointType86ArchiveType32
,pointType86AveragingorRateType32 :: !PointType86AveragingorRateType32
,pointType86PointTagIDTLP33 :: !PointType86PointTagIDTLP33
,pointType86ExtHistLogPnt33 :: !PointType86ExtHistLogPnt33
,pointType86ArchiveType33 :: !PointType86ArchiveType33
,pointType86AveragingorRateType33 :: !PointType86AveragingorRateType33
,pointType86PointTagIDTLP34 :: !PointType86PointTagIDTLP34
,pointType86ExtHistLogPnt34 :: !PointType86ExtHistLogPnt34
,pointType86ArchiveType34 :: !PointType86ArchiveType34
,pointType86AveragingorRateType34 :: !PointType86AveragingorRateType34
,pointType86PointTagIDTLP35 :: !PointType86PointTagIDTLP35
,pointType86ExtHistLogPnt35 :: !PointType86ExtHistLogPnt35
,pointType86ArchiveType35 :: !PointType86ArchiveType35
,pointType86AveragingorRateType35 :: !PointType86AveragingorRateType35
,pointType86PointTagIDTLP36 :: !PointType86PointTagIDTLP36
,pointType86ExtHistLogPnt36 :: !PointType86ExtHistLogPnt36
,pointType86ArchiveType36 :: !PointType86ArchiveType36
,pointType86AveragingorRateType36 :: !PointType86AveragingorRateType36
,pointType86PointTagIDTLP37 :: !PointType86PointTagIDTLP37
,pointType86ExtHistLogPnt37 :: !PointType86ExtHistLogPnt37
,pointType86ArchiveType37 :: !PointType86ArchiveType37
,pointType86AveragingorRateType37 :: !PointType86AveragingorRateType37
,pointType86PointTagIDTLP38 :: !PointType86PointTagIDTLP38
,pointType86ExtHistLogPnt38 :: !PointType86ExtHistLogPnt38
,pointType86ArchiveType38 :: !PointType86ArchiveType38
,pointType86AveragingorRateType38 :: !PointType86AveragingorRateType38
,pointType86PointTagIDTLP39 :: !PointType86PointTagIDTLP39
,pointType86ExtHistLogPnt39 :: !PointType86ExtHistLogPnt39
,pointType86ArchiveType39 :: !PointType86ArchiveType39
,pointType86AveragingorRateType39 :: !PointType86AveragingorRateType39
,pointType86PointTagIDTLP40 :: !PointType86PointTagIDTLP40
,pointType86ExtHistLogPnt40 :: !PointType86ExtHistLogPnt40
,pointType86ArchiveType40 :: !PointType86ArchiveType40
,pointType86AveragingorRateType40 :: !PointType86AveragingorRateType40
,pointType86PointTagIDTLP41 :: !PointType86PointTagIDTLP41
,pointType86ExtHistLogPnt41 :: !PointType86ExtHistLogPnt41
,pointType86ArchiveType41 :: !PointType86ArchiveType41
,pointType86AveragingorRateType41 :: !PointType86AveragingorRateType41
,pointType86PointTagIDTLP42 :: !PointType86PointTagIDTLP42
,pointType86ExtHistLogPnt42 :: !PointType86ExtHistLogPnt42
,pointType86ArchiveType42 :: !PointType86ArchiveType42
,pointType86AveragingorRateType42 :: !PointType86AveragingorRateType42
,pointType86PointTagIDTLP43 :: !PointType86PointTagIDTLP43
,pointType86ExtHistLogPnt43 :: !PointType86ExtHistLogPnt43
,pointType86ArchiveType43 :: !PointType86ArchiveType43
,pointType86AveragingorRateType43 :: !PointType86AveragingorRateType43
,pointType86PointTagIDTLP44 :: !PointType86PointTagIDTLP44
,pointType86ExtHistLogPnt44 :: !PointType86ExtHistLogPnt44
,pointType86ArchiveType44 :: !PointType86ArchiveType44
,pointType86AveragingorRateType44 :: !PointType86AveragingorRateType44
,pointType86PointTagIDTLP45 :: !PointType86PointTagIDTLP45
,pointType86ExtHistLogPnt45 :: !PointType86ExtHistLogPnt45
,pointType86ArchiveType45 :: !PointType86ArchiveType45
,pointType86AveragingorRateType45 :: !PointType86AveragingorRateType45
,pointType86PointTagIDTLP46 :: !PointType86PointTagIDTLP46
,pointType86ExtHistLogPnt46 :: !PointType86ExtHistLogPnt46
,pointType86ArchiveType46 :: !PointType86ArchiveType46
,pointType86AveragingorRateType46 :: !PointType86AveragingorRateType46
,pointType86PointTagIDTLP47 :: !PointType86PointTagIDTLP47
,pointType86ExtHistLogPnt47 :: !PointType86ExtHistLogPnt47
,pointType86ArchiveType47 :: !PointType86ArchiveType47
,pointType86AveragingorRateType47 :: !PointType86AveragingorRateType47
,pointType86PointTagIDTLP48 :: !PointType86PointTagIDTLP48
,pointType86ExtHistLogPnt48 :: !PointType86ExtHistLogPnt48
,pointType86ArchiveType48 :: !PointType86ArchiveType48
,pointType86AveragingorRateType48 :: !PointType86AveragingorRateType48
,pointType86PointTagIDTLP49 :: !PointType86PointTagIDTLP49
,pointType86ExtHistLogPnt49 :: !PointType86ExtHistLogPnt49
,pointType86ArchiveType49 :: !PointType86ArchiveType49
,pointType86AveragingorRateType49 :: !PointType86AveragingorRateType49
,pointType86PointTagIDTLP50 :: !PointType86PointTagIDTLP50
,pointType86ExtHistLogPnt50 :: !PointType86ExtHistLogPnt50
,pointType86ArchiveType50 :: !PointType86ArchiveType50
,pointType86AveragingorRateType50 :: !PointType86AveragingorRateType50
} deriving (Read,Eq, Show, Generic)
type PointType86MaxNumExtHistPnts = Word8
type PointType86SampleLogInterval = Word8
type PointType86PointTagIDTLP1 = [Word8]
type PointType86ExtHistLogPnt1 = [Word8]
type PointType86ArchiveType1 = Word8
type PointType86AveragingorRateType1 = Word8
type PointType86PointTagIDTLP2 = [Word8]
type PointType86ExtHistLogPnt2 = [Word8]
type PointType86ArchiveType2 = Word8
type PointType86AveragingorRateType2 = Word8
type PointType86PointTagIDTLP3 = [Word8]
type PointType86ExtHistLogPnt3 = [Word8]
type PointType86ArchiveType3 = Word8
type PointType86AveragingorRateType3 = Word8
type PointType86PointTagIDTLP4 = [Word8]
type PointType86ExtHistLogPnt4 = [Word8]
type PointType86ArchiveType4 = Word8
type PointType86AveragingorRateType4 = Word8
type PointType86PointTagIDTLP5 = [Word8]
type PointType86ExtHistLogPnt5 = [Word8]
type PointType86ArchiveType5 = Word8
type PointType86AveragingorRateType5 = Word8
type PointType86PointTagIDTLP6 = [Word8]
type PointType86ExtHistLogPnt6 = [Word8]
type PointType86ArchiveType6 = Word8
type PointType86AveragingorRateType6 = Word8
type PointType86PointTagIDTLP7 = [Word8]
type PointType86ExtHistLogPnt7 = [Word8]
type PointType86ArchiveType7 = Word8
type PointType86AveragingorRateType7 = Word8
type PointType86PointTagIDTLP8 = [Word8]
type PointType86ExtHistLogPnt8 = [Word8]
type PointType86ArchiveType8 = Word8
type PointType86AveragingorRateType8 = Word8
type PointType86PointTagIDTLP9 = [Word8]
type PointType86ExtHistLogPnt9 = [Word8]
type PointType86ArchiveType9 = Word8
type PointType86AveragingorRateType9 = Word8
type PointType86PointTagIDTLP10 = [Word8]
type PointType86ExtHistLogPnt10 = [Word8]
type PointType86ArchiveType10 = Word8
type PointType86AveragingorRateType10 = Word8
type PointType86PointTagIDTLP11 = [Word8]
type PointType86ExtHistLogPnt11 = [Word8]
type PointType86ArchiveType11 = Word8
type PointType86AveragingorRateType11 = Word8
type PointType86PointTagIDTLP12 = [Word8]
type PointType86ExtHistLogPnt12 = [Word8]
type PointType86ArchiveType12 = Word8
type PointType86AveragingorRateType12 = Word8
type PointType86PointTagIDTLP13 = [Word8]
type PointType86ExtHistLogPnt13 = [Word8]
type PointType86ArchiveType13 = Word8
type PointType86AveragingorRateType13 = Word8
type PointType86PointTagIDTLP14 = [Word8]
type PointType86ExtHistLogPnt14 = [Word8]
type PointType86ArchiveType14 = Word8
type PointType86AveragingorRateType14 = Word8
type PointType86PointTagIDTLP15 = [Word8]
type PointType86ExtHistLogPnt15 = [Word8]
type PointType86ArchiveType15 = Word8
type PointType86AveragingorRateType15 = Word8
type PointType86PointTagIDTLP16 = [Word8]
type PointType86ExtHistLogPnt16 = [Word8]
type PointType86ArchiveType16 = Word8
type PointType86AveragingorRateType16 = Word8
type PointType86PointTagIDTLP17 = [Word8]
type PointType86ExtHistLogPnt17 = [Word8]
type PointType86ArchiveType17 = Word8
type PointType86AveragingorRateType17 = Word8
type PointType86PointTagIDTLP18 = [Word8]
type PointType86ExtHistLogPnt18 = [Word8]
type PointType86ArchiveType18 = Word8
type PointType86AveragingorRateType18 = Word8
type PointType86PointTagIDTLP19 = [Word8]
type PointType86ExtHistLogPnt19 = [Word8]
type PointType86ArchiveType19 = Word8
type PointType86AveragingorRateType19 = Word8
type PointType86PointTagIDTLP20 = [Word8]
type PointType86ExtHistLogPnt20 = [Word8]
type PointType86ArchiveType20 = Word8
type PointType86AveragingorRateType20 = Word8
type PointType86PointTagIDTLP21 = [Word8]
type PointType86ExtHistLogPnt21 = [Word8]
type PointType86ArchiveType21 = Word8
type PointType86AveragingorRateType21 = Word8
type PointType86PointTagIDTLP22 = [Word8]
type PointType86ExtHistLogPnt22 = [Word8]
type PointType86ArchiveType22 = Word8
type PointType86AveragingorRateType22 = Word8
type PointType86PointTagIDTLP23 = [Word8]
type PointType86ExtHistLogPnt23 = [Word8]
type PointType86ArchiveType23 = Word8
type PointType86AveragingorRateType23 = Word8
type PointType86PointTagIDTLP24 = [Word8]
type PointType86ExtHistLogPnt24 = [Word8]
type PointType86ArchiveType24 = Word8
type PointType86AveragingorRateType24 = Word8
type PointType86PointTagIDTLP25 = [Word8]
type PointType86ExtHistLogPnt25 = [Word8]
type PointType86ArchiveType25 = Word8
type PointType86AveragingorRateType25 = Word8
type PointType86PointTagIDTLP26 = [Word8]
type PointType86ExtHistLogPnt26 = [Word8]
type PointType86ArchiveType26 = Word8
type PointType86AveragingorRateType26 = Word8
type PointType86PointTagIDTLP27 = [Word8]
type PointType86ExtHistLogPnt27 = [Word8]
type PointType86ArchiveType27 = Word8
type PointType86AveragingorRateType27 = Word8
type PointType86PointTagIDTLP28 = [Word8]
type PointType86ExtHistLogPnt28 = [Word8]
type PointType86ArchiveType28 = Word8
type PointType86AveragingorRateType28 = Word8
type PointType86PointTagIDTLP29 = [Word8]
type PointType86ExtHistLogPnt29 = [Word8]
type PointType86ArchiveType29 = Word8
type PointType86AveragingorRateType29 = Word8
type PointType86PointTagIDTLP30 = [Word8]
type PointType86ExtHistLogPnt30 = [Word8]
type PointType86ArchiveType30 = Word8
type PointType86AveragingorRateType30 = Word8
type PointType86PointTagIDTLP31 = [Word8]
type PointType86ExtHistLogPnt31 = [Word8]
type PointType86ArchiveType31 = Word8
type PointType86AveragingorRateType31 = Word8
type PointType86PointTagIDTLP32 = [Word8]
type PointType86ExtHistLogPnt32 = [Word8]
type PointType86ArchiveType32 = Word8
type PointType86AveragingorRateType32 = Word8
type PointType86PointTagIDTLP33 = [Word8]
type PointType86ExtHistLogPnt33 = [Word8]
type PointType86ArchiveType33 = Word8
type PointType86AveragingorRateType33 = Word8
type PointType86PointTagIDTLP34 = [Word8]
type PointType86ExtHistLogPnt34 = [Word8]
type PointType86ArchiveType34 = Word8
type PointType86AveragingorRateType34 = Word8
type PointType86PointTagIDTLP35 = [Word8]
type PointType86ExtHistLogPnt35 = [Word8]
type PointType86ArchiveType35 = Word8
type PointType86AveragingorRateType35 = Word8
type PointType86PointTagIDTLP36 = [Word8]
type PointType86ExtHistLogPnt36 = [Word8]
type PointType86ArchiveType36 = Word8
type PointType86AveragingorRateType36 = Word8
type PointType86PointTagIDTLP37 = [Word8]
type PointType86ExtHistLogPnt37 = [Word8]
type PointType86ArchiveType37 = Word8
type PointType86AveragingorRateType37 = Word8
type PointType86PointTagIDTLP38 = [Word8]
type PointType86ExtHistLogPnt38 = [Word8]
type PointType86ArchiveType38 = Word8
type PointType86AveragingorRateType38 = Word8
type PointType86PointTagIDTLP39 = [Word8]
type PointType86ExtHistLogPnt39 = [Word8]
type PointType86ArchiveType39 = Word8
type PointType86AveragingorRateType39 = Word8
type PointType86PointTagIDTLP40 = [Word8]
type PointType86ExtHistLogPnt40 = [Word8]
type PointType86ArchiveType40 = Word8
type PointType86AveragingorRateType40 = Word8
type PointType86PointTagIDTLP41 = [Word8]
type PointType86ExtHistLogPnt41 = [Word8]
type PointType86ArchiveType41 = Word8
type PointType86AveragingorRateType41 = Word8
type PointType86PointTagIDTLP42 = [Word8]
type PointType86ExtHistLogPnt42 = [Word8]
type PointType86ArchiveType42 = Word8
type PointType86AveragingorRateType42 = Word8
type PointType86PointTagIDTLP43 = [Word8]
type PointType86ExtHistLogPnt43 = [Word8]
type PointType86ArchiveType43 = Word8
type PointType86AveragingorRateType43 = Word8
type PointType86PointTagIDTLP44 = [Word8]
type PointType86ExtHistLogPnt44 = [Word8]
type PointType86ArchiveType44 = Word8
type PointType86AveragingorRateType44 = Word8
type PointType86PointTagIDTLP45 = [Word8]
type PointType86ExtHistLogPnt45 = [Word8]
type PointType86ArchiveType45 = Word8
type PointType86AveragingorRateType45 = Word8
type PointType86PointTagIDTLP46 = [Word8]
type PointType86ExtHistLogPnt46 = [Word8]
type PointType86ArchiveType46 = Word8
type PointType86AveragingorRateType46 = Word8
type PointType86PointTagIDTLP47 = [Word8]
type PointType86ExtHistLogPnt47 = [Word8]
type PointType86ArchiveType47 = Word8
type PointType86AveragingorRateType47 = Word8
type PointType86PointTagIDTLP48 = [Word8]
type PointType86ExtHistLogPnt48 = [Word8]
type PointType86ArchiveType48 = Word8
type PointType86AveragingorRateType48 = Word8
type PointType86PointTagIDTLP49 = [Word8]
type PointType86ExtHistLogPnt49 = [Word8]
type PointType86ArchiveType49 = Word8
type PointType86AveragingorRateType49 = Word8
type PointType86PointTagIDTLP50 = [Word8]
type PointType86ExtHistLogPnt50 = [Word8]
type PointType86ArchiveType50 = Word8
type PointType86AveragingorRateType50 = Word8
pointType86Parser :: Get PointType86
pointType86Parser = do
maxNumExtHistPnts <- getWord8
sampleLogInterval <- getWord8
pointTagIDTLP1 <- getTLP
extHistLogPnt1 <- getTLP
archiveType1 <- getWord8
averagingorRateType1 <- getWord8
pointTagIDTLP2 <- getTLP
extHistLogPnt2 <- getTLP
archiveType2 <- getWord8
averagingorRateType2 <- getWord8
pointTagIDTLP3 <- getTLP
extHistLogPnt3 <- getTLP
archiveType3 <- getWord8
averagingorRateType3 <- getWord8
pointTagIDTLP4 <- getTLP
extHistLogPnt4 <- getTLP
archiveType4 <- getWord8
averagingorRateType4 <- getWord8
pointTagIDTLP5 <- getTLP
extHistLogPnt5 <- getTLP
archiveType5 <- getWord8
averagingorRateType5 <- getWord8
pointTagIDTLP6 <- getTLP
extHistLogPnt6 <- getTLP
archiveType6 <- getWord8
averagingorRateType6 <- getWord8
pointTagIDTLP7 <- getTLP
extHistLogPnt7 <- getTLP
archiveType7 <- getWord8
averagingorRateType7 <- getWord8
pointTagIDTLP8 <- getTLP
extHistLogPnt8 <- getTLP
archiveType8 <- getWord8
averagingorRateType8 <- getWord8
pointTagIDTLP9 <- getTLP
extHistLogPnt9 <- getTLP
archiveType9 <- getWord8
averagingorRateType9 <- getWord8
pointTagIDTLP10 <- getTLP
extHistLogPnt10 <- getTLP
archiveType10 <- getWord8
averagingorRateType10 <-getWord8
pointTagIDTLP11 <- getTLP
extHistLogPnt11 <- getTLP
archiveType11 <- getWord8
averagingorRateType11 <- getWord8
pointTagIDTLP12 <- getTLP
extHistLogPnt12 <- getTLP
archiveType12 <- getWord8
averagingorRateType12 <- getWord8
pointTagIDTLP13 <- getTLP
extHistLogPnt13 <- getTLP
archiveType13 <- getWord8
averagingorRateType13 <- getWord8
pointTagIDTLP14 <- getTLP
extHistLogPnt14 <- getTLP
archiveType14 <- getWord8
averagingorRateType14 <- getWord8
pointTagIDTLP15 <- getTLP
extHistLogPnt15 <- getTLP
archiveType15 <- getWord8
averagingorRateType15 <- getWord8
pointTagIDTLP16 <- getTLP
extHistLogPnt16 <- getTLP
archiveType16 <- getWord8
averagingorRateType16 <- getWord8
pointTagIDTLP17 <- getTLP
extHistLogPnt17 <- getTLP
archiveType17 <- getWord8
averagingorRateType17 <- getWord8
pointTagIDTLP18 <- getTLP
extHistLogPnt18 <- getTLP
archiveType18 <- getWord8
averagingorRateType18 <- getWord8
pointTagIDTLP19 <- getTLP
extHistLogPnt19 <- getTLP
archiveType19 <- getWord8
averagingorRateType19 <- getWord8
pointTagIDTLP20 <- getTLP
extHistLogPnt20 <- getTLP
archiveType20 <- getWord8
averagingorRateType20 <- getWord8
pointTagIDTLP21 <- getTLP
extHistLogPnt21 <- getTLP
archiveType21 <- getWord8
averagingorRateType21 <- getWord8
pointTagIDTLP22 <- getTLP
extHistLogPnt22 <- getTLP
archiveType22 <- getWord8
averagingorRateType22 <- getWord8
pointTagIDTLP23 <- getTLP
extHistLogPnt23 <- getTLP
archiveType23 <- getWord8
averagingorRateType23 <- getWord8
pointTagIDTLP24 <- getTLP
extHistLogPnt24 <- getTLP
archiveType24 <- getWord8
averagingorRateType24 <- getWord8
pointTagIDTLP25 <- getTLP
extHistLogPnt25 <- getTLP
archiveType25 <- getWord8
averagingorRateType25 <- getWord8
pointTagIDTLP26 <- getTLP
extHistLogPnt26 <- getTLP
archiveType26 <- getWord8
averagingorRateType26 <- getWord8
pointTagIDTLP27 <- getTLP
extHistLogPnt27 <- getTLP
archiveType27 <- getWord8
averagingorRateType27 <- getWord8
pointTagIDTLP28 <- getTLP
extHistLogPnt28 <- getTLP
archiveType28 <- getWord8
averagingorRateType28 <- getWord8
pointTagIDTLP29 <- getTLP
extHistLogPnt29 <- getTLP
archiveType29 <- getWord8
averagingorRateType29 <- getWord8
pointTagIDTLP30 <- getTLP
extHistLogPnt30 <- getTLP
archiveType30 <- getWord8
averagingorRateType30 <- getWord8
pointTagIDTLP31 <- getTLP
extHistLogPnt31 <- getTLP
archiveType31 <- getWord8
averagingorRateType31 <- getWord8
pointTagIDTLP32 <- getTLP
extHistLogPnt32 <- getTLP
archiveType32 <- getWord8
averagingorRateType32 <- getWord8
pointTagIDTLP33 <- getTLP
extHistLogPnt33 <- getTLP
archiveType33 <- getWord8
averagingorRateType33 <- getWord8
pointTagIDTLP34 <- getTLP
extHistLogPnt34 <- getTLP
archiveType34 <- getWord8
averagingorRateType34 <- getWord8
pointTagIDTLP35 <- getTLP
extHistLogPnt35 <- getTLP
archiveType35 <- getWord8
averagingorRateType35 <-getWord8
pointTagIDTLP36 <- getTLP
extHistLogPnt36 <- getTLP
archiveType36 <- getWord8
averagingorRateType36 <- getWord8
pointTagIDTLP37 <- getTLP
extHistLogPnt37 <- getTLP
archiveType37 <- getWord8
averagingorRateType37 <- getWord8
pointTagIDTLP38 <- getTLP
extHistLogPnt38 <- getTLP
archiveType38 <- getWord8
averagingorRateType38 <- getWord8
pointTagIDTLP39 <- getTLP
extHistLogPnt39 <- getTLP
archiveType39 <- getWord8
averagingorRateType39 <- getWord8
pointTagIDTLP40 <- getTLP
extHistLogPnt40 <- getTLP
archiveType40 <- getWord8
averagingorRateType40 <- getWord8
pointTagIDTLP41 <- getTLP
extHistLogPnt41 <- getTLP
archiveType41 <- getWord8
averagingorRateType41 <- getWord8
pointTagIDTLP42 <- getTLP
extHistLogPnt42 <- getTLP
archiveType42 <- getWord8
averagingorRateType42 <- getWord8
pointTagIDTLP43 <- getTLP
extHistLogPnt43 <- getTLP
archiveType43 <- getWord8
averagingorRateType43 <- getWord8
pointTagIDTLP44 <- getTLP
extHistLogPnt44 <- getTLP
archiveType44 <- getWord8
averagingorRateType44 <- getWord8
pointTagIDTLP45 <- getTLP
extHistLogPnt45 <- getTLP
archiveType45 <- getWord8
averagingorRateType45 <- getWord8
pointTagIDTLP46 <- getTLP
extHistLogPnt46 <- getTLP
archiveType46 <- getWord8
averagingorRateType46 <- getWord8
pointTagIDTLP47 <- getTLP
extHistLogPnt47 <- getTLP
archiveType47 <- getWord8
averagingorRateType47 <- getWord8
pointTagIDTLP48 <- getTLP
extHistLogPnt48 <- getTLP
archiveType48 <- getWord8
averagingorRateType48 <- getWord8
pointTagIDTLP49 <- getTLP
extHistLogPnt49 <- getTLP
archiveType49 <- getWord8
averagingorRateType49 <- getWord8
pointTagIDTLP50 <- getTLP
extHistLogPnt50 <- getTLP
archiveType50 <- getWord8
averagingorRateType50 <- getWord8
return $ PointType86
maxNumExtHistPnts sampleLogInterval pointTagIDTLP1 extHistLogPnt1 archiveType1 averagingorRateType1 pointTagIDTLP2 extHistLogPnt2 archiveType2 averagingorRateType2
pointTagIDTLP3 extHistLogPnt3 archiveType3 averagingorRateType3 pointTagIDTLP4 extHistLogPnt4 archiveType4 averagingorRateType4 pointTagIDTLP5 extHistLogPnt5 archiveType5
averagingorRateType5 pointTagIDTLP6 extHistLogPnt6 archiveType6 averagingorRateType6 pointTagIDTLP7 extHistLogPnt7 archiveType7 averagingorRateType7 pointTagIDTLP8
extHistLogPnt8 archiveType8 averagingorRateType8 pointTagIDTLP9 extHistLogPnt9 archiveType9 averagingorRateType9 pointTagIDTLP10 extHistLogPnt10 archiveType10
averagingorRateType10 pointTagIDTLP11 extHistLogPnt11 archiveType11 averagingorRateType11 pointTagIDTLP12 extHistLogPnt12 archiveType12 averagingorRateType12 pointTagIDTLP13
extHistLogPnt13 archiveType13 averagingorRateType13 pointTagIDTLP14 extHistLogPnt14 archiveType14 averagingorRateType14 pointTagIDTLP15 extHistLogPnt15 archiveType15
averagingorRateType15 pointTagIDTLP16 extHistLogPnt16 archiveType16 averagingorRateType16 pointTagIDTLP17 extHistLogPnt17 archiveType17 averagingorRateType17 pointTagIDTLP18
extHistLogPnt18 archiveType18 averagingorRateType18 pointTagIDTLP19 extHistLogPnt19 archiveType19 averagingorRateType19 pointTagIDTLP20 extHistLogPnt20 archiveType20
averagingorRateType20 pointTagIDTLP21 extHistLogPnt21 archiveType21 averagingorRateType21 pointTagIDTLP22 extHistLogPnt22 archiveType22 averagingorRateType22 pointTagIDTLP23
extHistLogPnt23 archiveType23 averagingorRateType23 pointTagIDTLP24 extHistLogPnt24 archiveType24 averagingorRateType24 pointTagIDTLP25 extHistLogPnt25 archiveType25
averagingorRateType25 pointTagIDTLP26 extHistLogPnt26 archiveType26 averagingorRateType26 pointTagIDTLP27 extHistLogPnt27 archiveType27 averagingorRateType27 pointTagIDTLP28
extHistLogPnt28 archiveType28 averagingorRateType28 pointTagIDTLP29 extHistLogPnt29 archiveType29 averagingorRateType29 pointTagIDTLP30 extHistLogPnt30 archiveType30
averagingorRateType30 pointTagIDTLP31 extHistLogPnt31 archiveType31 averagingorRateType31 pointTagIDTLP32 extHistLogPnt32 archiveType32 averagingorRateType32 pointTagIDTLP33
extHistLogPnt33 archiveType33 averagingorRateType33 pointTagIDTLP34 extHistLogPnt34 archiveType34 averagingorRateType34 pointTagIDTLP35 extHistLogPnt35 archiveType35
averagingorRateType35 pointTagIDTLP36 extHistLogPnt36 archiveType36 averagingorRateType36 pointTagIDTLP37 extHistLogPnt37 archiveType37 averagingorRateType37 pointTagIDTLP38
extHistLogPnt38 archiveType38 averagingorRateType38 pointTagIDTLP39 extHistLogPnt39 archiveType39 averagingorRateType39 pointTagIDTLP40 extHistLogPnt40 archiveType40
averagingorRateType40 pointTagIDTLP41 extHistLogPnt41 archiveType41 averagingorRateType41 pointTagIDTLP42 extHistLogPnt42 archiveType42 averagingorRateType42 pointTagIDTLP43
extHistLogPnt43 archiveType43 averagingorRateType43 pointTagIDTLP44 extHistLogPnt44 archiveType44 averagingorRateType44 pointTagIDTLP45 extHistLogPnt45 archiveType45
averagingorRateType45 pointTagIDTLP46 extHistLogPnt46 archiveType46 averagingorRateType46 pointTagIDTLP47 extHistLogPnt47 archiveType47 averagingorRateType47 pointTagIDTLP48
extHistLogPnt48 archiveType48 averagingorRateType48 pointTagIDTLP49 extHistLogPnt49 archiveType49 averagingorRateType49 pointTagIDTLP50 extHistLogPnt50 archiveType50 averagingorRateType50
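-- Example usage (a sketch, not part of the original module): given a lazy
-- ByteString `raw` holding the raw Point Type 86 parameter bytes (e.g. taken
-- from a ROC protocol response), the record can be decoded with `runGet`
-- from Data.Binary.Get:
--
-- > import Data.Binary.Get (runGet)
-- > pt86 = runGet pointType86Parser raw
--
-- `raw` and the surrounding I/O are assumptions; `getTLP` is supplied by
-- Protocol.ROC.Utils, as imported above.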
|
jqpeterson/roc-translator
|
src/Protocol/ROC/PointTypes/PointType86.hs
|
bsd-3-clause
| 45,536 | 0 | 9 | 17,627 | 4,823 | 2,669 | 2,154 | 1,042 | 1 |
import Control.Applicative
import System.Console.OptMatch
import System.Console.OptMatch.Basic
import System.Console.OptMatch.Popular
import System.Environment(getArgs)
data Options = Options {
isSum :: Bool
, numbers :: [Integer]
} deriving (Show)
parser :: OptMatch Options
parser = popular defaultOptions $ \opts ->
opts { isSum = True } <$ keyword "--sum" <|>
(\n -> opts { numbers = n : numbers opts }) <$> integer where
defaultOptions = Options {
isSum = False
, numbers = []
}
main :: IO ()
main = do
args <- getArgs
case runOptMatch parser args of
Just (opts, args) -> let accumulate = if isSum opts then sum else maximum in
print $ accumulate $ numbers opts
Nothing -> putStrLn "Usage: [ --sum ] N [N ...]"
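-- Example runs (a sketch; assumes the module is compiled to an executable
-- named `sum-or-max`, and that `keyword` and `integer` behave as their names
-- suggest):
--
-- > $ sum-or-max 1 2 3 --sum
-- > 6
-- > $ sum-or-max 1 2 3
-- > 3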
|
pasberth/optmatch
|
examples/SumOrMax.hs
|
bsd-3-clause
| 763 | 0 | 15 | 165 | 252 | 138 | 114 | 23 | 3 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module MXml where
import Music
import Data.Ratio
import Control.Monad.State.Lazy
import Data.Maybe
import Data.List
-- BUG: Consider the case of multiple attributes in a single measure, e.g. multiple clefs in a single measure.
--
----------------------------------------------------------------------------------------------------
-- MusicXML Type
----------------------------------------------------------------------------------------------------
data MXStep = C | CD_ | D | DE_ | E | F | FG_ | G | GA_ | A | AB_ | B
deriving (Show, Read, Enum)
type MXTimeAnno = TimeAnno
type MXClefSign = ClefSign
data MXMode = MXMinor | MXMajor deriving (Eq)
-------------------------
data MXKey = MXKey {
  mxfifths :: Int, -- Number of sharps (positive) or flats (negative)
mxmode :: MXMode
} deriving (Show, Read)
data MXTime = MXTime {
mxbeats :: Int,
mxbeattype :: Int,
mxtimeanno :: Maybe MXTimeAnno
} deriving (Show, Read)
data MXClef = MXClef {
mxclefsign :: MXClefSign, -- Clef sign
mxclefline :: Int, -- Clef line
mxclefoctalt :: Maybe Int
} deriving (Show, Read)
-----------
data MXPitch = MXPitch {
mxstep :: MXStep, -- Step (C,D,F,etc)
  mxoctave :: Int, -- Octave
mxalter :: Maybe Int -- Pitch alter
} deriving (Show, Read)
data MXNoteType = Wn | Hn | Qn | En | Sn | Tn | S64n | On deriving (Enum)
-------------------------
data MXAttr = MXAttr {
mxdivs :: Maybe Int,
mxkey :: Maybe MXKey,
mxtime :: Maybe MXTime,
mxclef :: Maybe MXClef
} deriving (Show, Read)
data MXNote = MXNote {
mxpitch :: MXPitch,
mxduration :: Int,
mxvoice :: Int,
mxnotetype :: MXNoteType
} deriving (Show, Read)
-------------------------
data MXMeasElm = MXNoteElm MXNote |
MXAttrElm MXAttr |
MXMeasNum Int -- Int is measure number: <measure number="1">
deriving (Show, Read)
type MXMeasure = [MXMeasElm]
-- TODO: Parsing
-- * Note pitch annotating needs either:
-- 1) Note (absolute pitch, "accidental"); Modifier (key, clef)
-- * For double accidentals (double-sharp, flat-flat)
-- 2) Note (absolute pitch); Modifier (key, clef)
-- * Sufficient without oddball double accidental cases
-- 3) Note (absolute pitch); Modifier (key, clef, "accidental-prop")
data MusMXMeasState = MusMXMeasState {
sMeasNum :: Int,
sDivs :: Int,
sBeats :: Int,
sBeatType :: Int,
sPosition :: Duration
}
instance ConvertBothWay Music MXMeasure where
forward mxMeas = evalState (mapM f filteredMeasure) $ MusMXMeasState 1 0 0 0 (0%1) -- (MeasNum, divisions, beats, beat-type, position)
where filteredMeasure = filter notMeasurePartNumber mxMeas
notMeasurePartNumber (MXMeasNum _) = False
notMeasurePartNumber _ = True
f :: MXMeasElm -> State MusMXMeasState (Position, MusElm)
f (MXNoteElm n) = do
state <- get
let divisionDur = mxduration n
beatType = sBeatType state
divisions = sDivs state
noteDuration= divisionDur % (beatType * divisions)
ptch = forward $ mxpitch n
note = Note noteDuration ptch []
oldPosition = sPosition state
newPosition = oldPosition + noteDuration
put $ state {sPosition = newPosition}
return (oldPosition, NoteElm note)
f (MXAttrElm a) = do
let ms = [ (Just . ClefSym . forward) =<< (mxclef a)
, (Just . KeySym . forward) =<< (mxkey a)
, (Just . TimingSym . forward) =<< (mxtime a)
]
modsAnnos = ModElm $ sort $ catMaybes ms -- NOTE: Sorting at data conversion is safer as long as it is done consistently.
st <- get
put $ st { sDivs = fromMaybe (sDivs st) (mxdivs a)
, sBeats = maybe (sBeats st) mxbeats (mxtime a)
, sBeatType = maybe (sBeatType st) mxbeattype (mxtime a)
}
return (sPosition st, modsAnnos)
backward = undefined
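-- Worked example of the duration arithmetic above (a sketch): with
-- <divisions> = 24 and a beat-type of 4, an MXNote whose mxduration is 24
-- yields noteDuration = 24 % (4 * 24) = 1 % 4, i.e. a quarter of a whole
-- note, and sPosition advances by that amount.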
instance ConvertBothWay TimeAnno TimeAnno where
forward = id
backward = id
instance ConvertBothWay Key MXKey where
forward (MXKey f MXMinor) = Key f Minor
forward (MXKey f MXMajor) = Key f Major
backward (Key f Minor) = MXKey f MXMinor
backward (Key f Major) = MXKey f MXMajor
instance ConvertBothWay Clef MXClef where
forward (MXClef sign line Nothing) = forward (MXClef sign line (Just 0))
forward (MXClef sign line (Just octalt)) = Clef sign line octalt
backward (Clef sign line octalt) = MXClef sign line moctalt
where moctalt = if octalt == 0 then Nothing else Just octalt
instance ConvertBothWay Timing MXTime where
forward (MXTime beat beattype anno) = Timing beat beattype anno
backward (Timing beat beattype anno) = MXTime beat beattype anno
-- 58: (C,5,-2) = (B,4,-1) = (AB_,4,0) = (A,4,+1) = (GA_,4,+2)
-- 54: (G,4,-1) = (FG_,4,0) = (F,4,1)
-- DEPENDS on key, xml-accidental
instance ConvertBothWay Pitch MXPitch where
forward (MXPitch s o (Just a)) = (12 * o) + (fromEnum s) + a
forward (MXPitch s o Nothing) = forward $ MXPitch s o (Just 0)
backward pitch = MXPitch step octave alter -- NOTE: Must consider case of octave changing if going above/below C
where step = toEnum $ (pitch `mod` 12) + (if altTrue then (-1) else 0)
octave = quot pitch 12
alter = if altTrue then Just 1 else Nothing
altTrue= case (toEnum $ pitch `mod` 12) of
CD_ -> True
DE_ -> True
FG_ -> True
GA_ -> True
AB_ -> True
_ -> False
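-- Worked example of the encoding above (a sketch): forward (MXPitch A 4 (Just 1))
-- = 12 * 4 + 9 + 1 = 58, and backward 58 = MXPitch A 4 (Just 1) (an A#4), so
-- the round trip is stable for sharpened pitches.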
----------------------------------------------------------------------------------------------------
-- Read/Show Instances
instance Show MXNoteType where
show a = case a of
Wn -> "whole"
Hn -> "half"
Qn -> "quarter"
En -> "eight"
Sn -> "16th"
Tn -> "32nd"
S64n->"64th"
On -> "128th"
instance Read MXNoteType where
readsPrec _ a = case a of
"whole" -> [(Wn, "")]
"half" -> [(Hn, "")]
"quarter" -> [(Qn, "")]
"eight" -> [(En, "")]
"16th" -> [(Sn, "")]
"32nd" -> [(Tn, "")]
"64th" -> [(S64n, "")]
"128th" -> [(On, "")]
_ -> []
instance Show MXMode where
show MXMajor = "major"
show MXMinor = "minor"
instance Read MXMode where
readsPrec _ "major" = [(MXMajor, "")]
readsPrec _ "minor" = [(MXMinor, "")]
readsPrec _ _ = []
|
nickgeoca/music-browserside
|
MXml.hs
|
bsd-3-clause
| 6,885 | 0 | 17 | 2,102 | 1,806 | 992 | 814 | 144 | 0 |
module Main (main) where
import Test.Framework (defaultMain)
import qualified Socket
import qualified Wire
main :: IO ()
main = defaultMain
[ Socket.tests
, Wire.tests
]
|
abooij/sudbury
|
tests/Main.hs
|
mit
| 179 | 0 | 7 | 34 | 55 | 33 | 22 | 8 | 1 |
{- | Module : ./GMP/Parser.hs
- Description : Implementation of logic formula parser
- Copyright : (c) Georgel Calin & Lutz Schroeder, DFKI Lab Bremen
- License : GPLv2 or higher, see LICENSE.txt
- Maintainer : [email protected]
- Stability : provisional
- Portability : portable
-
- Provides the implementation of the generic parser for the Boole datatype
-}
module Parser where
import Text.ParserCombinators.Parsec
-- import GenericSequent
import ModalLogic
import CombLogic
data ModalOperator = Sqr | Ang | None deriving Eq
{- | Main parser
par5er :: ModalOperator -> [GenParser Char st a] -> GenParser Char st (Boole a) -}
par5er = implFormula
{- | Parser which translates all implications in disjunctions & conjunctions
implFormula :: ModalOperator -> [GenParser Char st a] -> GenParser Char st (Boole a) -}
implFormula flag logics = do
f <- orFormula flag logics
option f (do string "->"
spaces
i <- implFormula flag logics
return $ Not (And f (Not i))
<|> do try (string "<->")
spaces
i <- implFormula flag logics
return $ And (Not (And f (Not i))) (Not (And (Not f) i))
<|> do string "<-"
spaces
i <- implFormula flag logics
return $ And (Not f) i
<|> return f
<?> "GMPParser.implFormula")
{- | Parser for disjunction - used for handling binding order
orFormula :: ModalOperator -> [GenParser Char st a] -> GenParser Char st (Boole a) -}
orFormula flag logics = do
f <- andFormula flag logics
option f $ do
string "\\/"
spaces
g <- orFormula flag logics
return $ Not (And (Not f) (Not g))
<?> "GMPParser.orFormula"
{- | Parser for conjunction - used for handling the binding order
andFormula :: ModalOperator -> [GenParser Char st a] -> GenParser Char st (Boole a) -}
andFormula flag logics = do
f <- primFormula flag logics
option f $ do
string "/\\"
spaces
g <- andFormula flag logics
return $ And f g
<?> "GMPParser.andFormula"
{- | Parse a primitive formula: T, F, ~f, <i>f, [i]f,
- where i stands for an index, f for a formula/boolean expression
primFormula :: ModalOperator -> [GenParser Char st a] -> GenParser Char st (Boole a) -}
primFormula flag logics = do string "T"
spaces
return T
<|> do string "F"
spaces
return F
<|> parenFormula flag logics
<|> do string "~"
spaces
f <- primFormula flag logics
return $ Not f
<|> atomFormula flag logics
<?> "GMPParser.primFormula"
-- modalAtom :: ModalOperator -> [Int] -> GenParser Char st (Boole a)
atomFormula flag logics = do char '<'
spaces
let h = head logics
let t = tail logics
case h of
1 -> do parseKindex
spaces
char '>'
spaces
f <- primFormula flag $ t ++ [h]
case flag of
-- FIXME: cannot construct the infinite type
Ang -> return $ At (K f) -- M i f
Sqr -> return $ Not (At (K (Not f)))
_ -> return $ At (K f)
{-
2 -> do parseKDindex
spaces
char '>'
spaces
f <- primFormula flag $ t ++ [h]
case flag of
Ang -> return $ At (KD f)--M i f
Sqr -> return $ Not (At (KD (Not f)))
_ -> return $ At (KD f)
_ -> do aux <- parseGindex
return aux
<|> do char '['
spaces
i <- head pa
spaces
char ']'
spaces
f <- primFormula flag $ tail pa ++ [head pa]
case flag of
Ang -> return $ Not (At (Not (Box i f)))
Sqr -> return $ At (Box i f)
_ -> return $ At (Box i f)
-}
{- | Parser for un-parenthesizing a formula
parenFormula :: ModalOperator -> [GenParser Char st a] -> GenParser Char st (Boole a) -}
parenFormula flag logics = do char '('
spaces
f <- par5er flag logics
spaces
char ')'
spaces
return f
<?> "GMPParser.parenFormula"
-- | Parse integer number
natural :: GenParser Char st Integer
natural = fmap read $ many1 digit
-- | Parser for Coalition Logic index
parseCindex :: Parser [Int]
parseCindex = do -- checks whether there are more numbers to be parsed
  let stopParser = do char ','
                      return False
               <|> do char '}'
                      return True
               <?> "Parser.parseCindex.stop"
  -- checks whether the index is of the form x1,..,xn
  let normalParser l = do x <- natural
                          let n = fromInteger x
                          spaces
                          q <- stopParser
                          spaces
                          if q then return (n : l)
                               else normalParser (n : l)
                   <?> "Parser.parseCindex.normal"
  char '{'
  try (normalParser [])
    <|> do -- checks whether the index is of the form "n..m"
           let shortParser = do x <- natural
                                let n = fromInteger x
                                spaces
                                string ".."
                                spaces
                                y <- natural
                                let m = fromInteger y
                                return [n .. m]
                         <?> "Parser.parseCindex.short"
           try shortParser
    <?> "Parser.parseCindex"
-- | Parser for Graded Modal Logic index
parseGindex :: Parser Int
parseGindex = do n <- natural
return $ fromInteger n
<?> "Parser.parseGindex"
-- | Parser for Hennesy-Milner Modal Logic index
parseHMindex :: Parser Char
parseHMindex = letter
<?> "Parser.parseHMindex"
-- | Parser for K Modal Logic index
parseKindex :: Parser ()
parseKindex = return ()
-- | Parser for KD Modal Logic index
parseKDindex :: Parser ()
parseKDindex = return ()
-- | Parser for Probability Logic index
parsePindex :: Parser Rational
parsePindex =
do x <- natural
let auxP n = do char '/'
m <- natural
return $ toRational (fromInteger n / fromInteger m)
<|> do char '.'
m <- natural
let noDig n = let tmp = n < 10
in if tmp then 1
else 1 + noDig (div n 10)
let rat n = toRational (fromInteger n /
fromInteger (10 ^ noDig n))
let res = toRational n + rat m
return res
<|> return (toRational n)
<?> "Parser.parsePindex.auxP"
auxP x
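-- Examples (a sketch, using Parsec's `parse`):
-- parse parsePindex "" "3/4" yields Right (3 % 4), and
-- parse parsePindex "" "0.25" yields Right (1 % 4).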
-- | Parser for Monotonic Modal Logic index
parseMindex :: Parser ()
parseMindex = return ()
|
spechub/Hets
|
GMP/Parser.hs
|
gpl-2.0
| 8,697 | 5 | 23 | 4,407 | 1,359 | 621 | 738 | 135 | 3 |
-- C->Haskell Compiler: C attribute definitions and manipulation routines
--
-- Author : Manuel M. T. Chakravarty
-- Created: 12 August 99
--
-- Version $Revision: 1.1 $ from $Date: 2004/11/21 21:05:27 $
--
-- Copyright (c) [1999..2001] Manuel M. T. Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This module provides the attributed version of the C structure tree.
--
-- * C has several name spaces of which two are represented in this module:
-- - `CObj' in `defObjsAC': The name space of objects, functions, typedef
-- names, and enum constants.
-- - `CTag' in `defTagsAC': The name space of tags of structures, unions,
-- and enumerations.
--
-- * The final state of the name spaces is preserved in the attributed
--   structure tree. This allows further fast lookups for globally defined
--   identifiers after the name analysis is over.
--
-- * In addition to the name spaces, the attribute structure tree contains
-- a ident-definition table, which for attribute handles of identifiers
-- refers to the identifiers definition. These are only used in usage
-- occurences, except for one exception: The tag identifiers in forward
-- definitions of structures or enums get a reference to the corresponding
-- full definition - see `CTrav' for full details.
--
-- * We maintain a shadow definition table; it can be populated with aliases
--   to other objects and maps identifiers to identifiers. It is populated by
--   using the `applyPrefix' function. When lookups are performed via the shadow
--   variant of a lookup function, shadow aliases are also considered, but
--   they are used only if no normal entry for the identifier is present.
--
-- * Only ranges delimited by a block open a new range for tags (see
-- `enterNewObjRangeC' and `leaveObjRangeC').
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
--- TODO ----------------------------------------------------------------------
--
module CAttrs (-- attributed C
--
AttrC, attrC, getCHeader, enterNewRangeC, enterNewObjRangeC,
leaveRangeC, leaveObjRangeC, addDefObjC, lookupDefObjC,
lookupDefObjCShadow, addDefTagC, lookupDefTagC,
lookupDefTagCShadow, applyPrefix, getDefOfIdentC,
setDefOfIdentC, updDefOfIdentC, freezeDefOfIdentsAttrC,
softenDefOfIdentsAttrC,
--
-- C objects
--
CObj(..), CTag(..), CDef(..))
where
import Data.Char (toUpper)
import Data.List (isPrefixOf)
import Data.Maybe (mapMaybe)
import Position (Position, Pos(posOf), nopos, dontCarePos, builtinPos)
import Errors (interr)
import Idents (Ident, getIdentAttrs, identToLexeme, onlyPosIdent)
import Attributes (Attr(..), AttrTable, getAttr, setAttr, updAttr,
newAttrTable, freezeAttrTable, softenAttrTable)
import NameSpaces (NameSpace, nameSpace, enterNewRange, leaveRange, defLocal,
defGlobal, find, nameSpaceToList)
import Binary (Binary(..), putByte, getByte)
import CAST
-- attributed C structure tree
-- ---------------------------
-- C unit together with the attributes relevant to the outside world
-- (EXPORTED ABSTRACT)
--
data AttrC = AttrC {
headerAC :: CHeader, -- raw header
defObjsAC :: CObjNS, -- defined objects
defTagsAC :: CTagNS, -- defined tags
shadowsAC :: CShadowNS, -- shadow definitions (prefix)
defsAC :: CDefTable -- ident-def associations
}
-- make an attribute structure tree from a raw one (EXPORTED)
--
attrC :: CHeader -> AttrC
attrC header = AttrC {
headerAC = header,
defObjsAC = cObjNS,
defTagsAC = cTagNS,
shadowsAC = cShadowNS,
defsAC = cDefTable
}
-- extract the raw structure tree from an attributes one (EXPORTED)
--
getCHeader :: AttrC -> CHeader
getCHeader = headerAC
-- the name space operations
--
-- enter a new range (EXPORTED)
--
enterNewRangeC :: AttrC -> AttrC
enterNewRangeC ac = ac {
defObjsAC = enterNewRange . defObjsAC $ ac,
defTagsAC = enterNewRange . defTagsAC $ ac
}
-- enter a new range, only for objects (EXPORTED)
--
enterNewObjRangeC :: AttrC -> AttrC
enterNewObjRangeC ac = ac {
defObjsAC = enterNewRange . defObjsAC $ ac
}
-- leave the current range (EXPORTED)
--
leaveRangeC :: AttrC -> AttrC
leaveRangeC ac = ac {
defObjsAC = fst . leaveRange . defObjsAC $ ac,
defTagsAC = fst . leaveRange . defTagsAC $ ac
}
-- leave the current range, only for objects (EXPORTED)
--
leaveObjRangeC :: AttrC -> AttrC
leaveObjRangeC ac = ac {
defObjsAC = fst . leaveRange . defObjsAC $ ac
}
-- add another definitions to the object name space (EXPORTED)
--
-- * if a definition of the same name was already present, it is returned
--
addDefObjC :: AttrC -> Ident -> CObj -> (AttrC, Maybe CObj)
addDefObjC ac ide obj = let om = defObjsAC ac
(ac', obj') = defLocal om ide obj
in
(ac {defObjsAC = ac'}, obj')
-- lookup an identifier in the object name space (EXPORTED)
--
lookupDefObjC :: AttrC -> Ident -> Maybe CObj
lookupDefObjC ac ide = find (defObjsAC ac) ide
-- lookup an identifier in the object name space; if nothing found, try
-- whether there is a shadow identifier that matches (EXPORTED)
--
-- * the returned identifier is the _real_ identifier of the object
--
lookupDefObjCShadow :: AttrC -> Ident -> Maybe (CObj, Ident)
lookupDefObjCShadow ac ide =
case lookupDefObjC ac ide of
Just obj -> Just (obj, ide)
Nothing -> case find (shadowsAC ac) ide of
Nothing -> Nothing
Just ide' -> case lookupDefObjC ac ide' of
Just obj -> Just (obj, ide')
Nothing -> Nothing
-- add another definition to the tag name space (EXPORTED)
--
-- * if a definition of the same name was already present, it is returned
--
addDefTagC :: AttrC -> Ident -> CTag -> (AttrC, Maybe CTag)
addDefTagC ac ide obj = let tm = defTagsAC ac
(ac', obj') = defLocal tm ide obj
in
(ac {defTagsAC = ac'}, obj')
-- lookup an identifier in the tag name space (EXPORTED)
--
lookupDefTagC :: AttrC -> Ident -> Maybe CTag
lookupDefTagC ac ide = find (defTagsAC ac) ide
-- lookup an identifier in the tag name space; if nothing found, try
-- whether there is a shadow identifier that matches (EXPORTED)
--
-- * the returned identifier is the _real_ identifier of the tag
--
lookupDefTagCShadow :: AttrC -> Ident -> Maybe (CTag, Ident)
lookupDefTagCShadow ac ide =
case lookupDefTagC ac ide of
Just tag -> Just (tag, ide)
Nothing -> case find (shadowsAC ac) ide of
Nothing -> Nothing
Just ide' -> case lookupDefTagC ac ide' of
Just tag -> Just (tag, ide')
Nothing -> Nothing
-- enrich the shadow name space with identifiers obtained by dropping
-- the given prefix from the identifiers already in the object or tag name
-- space (EXPORTED)
--
-- * in case of a collisions, a random entry is selected
--
-- * case is not relevant in the prefix and underscores between the prefix and
-- the stem of an identifier are also dropped
--
applyPrefix :: AttrC -> String -> AttrC
applyPrefix ac prefix =
let
shadows = shadowsAC ac
names = map fst (nameSpaceToList (defObjsAC ac))
++ map fst (nameSpaceToList (defTagsAC ac))
newShadows = mapMaybe (strip prefix) names
in
ac {shadowsAC = foldl define shadows newShadows}
where
strip prefix ide = case eat prefix (identToLexeme ide) of
Nothing -> Nothing
Just "" -> Nothing
Just newName -> Just
(onlyPosIdent (posOf ide) newName,
ide)
--
eat [] ('_':cs) = eat [] cs
eat [] cs = Just cs
eat (p:prefix) (c:cs) | toUpper p == toUpper c = eat prefix cs
| otherwise = Nothing
eat _ _ = Nothing
--
define ns (ide, def) = fst (defGlobal ns ide def)
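-- Example (a sketch): applying the prefix "gtk" to a table that defines
-- "gtk_window_new" adds the shadow alias "window_new"; a later
-- `lookupDefObjCShadow' on "window_new" then yields the object together with
-- its real identifier "gtk_window_new". Prefix matching is case-insensitive
-- and leading underscores after the prefix are dropped, as implemented above.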
-- the attribute table operations on the attributes
--
-- get the definition associated with the given identifier (EXPORTED)
--
getDefOfIdentC :: AttrC -> Ident -> CDef
getDefOfIdentC ac = getAttr (defsAC ac) . getIdentAttrs
setDefOfIdentC :: AttrC -> Ident -> CDef -> AttrC
setDefOfIdentC ac id def =
let tot' = setAttr (defsAC ac) (getIdentAttrs id) def
in
ac {defsAC = tot'}
updDefOfIdentC :: AttrC -> Ident -> CDef -> AttrC
updDefOfIdentC ac id def =
let tot' = updAttr (defsAC ac) (getIdentAttrs id) def
in
ac {defsAC = tot'}
freezeDefOfIdentsAttrC :: AttrC -> AttrC
freezeDefOfIdentsAttrC ac = ac {defsAC = freezeAttrTable (defsAC ac)}
softenDefOfIdentsAttrC :: AttrC -> AttrC
softenDefOfIdentsAttrC ac = ac {defsAC = softenAttrTable (defsAC ac)}
-- C objects including operations
-- ------------------------------
-- C objects data definition (EXPORTED)
--
data CObj = TypeCO CDecl -- typedef declaration
| ObjCO CDecl -- object or function declaration
| EnumCO Ident CEnum -- enumerator
| BuiltinCO -- builtin object
-- two C objects are equal iff they are defined by the same structure
-- tree node (i.e., the two nodes referenced have the same attribute
-- identifier)
--
instance Eq CObj where
(TypeCO decl1 ) == (TypeCO decl2 ) = decl1 == decl2
(ObjCO decl1 ) == (ObjCO decl2 ) = decl1 == decl2
(EnumCO ide1 enum1) == (EnumCO ide2 enum2) = ide1 == ide2 && enum1 == enum2
_ == _ = False
instance Pos CObj where
posOf (TypeCO def ) = posOf def
posOf (ObjCO def ) = posOf def
posOf (EnumCO ide _) = posOf ide
posOf (BuiltinCO ) = builtinPos
-- C tagged objects including operations
-- -------------------------------------
-- C tagged objects data definition (EXPORTED)
--
data CTag = StructUnionCT CStructUnion -- toplevel struct-union declaration
| EnumCT CEnum -- toplevel enum declaration
-- two C tag objects are equal iff they are defined by the same structure
-- tree node (i.e., the two nodes referenced have the same attribute
-- identifier)
--
instance Eq CTag where
(StructUnionCT struct1) == (StructUnionCT struct2) = struct1 == struct2
(EnumCT enum1 ) == (EnumCT enum2 ) = enum1 == enum2
_ == _ = False
instance Pos CTag where
posOf (StructUnionCT def) = posOf def
posOf (EnumCT def) = posOf def
-- C general definition
-- --------------------
-- C general definition (EXPORTED)
--
data CDef = UndefCD -- undefined object
| DontCareCD -- don't care object
| ObjCD CObj -- C object
| TagCD CTag -- C tag
-- two C definitions are equal iff they are defined by the same structure
-- tree node (i.e., the two nodes referenced have the same attribute
-- identifier), but don't care objects are equal to everything and undefined
-- objects may not be compared
--
instance Eq CDef where
(ObjCD obj1) == (ObjCD obj2) = obj1 == obj2
(TagCD tag1) == (TagCD tag2) = tag1 == tag2
DontCareCD == _ = True
_ == DontCareCD = True
UndefCD == _ =
interr "CAttrs: Attempt to compare an undefined C definition!"
_ == UndefCD =
interr "CAttrs: Attempt to compare an undefined C definition!"
_ == _ = False
instance Attr CDef where
undef = UndefCD
dontCare = DontCareCD
isUndef UndefCD = True
isUndef _ = False
isDontCare DontCareCD = True
isDontCare _ = False
instance Pos CDef where
posOf UndefCD = nopos
posOf DontCareCD = dontCarePos
posOf (ObjCD obj) = posOf obj
posOf (TagCD tag) = posOf tag
-- object tables (internal use only)
-- ---------------------------------
-- the object name space
--
type CObjNS = NameSpace CObj
-- creating a new object name space
--
cObjNS :: CObjNS
cObjNS = nameSpace
-- the tag name space
--
type CTagNS = NameSpace CTag
-- creating a new tag name space
--
cTagNS :: CTagNS
cTagNS = nameSpace
-- the shadow name space
--
type CShadowNS = NameSpace Ident
-- creating a shadow name space
--
cShadowNS :: CShadowNS
cShadowNS = nameSpace
-- the general definition table
--
type CDefTable = AttrTable CDef
-- creating a new definition table
--
cDefTable :: CDefTable
cDefTable = newAttrTable "C General Definition Table for Idents"
{-! for AttrC derive : GhcBinary !-}
{-! for CObj derive : GhcBinary !-}
{-! for CTag derive : GhcBinary !-}
{-! for CDef derive : GhcBinary !-}
{-* Generated by DrIFT : Look, but Don't Touch. *-}
instance Binary AttrC where
put_ bh (AttrC aa ab ac ad ae) = do
-- put_ bh aa
put_ bh ab
put_ bh ac
put_ bh ad
put_ bh ae
get bh = do
-- aa <- get bh
ab <- get bh
ac <- get bh
ad <- get bh
ae <- get bh
return (AttrC (error "AttrC.headerAC should not be needed") ab ac ad ae)
instance Binary CObj where
put_ bh (TypeCO aa) = do
putByte bh 0
put_ bh aa
put_ bh (ObjCO ab) = do
putByte bh 1
put_ bh ab
put_ bh (EnumCO ac ad) = do
putByte bh 2
put_ bh ac
put_ bh ad
put_ bh BuiltinCO = do
putByte bh 3
get bh = do
h <- getByte bh
case h of
0 -> do
aa <- get bh
return (TypeCO aa)
1 -> do
ab <- get bh
return (ObjCO ab)
2 -> do
ac <- get bh
ad <- get bh
return (EnumCO ac ad)
3 -> do
return BuiltinCO
instance Binary CTag where
put_ bh (StructUnionCT aa) = do
putByte bh 0
put_ bh aa
put_ bh (EnumCT ab) = do
putByte bh 1
put_ bh ab
get bh = do
h <- getByte bh
case h of
0 -> do
aa <- get bh
return (StructUnionCT aa)
1 -> do
ab <- get bh
return (EnumCT ab)
instance Binary CDef where
put_ bh UndefCD = do
putByte bh 0
put_ bh DontCareCD = do
putByte bh 1
put_ bh (ObjCD aa) = do
putByte bh 2
put_ bh aa
put_ bh (TagCD ab) = do
putByte bh 3
put_ bh ab
get bh = do
h <- getByte bh
case h of
0 -> do
return UndefCD
1 -> do
return DontCareCD
2 -> do
aa <- get bh
return (ObjCD aa)
3 -> do
ab <- get bh
return (TagCD ab)
|
k0001/gtk2hs
|
tools/c2hs/c/CAttrs.hs
|
gpl-3.0
| 16,592 | 0 | 15 | 5,591 | 3,189 | 1,696 | 1,493 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.SNS.SetPlatformApplicationAttributes
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets the attributes of the platform application object for the supported
-- push notification services, such as APNS and GCM. For more information,
-- see
-- <http://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html Using Amazon SNS Mobile Push Notifications>.
--
-- /See:/ <http://docs.aws.amazon.com/sns/latest/api/API_SetPlatformApplicationAttributes.html AWS API Reference> for SetPlatformApplicationAttributes.
module Network.AWS.SNS.SetPlatformApplicationAttributes
(
-- * Creating a Request
setPlatformApplicationAttributes
, SetPlatformApplicationAttributes
-- * Request Lenses
, spaaPlatformApplicationARN
, spaaAttributes
-- * Destructuring the Response
, setPlatformApplicationAttributesResponse
, SetPlatformApplicationAttributesResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.SNS.Types
import Network.AWS.SNS.Types.Product
-- | Input for SetPlatformApplicationAttributes action.
--
-- /See:/ 'setPlatformApplicationAttributes' smart constructor.
data SetPlatformApplicationAttributes = SetPlatformApplicationAttributes'
{ _spaaPlatformApplicationARN :: !Text
, _spaaAttributes :: !(Map Text Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'SetPlatformApplicationAttributes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'spaaPlatformApplicationARN'
--
-- * 'spaaAttributes'
setPlatformApplicationAttributes
:: Text -- ^ 'spaaPlatformApplicationARN'
-> SetPlatformApplicationAttributes
setPlatformApplicationAttributes pPlatformApplicationARN_ =
SetPlatformApplicationAttributes'
{ _spaaPlatformApplicationARN = pPlatformApplicationARN_
, _spaaAttributes = mempty
}
-- | PlatformApplicationArn for SetPlatformApplicationAttributes action.
spaaPlatformApplicationARN :: Lens' SetPlatformApplicationAttributes Text
spaaPlatformApplicationARN = lens _spaaPlatformApplicationARN (\ s a -> s{_spaaPlatformApplicationARN = a});
-- | A map of the platform application attributes. Attributes in this map
-- include the following:
--
-- - 'PlatformCredential' -- The credential received from the
-- notification service. For APNS\/APNS_SANDBOX, PlatformCredential is
-- \"private key\". For GCM, PlatformCredential is \"API key\". For
-- ADM, PlatformCredential is \"client secret\".
-- - 'PlatformPrincipal' -- The principal received from the notification
-- service. For APNS\/APNS_SANDBOX, PlatformPrincipal is \"SSL
-- certificate\". For GCM, PlatformPrincipal is not applicable. For
-- ADM, PlatformPrincipal is \"client id\".
-- - 'EventEndpointCreated' -- Topic ARN to which EndpointCreated event
-- notifications should be sent.
-- - 'EventEndpointDeleted' -- Topic ARN to which EndpointDeleted event
-- notifications should be sent.
-- - 'EventEndpointUpdated' -- Topic ARN to which EndpointUpdate event
-- notifications should be sent.
-- - 'EventDeliveryFailure' -- Topic ARN to which DeliveryFailure event
-- notifications should be sent upon Direct Publish delivery failure
-- (permanent) to one of the application\'s endpoints.
spaaAttributes :: Lens' SetPlatformApplicationAttributes (HashMap Text Text)
spaaAttributes = lens _spaaAttributes (\ s a -> s{_spaaAttributes = a}) . _Map;
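-- A minimal usage sketch (not part of the generated API): the request below
-- is built with the smart constructor and the lenses above. The ARN and
-- credential values are placeholders, and the sketch assumes a lens library
-- in scope for @(&)@ and @(.~)@, @Data.HashMap.Strict@ imported qualified as
-- @HashMap@, and @OverloadedStrings@.
--
-- > exampleRequest :: SetPlatformApplicationAttributes
-- > exampleRequest =
-- >     setPlatformApplicationAttributes "arn:aws:sns:us-east-1:123456789012:app/GCM/MyApp"
-- >         & spaaAttributes .~ HashMap.fromList
-- >             [ ("PlatformCredential", "API key")
-- >             , ("EventDeliveryFailure", "arn:aws:sns:us-east-1:123456789012:DeliveryFailures")
-- >             ]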
instance AWSRequest SetPlatformApplicationAttributes
where
type Rs SetPlatformApplicationAttributes =
SetPlatformApplicationAttributesResponse
request = postQuery sNS
response
= receiveNull
SetPlatformApplicationAttributesResponse'
instance ToHeaders SetPlatformApplicationAttributes
where
toHeaders = const mempty
instance ToPath SetPlatformApplicationAttributes
where
toPath = const "/"
instance ToQuery SetPlatformApplicationAttributes
where
toQuery SetPlatformApplicationAttributes'{..}
= mconcat
["Action" =:
("SetPlatformApplicationAttributes" :: ByteString),
"Version" =: ("2010-03-31" :: ByteString),
"PlatformApplicationArn" =:
_spaaPlatformApplicationARN,
"Attributes" =:
toQueryMap "entry" "key" "value" _spaaAttributes]
-- | /See:/ 'setPlatformApplicationAttributesResponse' smart constructor.
data SetPlatformApplicationAttributesResponse =
SetPlatformApplicationAttributesResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'SetPlatformApplicationAttributesResponse' with the minimum fields required to make a request.
--
setPlatformApplicationAttributesResponse
:: SetPlatformApplicationAttributesResponse
setPlatformApplicationAttributesResponse =
SetPlatformApplicationAttributesResponse'
|
fmapfmapfmap/amazonka
|
amazonka-sns/gen/Network/AWS/SNS/SetPlatformApplicationAttributes.hs
|
mpl-2.0
| 5,685 | 0 | 11 | 1,047 | 484 | 299 | 185 | 68 | 1 |
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
module Tests.Text(tests) where
import Test.HUnitPlus.Base
import qualified Tests.Text.Format as Format
tests :: Test
tests = "Text" ~: [Format.tests]
|
saltlang/dynamic-pprint
|
test/Tests/Text.hs
|
bsd-3-clause
| 1,710 | 0 | 7 | 289 | 74 | 58 | 16 | 5 | 1 |
{-# LANGUAGE CPP, FlexibleInstances, OverloadedStrings, Rank2Types #-}
#ifdef GENERICS
{-# LANGUAGE DefaultSignatures, TypeOperators, KindSignatures, FlexibleContexts,
MultiParamTypeClasses, UndecidableInstances, ScopedTypeVariables #-}
#endif
module Data.Csv.Conversion
(
-- * Type conversion
Only(..)
, FromRecord(..)
, FromNamedRecord(..)
, ToNamedRecord(..)
, FromField(..)
, ToRecord(..)
, ToField(..)
-- * Parser
, Result(..)
, Parser
, parse
-- * Accessors
, (.!)
, (.:)
, (.=)
, record
, namedRecord
) where
import Control.Applicative
import Control.Monad
import Data.Attoparsec.Char8 (double, number, parseOnly)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as L
import qualified Data.HashMap.Lazy as HM
import Data.Int
import qualified Data.Map as M
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import Data.Traversable
import Data.Vector (Vector, (!))
import qualified Data.Vector as V
import Data.Word
import GHC.Float (double2Float)
import Prelude hiding (takeWhile)
import Data.Csv.Conversion.Internal
import Data.Csv.Types
#ifdef GENERICS
import GHC.Generics
import qualified Data.IntMap as IM
#endif
------------------------------------------------------------------------
-- Type conversion
------------------------------------------------------------------------
-- Index-based conversion
-- | A type that can be converted from a single CSV record, with the
-- possibility of failure.
--
-- When writing an instance, use 'empty', 'mzero', or 'fail' to make a
-- conversion fail, e.g. if a 'Record' has the wrong number of
-- columns.
--
-- Given this example data:
--
-- > John,56
-- > Jane,55
--
-- here's an example type and instance:
--
-- @data Person = Person { name :: Text, age :: Int }
--
-- instance FromRecord Person where
-- parseRecord v
-- | 'V.length' v == 2 = Person '<$>'
-- v '.!' 0 '<*>'
-- v '.!' 1
-- | otherwise = mzero
-- @
class FromRecord a where
parseRecord :: Record -> Parser a
#ifdef GENERICS
default parseRecord :: (Generic a, GFromRecord (Rep a)) => Record -> Parser a
parseRecord r = to <$> gparseRecord r
#endif
-- | Haskell lacks a single-element tuple type, so if your CSV data
-- has just one column you can use the 'Only' type to represent a
-- single-column result.
newtype Only a = Only {
fromOnly :: a
} deriving (Eq, Ord, Read, Show)
-- | A type that can be converted to a single CSV record.
--
-- An example type and instance:
--
-- @data Person = Person { name :: Text, age :: Int }
--
-- instance ToRecord Person where
-- toRecord (Person name age) = 'record' [
-- 'toField' name, 'toField' age]
-- @
--
-- Outputs data in this form:
--
-- > John,56
-- > Jane,55
class ToRecord a where
toRecord :: a -> Record
#ifdef GENERICS
default toRecord :: (Generic a, GToRecord (Rep a) Field) => a -> Record
toRecord = V.fromList . gtoRecord . from
#endif
instance FromField a => FromRecord (Only a) where
parseRecord v
| n == 1 = Only <$> parseField (V.unsafeIndex v 0)
| otherwise = lengthMismatch 1 v
where
n = V.length v
-- TODO: Check if we want all toRecord conversions to be stricter.
instance ToField a => ToRecord (Only a) where
toRecord = V.singleton . toField . fromOnly
instance (FromField a, FromField b) => FromRecord (a, b) where
parseRecord v
| n == 2 = (,) <$> parseField (V.unsafeIndex v 0)
<*> parseField (V.unsafeIndex v 1)
| otherwise = lengthMismatch 2 v
where
n = V.length v
instance (ToField a, ToField b) => ToRecord (a, b) where
toRecord (a, b) = V.fromList [toField a, toField b]
instance (FromField a, FromField b, FromField c) => FromRecord (a, b, c) where
parseRecord v
| n == 3 = (,,) <$> parseField (V.unsafeIndex v 0)
<*> parseField (V.unsafeIndex v 1)
<*> parseField (V.unsafeIndex v 2)
| otherwise = lengthMismatch 3 v
where
n = V.length v
instance (ToField a, ToField b, ToField c) =>
ToRecord (a, b, c) where
toRecord (a, b, c) = V.fromList [toField a, toField b, toField c]
instance (FromField a, FromField b, FromField c, FromField d) =>
FromRecord (a, b, c, d) where
parseRecord v
| n == 4 = (,,,) <$> parseField (V.unsafeIndex v 0)
<*> parseField (V.unsafeIndex v 1)
<*> parseField (V.unsafeIndex v 2)
<*> parseField (V.unsafeIndex v 3)
| otherwise = lengthMismatch 4 v
where
n = V.length v
instance (ToField a, ToField b, ToField c, ToField d) =>
ToRecord (a, b, c, d) where
toRecord (a, b, c, d) = V.fromList [
toField a, toField b, toField c, toField d]
instance (FromField a, FromField b, FromField c, FromField d, FromField e) =>
FromRecord (a, b, c, d, e) where
parseRecord v
| n == 5 = (,,,,) <$> parseField (V.unsafeIndex v 0)
<*> parseField (V.unsafeIndex v 1)
<*> parseField (V.unsafeIndex v 2)
<*> parseField (V.unsafeIndex v 3)
<*> parseField (V.unsafeIndex v 4)
| otherwise = lengthMismatch 5 v
where
n = V.length v
instance (ToField a, ToField b, ToField c, ToField d, ToField e) =>
ToRecord (a, b, c, d, e) where
toRecord (a, b, c, d, e) = V.fromList [
toField a, toField b, toField c, toField d, toField e]
instance (FromField a, FromField b, FromField c, FromField d, FromField e,
FromField f) =>
FromRecord (a, b, c, d, e, f) where
parseRecord v
| n == 6 = (,,,,,) <$> parseField (V.unsafeIndex v 0)
<*> parseField (V.unsafeIndex v 1)
<*> parseField (V.unsafeIndex v 2)
<*> parseField (V.unsafeIndex v 3)
<*> parseField (V.unsafeIndex v 4)
<*> parseField (V.unsafeIndex v 5)
| otherwise = lengthMismatch 6 v
where
n = V.length v
instance (ToField a, ToField b, ToField c, ToField d, ToField e, ToField f) =>
ToRecord (a, b, c, d, e, f) where
toRecord (a, b, c, d, e, f) = V.fromList [
toField a, toField b, toField c, toField d, toField e, toField f]
instance (FromField a, FromField b, FromField c, FromField d, FromField e,
FromField f, FromField g) =>
FromRecord (a, b, c, d, e, f, g) where
parseRecord v
| n == 7 = (,,,,,,) <$> parseField (V.unsafeIndex v 0)
<*> parseField (V.unsafeIndex v 1)
<*> parseField (V.unsafeIndex v 2)
<*> parseField (V.unsafeIndex v 3)
<*> parseField (V.unsafeIndex v 4)
<*> parseField (V.unsafeIndex v 5)
<*> parseField (V.unsafeIndex v 6)
| otherwise = lengthMismatch 7 v
where
n = V.length v
instance (ToField a, ToField b, ToField c, ToField d, ToField e, ToField f,
ToField g) =>
ToRecord (a, b, c, d, e, f, g) where
toRecord (a, b, c, d, e, f, g) = V.fromList [
toField a, toField b, toField c, toField d, toField e, toField f,
toField g]
lengthMismatch :: Int -> Record -> Parser a
lengthMismatch expected v =
fail $ "cannot unpack array of length " ++
show n ++ " into a " ++ desired ++ ". Input record: " ++
show v
where
n = V.length v
desired | expected == 1 = "Only"
| expected == 2 = "pair"
| otherwise = show expected ++ "-tuple"
instance FromField a => FromRecord [a] where
parseRecord = traverse parseField . V.toList
instance ToField a => ToRecord [a] where
toRecord = V.fromList . map toField
instance FromField a => FromRecord (V.Vector a) where
parseRecord = traverse parseField
instance ToField a => ToRecord (Vector a) where
toRecord = V.map toField
------------------------------------------------------------------------
-- Name-based conversion
-- | A type that can be converted from a single CSV record, with the
-- possibility of failure.
--
-- When writing an instance, use 'empty', 'mzero', or 'fail' to make a
-- conversion fail, e.g. if a 'Record' has the wrong number of
-- columns.
--
-- Given this example data:
--
-- > name,age
-- > John,56
-- > Jane,55
--
-- here's an example type and instance:
--
-- @{-\# LANGUAGE OverloadedStrings \#-}
--
-- data Person = Person { name :: Text, age :: Int }
--
-- instance FromNamedRecord Person where
-- parseNamedRecord m = Person '<$>'
-- m '.:' \"name\" '<*>'
-- m '.:' \"age\"
-- @
--
-- Note the use of the @OverloadedStrings@ language extension which
-- enables 'B8.ByteString' values to be written as string literals.
class FromNamedRecord a where
parseNamedRecord :: NamedRecord -> Parser a
#ifdef GENERICS
default parseNamedRecord :: (Generic a, GFromNamedRecord (Rep a)) => NamedRecord -> Parser a
parseNamedRecord r = to <$> gparseNamedRecord r
#endif
-- | A type that can be converted to a single CSV record.
--
-- An example type and instance:
--
-- @data Person = Person { name :: Text, age :: Int }
--
-- instance ToNamedRecord Person where
-- toNamedRecord (Person name age) = 'namedRecord' [
-- \"name\" '.=' name, \"age\" '.=' age]
-- @
class ToNamedRecord a where
toNamedRecord :: a -> NamedRecord
#ifdef GENERICS
default toNamedRecord :: (Generic a, GToRecord (Rep a) (B.ByteString, B.ByteString)) => a -> NamedRecord
toNamedRecord = namedRecord . gtoRecord . from
#endif
instance FromField a => FromNamedRecord (M.Map B.ByteString a) where
parseNamedRecord m = M.fromList <$>
(traverse parseSnd $ HM.toList m)
where parseSnd (name, s) = (,) <$> pure name <*> parseField s
instance ToField a => ToNamedRecord (M.Map B.ByteString a) where
toNamedRecord = HM.fromList . map (\ (k, v) -> (k, toField v)) . M.toList
instance FromField a => FromNamedRecord (HM.HashMap B.ByteString a) where
parseNamedRecord m = traverse (\ s -> parseField s) m
instance ToField a => ToNamedRecord (HM.HashMap B.ByteString a) where
toNamedRecord = HM.map toField
------------------------------------------------------------------------
-- Individual field conversion
-- | A type that can be converted from a single CSV field, with the
-- possibility of failure.
--
-- When writing an instance, use 'empty', 'mzero', or 'fail' to make a
-- conversion fail, e.g. if a 'Field' can't be converted to the given
-- type.
--
-- Example type and instance:
--
-- @{-\# LANGUAGE OverloadedStrings \#-}
--
-- data Color = Red | Green | Blue
--
-- instance FromField Color where
-- parseField s
-- | s == \"R\" = pure Red
-- | s == \"G\" = pure Green
-- | s == \"B\" = pure Blue
-- | otherwise = mzero
-- @
class FromField a where
parseField :: Field -> Parser a
-- | A type that can be converted to a single CSV field.
--
-- Example type and instance:
--
-- @{-\# LANGUAGE OverloadedStrings \#-}
--
-- data Color = Red | Green | Blue
--
-- instance ToField Color where
-- toField Red = \"R\"
-- toField Green = \"G\"
-- toField Blue = \"B\"
-- @
class ToField a where
toField :: a -> Field
instance FromField Char where
parseField s
| T.compareLength t 1 == EQ = pure (T.head t)
| otherwise = typeError "Char" s Nothing
where t = T.decodeUtf8 s
{-# INLINE parseField #-}
instance ToField Char where
toField = toField . T.encodeUtf8 . T.singleton
{-# INLINE toField #-}
instance FromField Double where
parseField = parseDouble
{-# INLINE parseField #-}
instance ToField Double where
toField = realFloat
{-# INLINE toField #-}
instance FromField Float where
parseField s = double2Float <$> parseDouble s
{-# INLINE parseField #-}
instance ToField Float where
toField = realFloat
{-# INLINE toField #-}
parseDouble :: B.ByteString -> Parser Double
parseDouble s = case parseOnly double s of
Left err -> typeError "Double" s (Just err)
Right n -> pure n
{-# INLINE parseDouble #-}
instance FromField Int where
parseField = parseIntegral "Int"
{-# INLINE parseField #-}
instance ToField Int where
toField = decimal
{-# INLINE toField #-}
instance FromField Integer where
parseField = parseIntegral "Integer"
{-# INLINE parseField #-}
instance ToField Integer where
toField = decimal
{-# INLINE toField #-}
instance FromField Int8 where
parseField = parseIntegral "Int8"
{-# INLINE parseField #-}
instance ToField Int8 where
toField = decimal
{-# INLINE toField #-}
instance FromField Int16 where
parseField = parseIntegral "Int16"
{-# INLINE parseField #-}
instance ToField Int16 where
toField = decimal
{-# INLINE toField #-}
instance FromField Int32 where
parseField = parseIntegral "Int32"
{-# INLINE parseField #-}
instance ToField Int32 where
toField = decimal
{-# INLINE toField #-}
instance FromField Int64 where
parseField = parseIntegral "Int64"
{-# INLINE parseField #-}
instance ToField Int64 where
toField = decimal
{-# INLINE toField #-}
instance FromField Word where
parseField = parseIntegral "Word"
{-# INLINE parseField #-}
instance ToField Word where
toField = decimal
{-# INLINE toField #-}
instance FromField Word8 where
parseField = parseIntegral "Word8"
{-# INLINE parseField #-}
instance ToField Word8 where
toField = decimal
{-# INLINE toField #-}
instance FromField Word16 where
parseField = parseIntegral "Word16"
{-# INLINE parseField #-}
instance ToField Word16 where
toField = decimal
{-# INLINE toField #-}
instance FromField Word32 where
parseField = parseIntegral "Word32"
{-# INLINE parseField #-}
instance ToField Word32 where
toField = decimal
{-# INLINE toField #-}
instance FromField Word64 where
parseField = parseIntegral "Word64"
{-# INLINE parseField #-}
instance ToField Word64 where
toField = decimal
{-# INLINE toField #-}
-- TODO: Optimize
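-- 'escape' quotes a field when it contains a double quote, comma, newline,
-- carriage return, or space, doubling any embedded double quotes; any other
-- field passes through unchanged. For example, escaping @hello, world@ is
-- expected to yield @"hello, world"@ (with surrounding quotes), while
-- @hello@ is returned as-is.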
escape :: B.ByteString -> B.ByteString
escape s
| B.find (\ b -> b == dquote || b == comma || b == nl || b == cr ||
b == sp) s == Nothing = s
| otherwise =
B.concat ["\"",
B.concatMap
(\ b -> if b == dquote then "\"\"" else B.singleton b) s,
"\""]
where
dquote = 34
comma = 44
nl = 10
cr = 13
sp = 32
instance FromField B.ByteString where
parseField = pure
{-# INLINE parseField #-}
instance ToField B.ByteString where
toField = escape
{-# INLINE toField #-}
instance FromField L.ByteString where
parseField s = pure (L.fromChunks [s])
{-# INLINE parseField #-}
instance ToField L.ByteString where
toField = toField . B.concat . L.toChunks
{-# INLINE toField #-}
-- | Assumes UTF-8 encoding.
instance FromField T.Text where
parseField = pure . T.decodeUtf8
{-# INLINE parseField #-}
-- | Uses UTF-8 encoding.
instance ToField T.Text where
toField = toField . T.encodeUtf8
{-# INLINE toField #-}
-- | Assumes UTF-8 encoding.
instance FromField LT.Text where
parseField s = pure (LT.fromChunks [T.decodeUtf8 s])
{-# INLINE parseField #-}
-- | Uses UTF-8 encoding.
instance ToField LT.Text where
toField = toField . B.concat . L.toChunks . LT.encodeUtf8
{-# INLINE toField #-}
-- | Assumes UTF-8 encoding.
instance FromField [Char] where
parseField = fmap T.unpack . parseField
{-# INLINE parseField #-}
-- | Uses UTF-8 encoding.
instance ToField [Char] where
toField = toField . T.pack
{-# INLINE toField #-}
parseIntegral :: Integral a => String -> B.ByteString -> Parser a
parseIntegral typ s = case parseOnly number s of
Left err -> typeError typ s (Just err)
Right n -> pure (floor n)
{-# INLINE parseIntegral #-}
typeError :: String -> B.ByteString -> Maybe String -> Parser a
typeError typ s mmsg =
fail $ "expected " ++ typ ++ ", got " ++ show (B8.unpack s) ++ cause
where
cause = case mmsg of
Just msg -> " (" ++ msg ++ ")"
Nothing -> ""
------------------------------------------------------------------------
-- Constructors and accessors
-- | Retrieve the /n/th field in the given record. The result is
-- 'empty' if the value cannot be converted to the desired type.
-- Raises an exception if the index is out of bounds.
(.!) :: FromField a => Record -> Int -> Parser a
v .! idx = parseField (v ! idx)
{-# INLINE (.!) #-}
-- | Retrieve a field in the given record by name. The result is
-- 'empty' if the field is missing or if the value cannot be converted
-- to the desired type.
(.:) :: FromField a => NamedRecord -> B.ByteString -> Parser a
m .: name = maybe (fail err) parseField $ HM.lookup name m
where err = "no field named " ++ show (B8.unpack name)
{-# INLINE (.:) #-}
-- | Construct a pair from a name and a value. For use with
-- 'namedRecord'.
(.=) :: ToField a => B.ByteString -> a -> (B.ByteString, B.ByteString)
name .= val = (name, toField val)
{-# INLINE (.=) #-}
-- | Construct a record from a list of 'B.ByteString's. Use 'toField'
-- to convert values to 'B.ByteString's for use with 'record'.
record :: [B.ByteString] -> Record
record = V.fromList
-- | Construct a named record from a list of name-value 'B.ByteString'
-- pairs. Use '.=' to construct such a pair from a name and a value.
namedRecord :: [(B.ByteString, B.ByteString)] -> NamedRecord
namedRecord = HM.fromList
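-- A small illustrative sketch (these bindings are not part of the library;
-- they merely exercise 'record', 'namedRecord', '.=' and 'toField' from this
-- module, assuming @OverloadedStrings@ for the field names):
--
-- > exampleRecord :: Record
-- > exampleRecord = record [toField ("John" :: String), toField (56 :: Int)]
-- >
-- > exampleNamedRecord :: NamedRecord
-- > exampleNamedRecord =
-- >     namedRecord ["name" .= ("John" :: String), "age" .= (56 :: Int)]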
------------------------------------------------------------------------
-- Parser for converting records to data types
-- | The result of running a 'Parser'.
data Result a = Error String
| Success a
deriving (Eq, Show)
instance Functor Result where
fmap f (Success a) = Success (f a)
fmap _ (Error err) = Error err
{-# INLINE fmap #-}
instance Monad Result where
return = Success
{-# INLINE return #-}
Success a >>= k = k a
Error err >>= _ = Error err
{-# INLINE (>>=) #-}
instance Applicative Result where
pure = return
{-# INLINE pure #-}
(<*>) = ap
{-# INLINE (<*>) #-}
instance MonadPlus Result where
mzero = fail "mzero"
{-# INLINE mzero #-}
mplus a@(Success _) _ = a
mplus _ b = b
{-# INLINE mplus #-}
instance Alternative Result where
empty = mzero
{-# INLINE empty #-}
(<|>) = mplus
{-# INLINE (<|>) #-}
instance Monoid (Result a) where
mempty = fail "mempty"
{-# INLINE mempty #-}
mappend = mplus
{-# INLINE mappend #-}
-- | Failure continuation.
type Failure f r = String -> f r
-- | Success continuation.
type Success a f r = a -> f r
-- | Conversion of a field to a value might fail e.g. if the field is
-- malformed. This possibility is captured by the 'Parser' type, which
-- lets you compose several field conversions together in such a way
-- that if any of them fail, the whole record conversion fails.
newtype Parser a = Parser {
runParser :: forall f r.
Failure f r
-> Success a f r
-> f r
}
instance Monad Parser where
m >>= g = Parser $ \kf ks -> let ks' a = runParser (g a) kf ks
in runParser m kf ks'
{-# INLINE (>>=) #-}
return a = Parser $ \_kf ks -> ks a
{-# INLINE return #-}
fail msg = Parser $ \kf _ks -> kf msg
{-# INLINE fail #-}
instance Functor Parser where
fmap f m = Parser $ \kf ks -> let ks' a = ks (f a)
in runParser m kf ks'
{-# INLINE fmap #-}
instance Applicative Parser where
pure = return
{-# INLINE pure #-}
(<*>) = apP
{-# INLINE (<*>) #-}
instance Alternative Parser where
empty = fail "empty"
{-# INLINE empty #-}
(<|>) = mplus
{-# INLINE (<|>) #-}
instance MonadPlus Parser where
mzero = fail "mzero"
{-# INLINE mzero #-}
mplus a b = Parser $ \kf ks -> let kf' _ = runParser b kf ks
in runParser a kf' ks
{-# INLINE mplus #-}
instance Monoid (Parser a) where
mempty = fail "mempty"
{-# INLINE mempty #-}
mappend = mplus
{-# INLINE mappend #-}
apP :: Parser (a -> b) -> Parser a -> Parser b
apP d e = do
b <- d
a <- e
return (b a)
{-# INLINE apP #-}
-- | Run a 'Parser'.
parse :: Parser a -> Result a
parse p = runParser p Error Success
{-# INLINE parse #-}
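-- An illustrative sketch of running a conversion end to end (the names are
-- only for the example, and @OverloadedStrings@ is assumed for the byte
-- string literals):
--
-- > decodePerson :: Record -> Result (T.Text, Int)
-- > decodePerson r = parse (parseRecord r)
--
-- Given @record ["John", "56"]@ this is expected to yield
-- @Success ("John", 56)@, whereas a record of the wrong width fails with an
-- 'Error' produced via 'lengthMismatch'.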
#ifdef GENERICS
class GFromRecord f where
gparseRecord :: Record -> Parser (f p)
instance GFromRecordSum f Record => GFromRecord (M1 i n f) where
gparseRecord v =
case (IM.lookup n gparseRecordSum) of
Nothing -> lengthMismatch n v
Just p -> M1 <$> p v
where
n = V.length v
class GFromNamedRecord f where
gparseNamedRecord :: NamedRecord -> Parser (f p)
instance GFromRecordSum f NamedRecord => GFromNamedRecord (M1 i n f) where
gparseNamedRecord v =
foldr (\f p -> p <|> M1 <$> f v) empty (IM.elems gparseRecordSum)
class GFromRecordSum f r where
gparseRecordSum :: IM.IntMap (r -> Parser (f p))
instance (GFromRecordSum a r, GFromRecordSum b r) => GFromRecordSum (a :+: b) r where
gparseRecordSum =
IM.unionWith (\a b r -> a r <|> b r)
(fmap (L1 <$>) <$> gparseRecordSum)
(fmap (R1 <$>) <$> gparseRecordSum)
instance GFromRecordProd f r => GFromRecordSum (M1 i n f) r where
gparseRecordSum = IM.singleton n (fmap (M1 <$>) f)
where
(n, f) = gparseRecordProd 0
class GFromRecordProd f r where
gparseRecordProd :: Int -> (Int, r -> Parser (f p))
instance GFromRecordProd U1 r where
gparseRecordProd n = (n, const (pure U1))
instance (GFromRecordProd a r, GFromRecordProd b r) => GFromRecordProd (a :*: b) r where
gparseRecordProd n0 = (n2, f)
where
f r = (:*:) <$> fa r <*> fb r
(n1, fa) = gparseRecordProd n0
(n2, fb) = gparseRecordProd n1
instance GFromRecordProd f Record => GFromRecordProd (M1 i n f) Record where
gparseRecordProd n = fmap (M1 <$>) <$> gparseRecordProd n
instance FromField a => GFromRecordProd (K1 i a) Record where
gparseRecordProd n = (n + 1, \v -> K1 <$> parseField (V.unsafeIndex v n))
data Proxy s (f :: * -> *) a = Proxy
instance (FromField a, Selector s) => GFromRecordProd (M1 S s (K1 i a)) NamedRecord where
gparseRecordProd n = (n + 1, \v -> (M1 . K1) <$> v .: name)
where
name = T.encodeUtf8 (T.pack (selName (Proxy :: Proxy s f a)))
class GToRecord a f where
gtoRecord :: a p -> [f]
instance GToRecord U1 f where
gtoRecord U1 = []
instance (GToRecord a f, GToRecord b f) => GToRecord (a :*: b) f where
gtoRecord (a :*: b) = gtoRecord a ++ gtoRecord b
instance (GToRecord a f, GToRecord b f) => GToRecord (a :+: b) f where
gtoRecord (L1 a) = gtoRecord a
gtoRecord (R1 b) = gtoRecord b
instance GToRecord a f => GToRecord (M1 D c a) f where
gtoRecord (M1 a) = gtoRecord a
instance GToRecord a f => GToRecord (M1 C c a) f where
gtoRecord (M1 a) = gtoRecord a
instance GToRecord a Field => GToRecord (M1 S c a) Field where
gtoRecord (M1 a) = gtoRecord a
instance ToField a => GToRecord (K1 i a) Field where
gtoRecord (K1 a) = [toField a]
instance (ToField a, Selector s) => GToRecord (M1 S s (K1 i a)) (B.ByteString, B.ByteString) where
gtoRecord m@(M1 (K1 a)) = [T.encodeUtf8 (T.pack (selName m)) .= toField a]
#endif
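-- When this module is built with the GENERICS flag, the default signatures
-- above let instances be derived generically. A hedged sketch of what user
-- code could look like under that assumption (the @Person@ type is
-- hypothetical):
--
-- > {-# LANGUAGE DeriveGeneric #-}
-- > data Person = Person { name :: T.Text, age :: Int } deriving (Generic)
-- >
-- > instance FromRecord Person
-- > instance ToRecord Person
-- > instance FromNamedRecord Person
-- > instance ToNamedRecord Person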
|
basvandijk/cassava
|
Data/Csv/Conversion.hs
|
bsd-3-clause
| 24,226 | 0 | 20 | 6,564 | 6,578 | 3,519 | 3,059 | 413 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import Network.HaskellNet.IMAP.SSL
import Network.HaskellNet.SMTP.SSL as SMTP
import Network.HaskellNet.Auth (AuthType(LOGIN))
import qualified Data.ByteString.Char8 as B
username = "[email protected]"
password = "password"
recipient = "[email protected]"
imapTest = do
c <- connectIMAPSSLWithSettings "imap.gmail.com" cfg
login c username password
mboxes <- list c
mapM_ print mboxes
select c "INBOX"
msgs <- search c [ALLs]
let firstMsg = head msgs
msgContent <- fetch c firstMsg
B.putStrLn msgContent
logout c
where cfg = defaultSettingsIMAPSSL { sslMaxLineLength = 100000 }
smtpTest = doSMTPSTARTTLS "smtp.gmail.com" $ \c -> do
authSucceed <- SMTP.authenticate LOGIN username password c
if authSucceed
then sendPlainTextMail recipient username subject body c
else print "Authentication error."
where subject = "Test message"
body = "This is a test message"
main :: IO ()
main = smtpTest >> imapTest >> return ()
|
lemol/HaskellNet-SSL
|
examples/gmail.hs
|
bsd-3-clause
| 1,068 | 0 | 11 | 241 | 278 | 142 | 136 | 29 | 2 |
-- | Module for getting stock prices.
module B1.Data.Price
( Price (..)
, getPriceChange
, getWeeklyPrices
) where
import Data.List
import Data.Time
import Data.Time.Calendar.WeekDate
-- | Price information during some time interval.
data Price = Price
  { startTime :: LocalTime -- ^ Start time of the trading period
  , endTime :: LocalTime -- ^ End time of the trading period
  , open :: Float -- ^ Opening price of the trading period
  , high :: Float -- ^ Highest price during the trading period
  , low :: Float -- ^ Lowest price during the trading period
  , close :: Float -- ^ Closing price of the trading period
  , volume :: Int -- ^ Volume of the trading period
} deriving (Show, Eq)
getPriceChange :: [Price] -> Int -> Float
getPriceChange prices index
| index + 1 < length prices = change
| otherwise = 0
where
currClose = close $ prices !! index
prevClose = close $ prices !! (index + 1)
change = currClose - prevClose
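-- The index arithmetic above implies that prices are ordered most recent
-- first: for closing prices of 10, 8 and 9 (in that order),
-- @getPriceChange prices 0@ is expected to be @2@ (10 - 8), and any index
-- whose successor falls outside the list yields @0@.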
getWeeklyPrices :: [Price] -> [Price]
getWeeklyPrices dailyPrices =
map flattenWeeklyPriceGroup $ getWeeklyPriceGroups dailyPrices
getWeeklyPriceGroups :: [Price] -> [[Price]]
getWeeklyPriceGroups dailyPrices =
groupBy sameWeekNumber dailyPrices
sameWeekNumber :: Price -> Price -> Bool
sameWeekNumber price otherPrice =
let week = getWeekNumber price
otherWeek = getWeekNumber otherPrice
in week == otherWeek
getWeekNumber :: Price -> Int
getWeekNumber price = weekNumber
where
day = localDay $ endTime price
(_, weekNumber, _) = toWeekDate day
flattenWeeklyPriceGroup :: [Price] -> Price
flattenWeeklyPriceGroup prices =
Price
{ startTime = flatStartTime
, endTime = flatEndTime
, open = flatOpen
, high = flatHigh
, low = flatLow
, close = flatClose
, volume = flatVolume
}
where
flatStartTime = startTime $ last prices
flatEndTime = endTime $ head prices
flatOpen = open $ last prices
flatHigh = maximum $ map high prices
flatLow = minimum $ map low prices
flatClose = close $ head prices
flatVolume = sum $ map volume prices
|
madjestic/b1
|
src/B1/Data/Price.hs
|
bsd-3-clause
| 2,092 | 0 | 9 | 462 | 524 | 293 | 231 | 55 | 1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "src/Data/Text/Lazy/Builder/Scientific.hs" #-}
{-# LANGUAGE CPP, OverloadedStrings #-}
module Data.Text.Lazy.Builder.Scientific
( scientificBuilder
, formatScientificBuilder
, FPFormat(..)
) where
import Data.Scientific (Scientific)
import qualified Data.Scientific as Scientific
import Data.Text.Lazy.Builder.RealFloat (FPFormat(..))
import Data.Text.Lazy.Builder (Builder, fromString, singleton, fromText)
import Data.Text.Lazy.Builder.Int (decimal)
import qualified Data.Text as T (replicate)
import Utils (roundTo, i2d)
import Data.Monoid ((<>))
-- | A @Text@ @Builder@ which renders a scientific number to full
-- precision, using standard decimal notation for arguments whose
-- absolute value lies between @0.1@ and @9,999,999@, and scientific
-- notation otherwise.
scientificBuilder :: Scientific -> Builder
scientificBuilder = formatScientificBuilder Generic Nothing
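-- An illustrative expectation based on the rules above: reading the code,
-- @scientificBuilder 123.456@ should render in fixed notation as @123.456@,
-- while @scientificBuilder 1.0e-5@ should render in scientific notation as
-- @1.0e-5@, since its exponent falls outside the fixed-notation range.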
-- | Like 'scientificBuilder' but provides rendering options.
formatScientificBuilder :: FPFormat
-> Maybe Int -- ^ Number of decimal places to render.
-> Scientific
-> Builder
formatScientificBuilder fmt decs scntfc
| scntfc < 0 = singleton '-' <> doFmt fmt (Scientific.toDecimalDigits (-scntfc))
| otherwise = doFmt fmt (Scientific.toDecimalDigits scntfc)
where
doFmt format (is, e) =
let ds = map i2d is in
case format of
Generic ->
doFmt (if e < 0 || e > 7 then Exponent else Fixed)
(is,e)
Exponent ->
case decs of
Nothing ->
let show_e' = decimal (e-1) in
case ds of
"0" -> "0.0e0"
[d] -> singleton d <> ".0e" <> show_e'
(d:ds') -> singleton d <> singleton '.' <> fromString ds' <> singleton 'e' <> show_e'
[] -> error $ "Data.Text.Lazy.Builder.Scientific.formatScientificBuilder" ++
"/doFmt/Exponent: []"
Just dec ->
let dec' = max dec 1 in
case is of
[0] -> "0." <> fromText (T.replicate dec' "0") <> "e0"
_ ->
let
(ei,is') = roundTo (dec'+1) is
(d:ds') = map i2d (if ei > 0 then init is' else is')
in
singleton d <> singleton '.' <> fromString ds' <> singleton 'e' <> decimal (e-1+ei)
Fixed ->
let
mk0 ls = case ls of { "" -> "0" ; _ -> fromString ls}
in
case decs of
Nothing
| e <= 0 -> "0." <> fromText (T.replicate (-e) "0") <> fromString ds
| otherwise ->
let
f 0 s rs = mk0 (reverse s) <> singleton '.' <> mk0 rs
f n s "" = f (n-1) ('0':s) ""
f n s (r:rs) = f (n-1) (r:s) rs
in
f e "" ds
Just dec ->
let dec' = max dec 0 in
if e >= 0 then
let
(ei,is') = roundTo (dec' + e) is
(ls,rs) = splitAt (e+ei) (map i2d is')
in
mk0 ls <> (if null rs then "" else singleton '.' <> fromString rs)
else
let
(ei,is') = roundTo dec' (replicate (-e) 0 ++ is)
d:ds' = map i2d (if ei > 0 then is' else 0:is')
in
singleton d <> (if null ds' then "" else singleton '.' <> fromString ds')
|
phischu/fragnix
|
tests/packages/scotty/Data.Text.Lazy.Builder.Scientific.hs
|
bsd-3-clause
| 3,464 | 1 | 32 | 1,218 | 1,073 | 558 | 515 | 72 | 18 |
-- | Pretty printing of graphs.
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module GraphPpr (
dumpGraph,
dotGraph
)
where
import GraphBase
import Outputable
import Unique
import UniqSet
import UniqFM
import Data.List
import Data.Maybe
-- | Pretty print a graph in a somewhat human readable format.
dumpGraph
:: (Outputable k, Outputable cls, Outputable color)
=> Graph k cls color -> SDoc
dumpGraph graph
= text "Graph"
$$ (vcat $ map dumpNode $ eltsUFM $ graphMap graph)
dumpNode
:: (Outputable k, Outputable cls, Outputable color)
=> Node k cls color -> SDoc
dumpNode node
= text "Node " <> ppr (nodeId node)
$$ text "conflicts "
<> parens (int (sizeUniqSet $ nodeConflicts node))
<> text " = "
<> ppr (nodeConflicts node)
$$ text "exclusions "
<> parens (int (sizeUniqSet $ nodeExclusions node))
<> text " = "
<> ppr (nodeExclusions node)
$$ text "coalesce "
<> parens (int (sizeUniqSet $ nodeCoalesce node))
<> text " = "
<> ppr (nodeCoalesce node)
$$ space
-- | Pretty print a graph in graphviz .dot format.
-- Conflicts get solid edges.
-- Coalescences get dashed edges.
dotGraph
:: ( Uniquable k
, Outputable k, Outputable cls, Outputable color)
=> (color -> SDoc) -- ^ What graphviz color to use for each node color
-- It's usually safe to return X11 style colors here,
-- ie "red", "green" etc or a hex triplet #aaff55 etc
-> Triv k cls color
-> Graph k cls color -> SDoc
dotGraph colorMap triv graph
= let nodes = eltsUFM $ graphMap graph
in vcat
( [ text "graph G {" ]
++ map (dotNode colorMap triv) nodes
++ (catMaybes $ snd $ mapAccumL dotNodeEdges emptyUniqSet nodes)
++ [ text "}"
, space ])
dotNode :: ( Uniquable k
, Outputable k, Outputable cls, Outputable color)
=> (color -> SDoc)
-> Triv k cls color
-> Node k cls color -> SDoc
dotNode colorMap triv node
= let name = ppr $ nodeId node
cls = ppr $ nodeClass node
excludes
= hcat $ punctuate space
$ map (\n -> text "-" <> ppr n)
$ uniqSetToList $ nodeExclusions node
preferences
= hcat $ punctuate space
$ map (\n -> text "+" <> ppr n)
$ nodePreference node
expref = if and [isEmptyUniqSet (nodeExclusions node), null (nodePreference node)]
then empty
else text "\\n" <> (excludes <+> preferences)
-- if the node has been colored then show that,
-- otherwise indicate whether it looks trivially colorable.
color
| Just c <- nodeColor node
= text "\\n(" <> ppr c <> text ")"
| triv (nodeClass node) (nodeConflicts node) (nodeExclusions node)
= text "\\n(" <> text "triv" <> text ")"
| otherwise
= text "\\n(" <> text "spill?" <> text ")"
label = name <> text " :: " <> cls
<> expref
<> color
pcolorC = case nodeColor node of
Nothing -> text "style=filled fillcolor=white"
Just c -> text "style=filled fillcolor=" <> doubleQuotes (colorMap c)
pout = text "node [label=" <> doubleQuotes label <> space <> pcolorC <> text "]"
<> space <> doubleQuotes name
<> text ";"
in pout
-- | Nodes in the graph are doubly linked, but we only want one edge for each
--      conflict in the graphviz graph. Traverse over the graph, but make sure
-- to only print the edges for each node once.
dotNodeEdges
:: ( Uniquable k
, Outputable k, Outputable cls, Outputable color)
=> UniqSet k
-> Node k cls color
-> (UniqSet k, Maybe SDoc)
dotNodeEdges visited node
| elementOfUniqSet (nodeId node) visited
= ( visited
, Nothing)
| otherwise
= let dconflicts
= map (dotEdgeConflict (nodeId node))
$ uniqSetToList
$ minusUniqSet (nodeConflicts node) visited
dcoalesces
= map (dotEdgeCoalesce (nodeId node))
$ uniqSetToList
$ minusUniqSet (nodeCoalesce node) visited
out = vcat dconflicts
$$ vcat dcoalesces
in ( addOneToUniqSet visited (nodeId node)
, Just out)
where dotEdgeConflict u1 u2
= doubleQuotes (ppr u1) <> text " -- " <> doubleQuotes (ppr u2)
<> text ";"
dotEdgeCoalesce u1 u2
= doubleQuotes (ppr u1) <> text " -- " <> doubleQuotes (ppr u2)
<> space <> text "[ style = dashed ];"
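-- Based on the helpers above, a conflict edge is emitted as
-- @"u1" -- "u2";@ and a coalescence edge as
-- @"u1" -- "u2" [ style = dashed ];@, matching the solid/dashed convention
-- described in the 'dotGraph' documentation.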
|
nomeata/ghc
|
compiler/utils/GraphPpr.hs
|
bsd-3-clause
| 4,392 | 243 | 22 | 996 | 1,371 | 723 | 648 | 114 | 3 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[Name]{@Name@: to transmit name info from renamer to typechecker}
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards #-}
-- |
-- #name_types#
-- GHC uses several kinds of name internally:
--
-- * 'OccName.OccName': see "OccName#name_types"
--
-- * 'RdrName.RdrName': see "RdrName#name_types"
--
-- * 'Name.Name' is the type of names that have had their scoping and binding resolved. They
-- have an 'OccName.OccName' but also a 'Unique.Unique' that disambiguates Names that have
-- the same 'OccName.OccName' and indeed is used for all 'Name.Name' comparison. Names
-- also contain information about where they originated from, see "Name#name_sorts"
--
-- * 'Id.Id': see "Id#name_types"
--
-- * 'Var.Var': see "Var#name_types"
--
-- #name_sorts#
-- Names are one of:
--
-- * External, if they name things declared in other modules. Some external
-- Names are wired in, i.e. they name primitives defined in the compiler itself
--
-- * Internal, if they name things in the module being compiled. Some internal
-- Names are system names, if they are names manufactured by the compiler
module Name (
-- * The main types
Name, -- Abstract
BuiltInSyntax(..),
-- ** Creating 'Name's
mkSystemName, mkSystemNameAt,
mkInternalName, mkClonedInternalName, mkDerivedInternalName,
mkSystemVarName, mkSysTvName,
mkFCallName,
mkExternalName, mkWiredInName,
-- ** Manipulating and deconstructing 'Name's
nameUnique, setNameUnique,
nameOccName, nameModule, nameModule_maybe,
setNameLoc,
tidyNameOcc,
localiseName,
mkLocalisedOccName,
nameSrcLoc, nameSrcSpan, pprNameDefnLoc, pprDefinedAt,
-- ** Predicates on 'Name's
isSystemName, isInternalName, isExternalName,
isTyVarName, isTyConName, isDataConName,
isValName, isVarName,
isWiredInName, isBuiltInSyntax,
isHoleName,
wiredInNameTyThing_maybe,
nameIsLocalOrFrom, nameIsHomePackageImport, nameIsFromExternalPackage,
stableNameCmp,
-- * Class 'NamedThing' and overloaded friends
NamedThing(..),
getSrcLoc, getSrcSpan, getOccString,
pprInfixName, pprPrefixName, pprModulePrefix,
nameStableString,
-- Re-export the OccName stuff
module OccName
) where
import {-# SOURCE #-} TypeRep( TyThing )
import {-# SOURCE #-} PrelNames( liftedTypeKindTyConKey )
import OccName
import Module
import SrcLoc
import Unique
import Util
import Maybes
import Binary
import DynFlags
import FastString
import Outputable
import Data.Data
{-
************************************************************************
* *
\subsection[Name-datatype]{The @Name@ datatype, and name construction}
* *
************************************************************************
-}
-- | A unique, unambiguous name for something, containing information about where
-- that thing originated.
data Name = Name {
n_sort :: NameSort, -- What sort of name it is
n_occ :: !OccName, -- Its occurrence name
n_uniq :: {-# UNPACK #-} !Int,
n_loc :: !SrcSpan -- Definition site
}
deriving Typeable
-- NOTE: we make the n_loc field strict to eliminate some potential
-- (and real!) space leaks, due to the fact that we don't look at
-- the SrcLoc in a Name all that often.
data NameSort
= External Module
| WiredIn Module TyThing BuiltInSyntax
-- A variant of External, for wired-in things
| Internal -- A user-defined Id or TyVar
-- defined in the module being compiled
| System -- A system-defined Id or TyVar. Typically the
-- OccName is very uninformative (like 's')
instance Outputable NameSort where
ppr (External _) = text "external"
ppr (WiredIn _ _ _) = text "wired-in"
ppr Internal = text "internal"
ppr System = text "system"
-- | BuiltInSyntax is for things like @(:)@, @[]@ and tuples,
-- which have special syntactic forms. They aren't in scope
-- as such.
data BuiltInSyntax = BuiltInSyntax | UserSyntax
{-
Notes about the NameSorts:
1. Initially, top-level Ids (including locally-defined ones) get External names,
and all other local Ids get Internal names
2. In any invocation of GHC, an External Name for "M.x" has one and only one
unique. This unique association is ensured via the Name Cache;
see Note [The Name Cache] in IfaceEnv.
3. Things with a External name are given C static labels, so they finally
appear in the .o file's symbol table. They appear in the symbol table
in the form M.n. If originally-local things have this property they
must be made @External@ first.
4. In the tidy-core phase, a External that is not visible to an importer
is changed to Internal, and a Internal that is visible is changed to External
5. A System Name differs in the following ways:
a) has unique attached when printing dumps
b) unifier eliminates sys tyvars in favour of user provs where possible
Before anything gets printed in interface files or output code, it's
fed through a 'tidy' processor, which zaps the OccNames to have
unique names; and converts all sys-locals to user locals
If any desugarer sys-locals have survived that far, they get changed to
"ds1", "ds2", etc.
Built-in syntax => It's a syntactic form, not "in scope" (e.g. [])
Wired-in thing => The thing (Id, TyCon) is fully known to the compiler,
not read from an interface file.
E.g. Bool, True, Int, Float, and many others
All built-in syntax is for wired-in things.
-}
instance HasOccName Name where
occName = nameOccName
nameUnique :: Name -> Unique
nameOccName :: Name -> OccName
nameModule :: Name -> Module
nameSrcLoc :: Name -> SrcLoc
nameSrcSpan :: Name -> SrcSpan
nameUnique name = mkUniqueGrimily (n_uniq name)
nameOccName name = n_occ name
nameSrcLoc name = srcSpanStart (n_loc name)
nameSrcSpan name = n_loc name
{-
************************************************************************
* *
\subsection{Predicates on names}
* *
************************************************************************
-}
isInternalName :: Name -> Bool
isExternalName :: Name -> Bool
isSystemName :: Name -> Bool
isWiredInName :: Name -> Bool
isWiredInName (Name {n_sort = WiredIn _ _ _}) = True
isWiredInName _ = False
wiredInNameTyThing_maybe :: Name -> Maybe TyThing
wiredInNameTyThing_maybe (Name {n_sort = WiredIn _ thing _}) = Just thing
wiredInNameTyThing_maybe _ = Nothing
isBuiltInSyntax :: Name -> Bool
isBuiltInSyntax (Name {n_sort = WiredIn _ _ BuiltInSyntax}) = True
isBuiltInSyntax _ = False
isExternalName (Name {n_sort = External _}) = True
isExternalName (Name {n_sort = WiredIn _ _ _}) = True
isExternalName _ = False
isInternalName name = not (isExternalName name)
isHoleName :: Name -> Bool
isHoleName = isHoleModule . nameModule
nameModule name =
nameModule_maybe name `orElse`
pprPanic "nameModule" (ppr (n_sort name) <+> ppr name)
nameModule_maybe :: Name -> Maybe Module
nameModule_maybe (Name { n_sort = External mod}) = Just mod
nameModule_maybe (Name { n_sort = WiredIn mod _ _}) = Just mod
nameModule_maybe _ = Nothing
nameIsLocalOrFrom :: Module -> Name -> Bool
-- ^ Returns True if the name is
-- (a) Internal
-- (b) External but from the specified module
-- (c) External but from the 'interactive' package
--
-- The key idea is that
-- False means: the entity is defined in some other module
-- you can find the details (type, fixity, instances)
-- in some interface file
-- those details will be stored in the EPT or HPT
--
-- True means: the entity is defined in this module or earlier in
-- the GHCi session
-- you can find details (type, fixity, instances) in the
-- TcGblEnv or TcLclEnv
--
-- The isInteractiveModule part is because successive interactions of a GHCi session
-- each give rise to a fresh module (Ghci1, Ghci2, etc), but they all come
-- from the magic 'interactive' package; and all the details are kept in the
-- TcLclEnv, TcGblEnv, NOT in the HPT or EPT.
-- See Note [The interactive package] in HscTypes
nameIsLocalOrFrom from name
| Just mod <- nameModule_maybe name = from == mod || isInteractiveModule mod
| otherwise = True
nameIsHomePackageImport :: Module -> Name -> Bool
-- True if the Name is defined in module of this package
-- /other than/ the this_mod
nameIsHomePackageImport this_mod
= \nm -> case nameModule_maybe nm of
Nothing -> False
Just nm_mod -> nm_mod /= this_mod
&& moduleUnitId nm_mod == this_pkg
where
this_pkg = moduleUnitId this_mod
-- | Returns True if the Name comes from some other package: neither this
-- package nor the interactive package.
nameIsFromExternalPackage :: UnitId -> Name -> Bool
nameIsFromExternalPackage this_pkg name
| Just mod <- nameModule_maybe name
, moduleUnitId mod /= this_pkg -- Not this package
, not (isInteractiveModule mod) -- Not the 'interactive' package
= True
| otherwise
= False
isTyVarName :: Name -> Bool
isTyVarName name = isTvOcc (nameOccName name)
isTyConName :: Name -> Bool
isTyConName name = isTcOcc (nameOccName name)
isDataConName :: Name -> Bool
isDataConName name = isDataOcc (nameOccName name)
isValName :: Name -> Bool
isValName name = isValOcc (nameOccName name)
isVarName :: Name -> Bool
isVarName = isVarOcc . nameOccName
isSystemName (Name {n_sort = System}) = True
isSystemName _ = False
{-
************************************************************************
* *
\subsection{Making names}
* *
************************************************************************
-}
-- | Create a name which is (for now at least) local to the current module and hence
-- does not need a 'Module' to disambiguate it from other 'Name's
mkInternalName :: Unique -> OccName -> SrcSpan -> Name
mkInternalName uniq occ loc = Name { n_uniq = getKey uniq
, n_sort = Internal
, n_occ = occ
, n_loc = loc }
-- NB: You might worry that after lots of huffing and
-- puffing we might end up with two local names with distinct
-- uniques, but the same OccName. Indeed we can, but that's ok
-- * the insides of the compiler don't care: they use the Unique
-- * when printing for -ddump-xxx you can switch on -dppr-debug to get the
-- uniques if you get confused
-- * for interface files we tidyCore first, which makes
-- the OccNames distinct when they need to be
mkClonedInternalName :: Unique -> Name -> Name
mkClonedInternalName uniq (Name { n_occ = occ, n_loc = loc })
= Name { n_uniq = getKey uniq, n_sort = Internal
, n_occ = occ, n_loc = loc }
mkDerivedInternalName :: (OccName -> OccName) -> Unique -> Name -> Name
mkDerivedInternalName derive_occ uniq (Name { n_occ = occ, n_loc = loc })
= Name { n_uniq = getKey uniq, n_sort = Internal
, n_occ = derive_occ occ, n_loc = loc }
-- | Create a name which definitely originates in the given module
mkExternalName :: Unique -> Module -> OccName -> SrcSpan -> Name
-- WATCH OUT! External Names should be in the Name Cache
-- (see Note [The Name Cache] in IfaceEnv), so don't just call mkExternalName
-- with some fresh unique without populating the Name Cache
mkExternalName uniq mod occ loc
= Name { n_uniq = getKey uniq, n_sort = External mod,
n_occ = occ, n_loc = loc }
-- | Create a name which is actually defined by the compiler itself
mkWiredInName :: Module -> OccName -> Unique -> TyThing -> BuiltInSyntax -> Name
mkWiredInName mod occ uniq thing built_in
= Name { n_uniq = getKey uniq,
n_sort = WiredIn mod thing built_in,
n_occ = occ, n_loc = wiredInSrcSpan }
-- | Create a name brought into being by the compiler
mkSystemName :: Unique -> OccName -> Name
mkSystemName uniq occ = mkSystemNameAt uniq occ noSrcSpan
mkSystemNameAt :: Unique -> OccName -> SrcSpan -> Name
mkSystemNameAt uniq occ loc = Name { n_uniq = getKey uniq, n_sort = System
, n_occ = occ, n_loc = loc }
mkSystemVarName :: Unique -> FastString -> Name
mkSystemVarName uniq fs = mkSystemName uniq (mkVarOccFS fs)
mkSysTvName :: Unique -> FastString -> Name
mkSysTvName uniq fs = mkSystemName uniq (mkOccNameFS tvName fs)
-- | Make a name for a foreign call
mkFCallName :: Unique -> String -> Name
mkFCallName uniq str = mkInternalName uniq (mkVarOcc str) noSrcSpan
-- The encoded string completely describes the ccall
-- When we renumber/rename things, we need to be
-- able to change a Name's Unique to match the cached
-- one in the thing it's the name of. If you know what I mean.
setNameUnique :: Name -> Unique -> Name
setNameUnique name uniq = name {n_uniq = getKey uniq}
-- This is used for hsigs: we want to use the name of the originally exported
-- entity, but edit the location to refer to the reexport site
setNameLoc :: Name -> SrcSpan -> Name
setNameLoc name loc = name {n_loc = loc}
tidyNameOcc :: Name -> OccName -> Name
-- We set the OccName of a Name when tidying
-- In doing so, we change System --> Internal, so that when we print
-- it we don't get the unique by default. It's tidy now!
tidyNameOcc name@(Name { n_sort = System }) occ = name { n_occ = occ, n_sort = Internal}
tidyNameOcc name occ = name { n_occ = occ }
-- | Make the 'Name' into an internal name, regardless of what it was to begin with
localiseName :: Name -> Name
localiseName n = n { n_sort = Internal }
-- |Create a localised variant of a name.
--
-- If the name is external, encode the original's module name to disambiguate.
-- SPJ says: this is a rather odd-looking function; but it seems to
-- be used only during vectorisation, so I'm not going to worry
mkLocalisedOccName :: Module -> (Maybe String -> OccName -> OccName) -> Name -> OccName
mkLocalisedOccName this_mod mk_occ name = mk_occ origin (nameOccName name)
where
origin
| nameIsLocalOrFrom this_mod name = Nothing
| otherwise = Just (moduleNameColons . moduleName . nameModule $ name)
{-
************************************************************************
* *
\subsection{Hashing and comparison}
* *
************************************************************************
-}
cmpName :: Name -> Name -> Ordering
cmpName n1 n2 = n_uniq n1 `compare` n_uniq n2
stableNameCmp :: Name -> Name -> Ordering
-- Compare lexicographically
stableNameCmp (Name { n_sort = s1, n_occ = occ1 })
(Name { n_sort = s2, n_occ = occ2 })
= (s1 `sort_cmp` s2) `thenCmp` (occ1 `compare` occ2)
-- The ordinary compare on OccNames is lexicographic
where
-- Later constructors are bigger
sort_cmp (External m1) (External m2) = m1 `stableModuleCmp` m2
sort_cmp (External {}) _ = LT
sort_cmp (WiredIn {}) (External {}) = GT
sort_cmp (WiredIn m1 _ _) (WiredIn m2 _ _) = m1 `stableModuleCmp` m2
sort_cmp (WiredIn {}) _ = LT
sort_cmp Internal (External {}) = GT
sort_cmp Internal (WiredIn {}) = GT
sort_cmp Internal Internal = EQ
sort_cmp Internal System = LT
sort_cmp System System = EQ
sort_cmp System _ = GT
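-- In other words, the NameSort ordering used here is
-- External < WiredIn < Internal < System, with External/WiredIn ties broken
-- by 'stableModuleCmp' and the final tie-break done on the OccName.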
{-
************************************************************************
* *
\subsection[Name-instances]{Instance declarations}
* *
************************************************************************
-}
instance Eq Name where
a == b = case (a `compare` b) of { EQ -> True; _ -> False }
a /= b = case (a `compare` b) of { EQ -> False; _ -> True }
instance Ord Name where
a <= b = case (a `compare` b) of { LT -> True; EQ -> True; GT -> False }
a < b = case (a `compare` b) of { LT -> True; EQ -> False; GT -> False }
a >= b = case (a `compare` b) of { LT -> False; EQ -> True; GT -> True }
a > b = case (a `compare` b) of { LT -> False; EQ -> False; GT -> True }
compare a b = cmpName a b
instance Uniquable Name where
getUnique = nameUnique
instance NamedThing Name where
getName n = n
instance Data Name where
-- don't traverse?
toConstr _ = abstractConstr "Name"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "Name"
{-
************************************************************************
* *
\subsection{Binary}
* *
************************************************************************
-}
instance Binary Name where
put_ bh name =
case getUserData bh of
UserData{ ud_put_name = put_name } -> put_name bh name
get bh =
case getUserData bh of
UserData { ud_get_name = get_name } -> get_name bh
{-
************************************************************************
* *
\subsection{Pretty printing}
* *
************************************************************************
-}
instance Outputable Name where
ppr name = pprName name
instance OutputableBndr Name where
pprBndr _ name = pprName name
pprInfixOcc = pprInfixName
pprPrefixOcc = pprPrefixName
pprName :: Name -> SDoc
pprName (Name {n_sort = sort, n_uniq = u, n_occ = occ})
= getPprStyle $ \ sty ->
case sort of
WiredIn mod _ builtin -> pprExternal sty uniq mod occ True builtin
External mod -> pprExternal sty uniq mod occ False UserSyntax
System -> pprSystem sty uniq occ
Internal -> pprInternal sty uniq occ
where uniq = mkUniqueGrimily u
pprExternal :: PprStyle -> Unique -> Module -> OccName -> Bool -> BuiltInSyntax -> SDoc
pprExternal sty uniq mod occ is_wired is_builtin
| codeStyle sty = ppr mod <> char '_' <> ppr_z_occ_name occ
-- In code style, always qualify
-- ToDo: maybe we could print all wired-in things unqualified
-- in code style, to reduce symbol table bloat?
| debugStyle sty = pp_mod <> ppr_occ_name occ
<> braces (hsep [if is_wired then ptext (sLit "(w)") else empty,
pprNameSpaceBrief (occNameSpace occ),
pprUnique uniq])
| BuiltInSyntax <- is_builtin = ppr_occ_name occ -- Never qualify builtin syntax
| otherwise = pprModulePrefix sty mod occ <> ppr_occ_name occ
where
pp_mod = sdocWithDynFlags $ \dflags ->
if gopt Opt_SuppressModulePrefixes dflags
then empty
else ppr mod <> dot
pprInternal :: PprStyle -> Unique -> OccName -> SDoc
pprInternal sty uniq occ
| codeStyle sty = pprUnique uniq
| debugStyle sty = ppr_occ_name occ <> braces (hsep [pprNameSpaceBrief (occNameSpace occ),
pprUnique uniq])
| dumpStyle sty = ppr_occ_name occ <> ppr_underscore_unique uniq
-- For debug dumps, we're not necessarily dumping
-- tidied code, so we need to print the uniques.
| otherwise = ppr_occ_name occ -- User style
-- Like Internal, except that we only omit the unique in Iface style
pprSystem :: PprStyle -> Unique -> OccName -> SDoc
pprSystem sty uniq occ
| codeStyle sty = pprUnique uniq
| debugStyle sty = ppr_occ_name occ <> ppr_underscore_unique uniq
<> braces (pprNameSpaceBrief (occNameSpace occ))
| otherwise = ppr_occ_name occ <> ppr_underscore_unique uniq
-- If the tidy phase hasn't run, the OccName
-- is unlikely to be informative (like 's'),
-- so print the unique
pprModulePrefix :: PprStyle -> Module -> OccName -> SDoc
-- Print the "M." part of a name, based on whether it's in scope or not
-- See Note [Printing original names] in HscTypes
pprModulePrefix sty mod occ = sdocWithDynFlags $ \dflags ->
if gopt Opt_SuppressModulePrefixes dflags
then empty
else
case qualName sty mod occ of -- See Outputable.QualifyName:
NameQual modname -> ppr modname <> dot -- Name is in scope
NameNotInScope1 -> ppr mod <> dot -- Not in scope
NameNotInScope2 -> ppr (moduleUnitId mod) <> colon -- Module not in
<> ppr (moduleName mod) <> dot -- scope either
NameUnqual -> empty -- In scope unqualified
ppr_underscore_unique :: Unique -> SDoc
-- Print an underscore separating the name from its unique
-- But suppress it if we aren't printing the uniques anyway
ppr_underscore_unique uniq
= sdocWithDynFlags $ \dflags ->
if gopt Opt_SuppressUniques dflags
then empty
else char '_' <> pprUnique uniq
ppr_occ_name :: OccName -> SDoc
ppr_occ_name occ = ftext (occNameFS occ)
-- Don't use pprOccName; instead, just print the string of the OccName;
-- we print the namespace in the debug stuff above
-- In code style, we Z-encode the strings. The results of Z-encoding each FastString are
-- cached behind the scenes in the FastString implementation.
ppr_z_occ_name :: OccName -> SDoc
ppr_z_occ_name occ = ztext (zEncodeFS (occNameFS occ))
-- Prints (if mod information is available) "Defined at <loc>" or
-- "Defined in <mod>" information for a Name.
pprDefinedAt :: Name -> SDoc
pprDefinedAt name = ptext (sLit "Defined") <+> pprNameDefnLoc name
pprNameDefnLoc :: Name -> SDoc
-- Prints "at <loc>" or
-- or "in <mod>" depending on what info is available
pprNameDefnLoc name
= case nameSrcLoc name of
-- nameSrcLoc rather than nameSrcSpan
-- It seems less cluttered to show a location
-- rather than a span for the definition point
RealSrcLoc s -> ptext (sLit "at") <+> ppr s
UnhelpfulLoc s
| isInternalName name || isSystemName name
-> ptext (sLit "at") <+> ftext s
| otherwise
-> ptext (sLit "in") <+> quotes (ppr (nameModule name))
-- | Get a string representation of a 'Name' that's unique and stable
-- across recompilations. Used for deterministic generation of binds for
-- derived instances.
-- eg. "$aeson_70dylHtv1FFGeai1IoxcQr$Data.Aeson.Types.Internal$String"
nameStableString :: Name -> String
nameStableString Name{..} =
nameSortStableString n_sort ++ "$" ++ occNameString n_occ
nameSortStableString :: NameSort -> String
nameSortStableString System = "$_sys"
nameSortStableString Internal = "$_in"
nameSortStableString (External mod) = moduleStableString mod
nameSortStableString (WiredIn mod _ _) = moduleStableString mod
{-
************************************************************************
* *
\subsection{Overloaded functions related to Names}
* *
************************************************************************
-}
-- | A class allowing convenient access to the 'Name' of various datatypes
class NamedThing a where
getOccName :: a -> OccName
getName :: a -> Name
getOccName n = nameOccName (getName n) -- Default method
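-- For example (an illustrative sketch, not spelled out here), the simplest
-- possible instance just returns the 'Name' itself:
--
--     instance NamedThing Name where
--         getName n = n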
getSrcLoc :: NamedThing a => a -> SrcLoc
getSrcSpan :: NamedThing a => a -> SrcSpan
getOccString :: NamedThing a => a -> String
getSrcLoc = nameSrcLoc . getName
getSrcSpan = nameSrcSpan . getName
getOccString = occNameString . getOccName
pprInfixName :: (Outputable a, NamedThing a) => a -> SDoc
-- See Outputable.pprPrefixVar, pprInfixVar;
-- add parens or back-quotes as appropriate
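-- For example (illustrative): a symbolic name such as ++ is rendered as
-- (++) by pprPrefixName and as ++ by pprInfixName, whereas an alphanumeric
-- name such as map is rendered as map and `map` respectively.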
pprInfixName n = pprInfixVar (isSymOcc (getOccName n)) (ppr n)
pprPrefixName :: NamedThing a => a -> SDoc
pprPrefixName thing
| name `hasKey` liftedTypeKindTyConKey
= ppr name -- See Note [Special treatment for kind *]
| otherwise
= pprPrefixVar (isSymOcc (nameOccName name)) (ppr name)
where
name = getName thing
{-
Note [Special treatment for kind *]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do not put parens around the kind '*'. Even though it looks like
an operator, it is really a special case.
This pprPrefixName stuff is really only used when printing HsSyn,
which has to be polymorphic in the name type, and hence has to go via
the overloaded function pprPrefixOcc. It's easier where we know the
type being pretty printed; eg the pretty-printing code in TypeRep.
See Trac #7645, which led to this.
-}
|
AlexanderPankiv/ghc
|
compiler/basicTypes/Name.hs
|
bsd-3-clause
| 26,295 | 0 | 16 | 7,485 | 4,321 | 2,325 | 1,996 | 318 | 11 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="el-GR">
<title>Tips and Tricks | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Ευρετήριο</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Αναζήτηση</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/tips/src/main/javahelp/org/zaproxy/zap/extension/tips/resources/help_el_GR/helpset_el_GR.hs
|
apache-2.0
| 1,001 | 78 | 66 | 160 | 449 | 226 | 223 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances #-}
{-# LANGUAGE PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Stoppable
-- Copyright : (c) Anton Vorontsov <[email protected]> 2014
-- License : BSD-style (as xmonad)
--
-- Maintainer : Anton Vorontsov <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- This module implements a special kind of layout modifier which, when
-- applied to a layout, causes xmonad to stop all non-visible processes.
-- In a way, this is a sledge-hammer for applications that drain power.
-- For example, given a web browser on a stoppable workspace, once the
-- workspace is hidden the web browser will be stopped.
--
-- Note that a stopped application won't be able to communicate with the X11
-- clipboard. For this reason, the module actually stops applications after a
-- certain delay, giving the user a chance to complete a copy-paste
-- sequence. By default the delay is 15 seconds; it is configurable via the
-- 'Stoppable' constructor.
--
-- The stoppable modifier prepends a mark (by default
-- \"Stoppable\") to the layout description (alternatively, you can choose
-- your own mark and use it with the 'Stoppable' constructor). The stoppable
-- layout (identified by the mark) spans multiple workspaces, letting you
-- create groups of stoppable workspaces that only stop processes when
-- none of the workspaces is visible, and conversely, unfreeze all
-- processes as long as even one of the stoppable workspaces is visible.
--
-- To stop the process we use signals, which works for most cases. For
-- processes that tinker with signal handling (debuggers), another
-- (Linux-centric) approach may be used. See
-- <https://www.kernel.org/doc/Documentation/cgroups/freezer-subsystem.txt>
--
-----------------------------------------------------------------------------
module XMonad.Layout.Stoppable
( -- $usage
Stoppable(..)
, stoppable
) where
import XMonad
import XMonad.Actions.WithAll
import XMonad.Util.WindowProperties
import XMonad.Util.RemoteWindows
import XMonad.Util.Timer
import XMonad.StackSet hiding (filter)
import XMonad.Layout.LayoutModifier
import System.Posix.Signals
import Data.Maybe
import Control.Monad
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad
-- > import XMonad.Layout.Stoppable
-- >
-- > main = xmonad def
-- > { layoutHook = layoutHook def ||| stoppable (layoutHook def) }
--
-- Note that the module has to distinguish between local and remote
-- processes, which means that it needs to know the hostname, so it looks
-- for environment variables (e.g. HOST).
--
-- Environment variables will work for most cases, but won't work if the
-- hostname changes. To cover the dynamic hostname case, in addition to the
-- layoutHook you also have to provide the manageHook from the
-- "XMonad.Util.RemoteWindows" module.
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
signalWindow :: Signal -> Window -> X ()
signalWindow s w = do
pid <- getProp32s "_NET_WM_PID" w
io $ (signalProcess s . fromIntegral) `mapM_` fromMaybe [] pid
signalLocalWindow :: Signal -> Window -> X ()
signalLocalWindow s w = isLocalWindow w >>= flip when (signalWindow s w)
withAllOn :: (a -> X ()) -> Workspace i l a -> X ()
withAllOn f wspc = f `mapM_` integrate' (stack wspc)
withAllFiltered :: (Workspace i l a -> Bool)
-> [Workspace i l a]
-> (a -> X ()) -> X ()
withAllFiltered p wspcs f = withAllOn f `mapM_` filter p wspcs
sigStoppableWorkspacesHook :: String -> X ()
sigStoppableWorkspacesHook k = do
ws <- gets windowset
withAllFiltered isStoppable (hidden ws) (signalLocalWindow sigSTOP)
where
isStoppable ws = k `elem` words (description $ layout ws)
-- | Data type for ModifiedLayout. The constructor lets you specify a
-- custom mark/description modifier and a delay. You can also use the
-- 'stoppable' helper function.
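--
-- For example (an illustrative sketch, not taken from the original module),
-- a stoppable layout with a custom mark and a 30-second delay could be
-- built directly from the constructor:
--
-- > stoppable' = ModifiedLayout (Stoppable "PowerSave" 30 Nothing)  -- "PowerSave" is a hypothetical mark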
data Stoppable a = Stoppable
{ mark :: String
, delay :: Rational
, timer :: Maybe TimerId
} deriving (Show,Read)
instance LayoutModifier Stoppable Window where
modifierDescription = mark
hook _ = withAll $ signalLocalWindow sigCONT
handleMess (Stoppable m _ (Just tid)) msg
| Just ev <- fromMessage msg = handleTimer tid ev run
where run = sigStoppableWorkspacesHook m >> return Nothing
handleMess (Stoppable m d _) msg
| Just Hide <- fromMessage msg =
(Just . Stoppable m d . Just) `liftM` startTimer d
| otherwise = return Nothing
-- | Convert a layout to a stoppable layout using the default mark
-- (\"Stoppable\") and a delay of 15 seconds.
stoppable :: l a -> ModifiedLayout Stoppable l a
stoppable = ModifiedLayout (Stoppable "Stoppable" 15 Nothing)
|
pjones/xmonad-test
|
vendor/xmonad-contrib/XMonad/Layout/Stoppable.hs
|
bsd-2-clause
| 4,955 | 0 | 11 | 911 | 739 | 409 | 330 | 50 | 1 |
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module T11711 where
import Data.Kind (Type)
data (:~~:) (a :: k1) (b :: k2) where
HRefl :: a :~~: a
data TypeRep (a :: k) where
TrTyCon :: String -> TypeRep k -> TypeRep (a :: k)
TrApp :: forall k1 k2 (a :: k1 -> k2) (b :: k1).
TypeRep (a :: k1 -> k2)
-> TypeRep (b :: k1)
-> TypeRep (a b)
class Typeable (a :: k) where
typeRep :: TypeRep a
data SomeTypeRep where
SomeTypeRep :: forall k (a :: k). TypeRep a -> SomeTypeRep
eqTypeRep :: TypeRep a -> TypeRep b -> Maybe (a :~~: b)
eqTypeRep = undefined
typeRepKind :: forall k (a :: k). TypeRep a -> TypeRep k
typeRepKind = undefined
instance Typeable Type where
typeRep = TrTyCon "Type" typeRep
funResultTy :: SomeTypeRep -> SomeTypeRep -> Maybe SomeTypeRep
funResultTy (SomeTypeRep f) (SomeTypeRep x)
| Just HRefl <- (typeRep :: TypeRep Type) `eqTypeRep` typeRepKind f
, TRFun arg res <- f
, Just HRefl <- arg `eqTypeRep` x
= Just (SomeTypeRep res)
| otherwise
= Nothing
trArrow :: TypeRep (->)
trArrow = undefined
pattern TRFun :: forall fun. ()
=> forall arg res. (fun ~ (arg -> res))
=> TypeRep arg
-> TypeRep res
-> TypeRep fun
pattern TRFun arg res <- TrApp (TrApp (eqTypeRep trArrow -> Just HRefl) arg) res
|
shlevy/ghc
|
testsuite/tests/dependent/should_compile/T11711.hs
|
bsd-3-clause
| 1,611 | 0 | 13 | 401 | 521 | 287 | 234 | -1 | -1 |
module Compose where
import Prelude hiding (Monad, return )
-- | TODO
-- |
-- | 1. default methods are currently not supported
data ST s a = ST {runState :: s -> (a,s)}
{-@ data ST s a <r :: a -> Prop>
= ST (runState :: x:s -> (a<r>, s)) @-}
{-@ runState :: forall <r :: a -> Prop>. ST <r> s a -> x:s -> (a<r>, s) @-}
class Foo m where
return :: a -> m a
instance Foo (ST s) where
{-@ instance Foo ST s where
return :: forall s a. x:a -> ST <{\v -> x == v}> s a
@-}
return x = ST $ \s -> (x, s)
{-@ foo :: w:a -> ST <{v:a | v > w}> Bool a @-}
foo :: a -> ST Bool a
foo x = return x
bar = runState (foo 0) True
|
ssaavedra/liquidhaskell
|
tests/neg/StateConstraints00.hs
|
bsd-3-clause
| 662 | 0 | 10 | 202 | 149 | 85 | 64 | 10 | 1 |
{-# LANGUAGE FlexibleContexts, MultiParamTypeClasses, TypeFamilies #-}
module T4497 where
norm2PropR a = twiddle (norm2 a) a
twiddle :: Normed a => a -> a -> Double
twiddle a b = undefined
norm2 :: e -> RealOf e
norm2 = undefined
class (Num (RealOf t)) => Normed t
type family RealOf x
|
spacekitteh/smcghc
|
testsuite/tests/indexed-types/should_compile/T4497.hs
|
bsd-3-clause
| 311 | 0 | 8 | 76 | 99 | 52 | 47 | -1 | -1 |
{-# LANGUAGE GADTs #-}
-- Tests record syntax for GADTs
-- Record syntax in GADTs has been deprecated since July 2009
-- see commit 432b9c9322181a3644083e3c19b7e240d90659e7 by simonpj:
-- "New syntax for GADT-style record declarations, and associated refactoring"
-- and Trac #3306
-- It's been removed in August 2015
-- see Phab D1118
-- test should result in a parse error
module ShouldFail where
data T a where
T1 { x :: a, y :: b } :: T (a,b)
T4 { x :: Int } :: T [a]
|
ezyang/ghc
|
testsuite/tests/gadt/records-fail1.hs
|
bsd-3-clause
| 482 | 5 | 7 | 98 | 61 | 40 | 21 | -1 | -1 |
-- Copyright © 2021 Mark Raynsford <[email protected]> https://www.io7m.com
--
-- Permission to use, copy, modify, and/or distribute this software for any
-- purpose with or without fee is hereby granted, provided that the above
-- copyright notice and this permission notice appear in all copies.
--
-- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-- WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-- MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-- ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-- WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-- ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-- OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
module Vector2f (
T (V2), x, y,
add2,
magnitude,
magnitude_squared,
mult2,
negation,
normalize,
scale,
sub2
) where
data T = V2 {
x :: Float,
y :: Float
} deriving (Eq, Ord, Show)
-- | Add vectors, @v0 + v1@.
add2 :: T -> T -> T
add2 (V2 x0 y0) (V2 x1 y1) =
V2 (x0 + x1) (y0 + y1)
-- | Subtract vectors, @v0 - v1@.
sub2 :: T -> T -> T
sub2 (V2 x0 y0) (V2 x1 y1) =
V2 (x0 - x1) (y0 - y1)
-- | Component-wise multiply vectors, @v0 * v1@.
mult2 :: T -> T -> T
mult2 (V2 x0 y0) (V2 x1 y1) =
V2 (x0 * x1) (y0 * y1)
-- | Scale vectors by scalars, @v * s@.
scale :: T -> Float -> T
scale (V2 x0 y0) s =
V2 (x0 * s) (y0 * s)
-- | Dot product of @v0@ and @v1@.
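-- A quick worked example (illustrative, not from the original source):
-- @dot2 (V2 1.0 2.0) (V2 3.0 4.0)@ multiplies component-wise to @V2 3.0 8.0@
-- and sums the components, giving @11.0@.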
dot2 :: T -> T -> Float
dot2 v0 v1 =
case mult2 v0 v1 of
V2 x y -> x + y
-- | The squared magnitude of @v@, equal to @'dot2' v v@.
magnitude_squared :: T -> Float
magnitude_squared v = dot2 v v
-- | The magnitude of @v@.
magnitude :: T -> Float
magnitude = sqrt . magnitude_squared
-- | @v@ with unit length.
normalize :: T -> T
normalize v =
let m = magnitude_squared v in
if m > 0.0 then
    scale v (1.0 / sqrt m)
else
v
-- | The negation of @v@.
negation :: T -> T
negation (V2 x y) =
V2 (0.0 - x) (0.0 - y)
|
io7m/jcamera
|
com.io7m.jcamera.documentation/src/main/resources/com/io7m/jcamera/documentation/haskell/Vector2f.hs
|
isc
| 2,026 | 0 | 10 | 478 | 511 | 283 | 228 | 45 | 2 |
import Common
main = print $ maxTuple [ (p, length [ (c,b,a) | a <- [1..p], b <- [1..a], let c = p - (a+b), a > c, b > c, c > 0, a^2 == b^2 + c^2 ] ) | p <- [1..1000] ]
|
lekto/haskell
|
Project-Euler-Solutions/problem0039.hs
|
mit
| 171 | 0 | 17 | 48 | 148 | 79 | 69 | 2 | 1 |
{-# LANGUAGE Rank2Types #-}
module Command.ParseUtil (
pathspecSource
, PathSpec
) where
import Control.Monad
import Control.Monad.Trans ( lift )
import Data.List ( delete )
import Data.Machine
import System.IO
type PathSpec = FilePath
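-- A sketch of the intended behaviour (inferred from the definition below,
-- not an official doctest): the literal path "-" stands for stdin, so
-- @pathspecSource ["a.txt", "-"] False@ yields "a.txt" followed by the
-- lines read from stdin, while @pathspecSource ["a.txt"] False@ yields
-- just "a.txt". ("a.txt" is a hypothetical file name.)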
pathspecSource :: [PathSpec] -> Bool -> SourceT IO PathSpec
pathspecSource argPaths readMoreFromStdin =
if (readMoreFromStdin || elem stdinFilename argPaths)
then stdinAsLinesSource ~> prepended filteredArgPaths
else source filteredArgPaths
where
filteredArgPaths = delete stdinFilename argPaths
stdinFilename = "-"
stdinAsLinesSource :: SourceT IO String
stdinAsLinesSource = construct $ stdinLinesPlan
where
stdinLinesPlan = do
eof <- lift isEOF
unless eof $ do
line <- lift getLine
yield line
stdinLinesPlan
|
danstiner/clod
|
src/Command/ParseUtil.hs
|
mit
| 819 | 0 | 13 | 169 | 200 | 105 | 95 | 25 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Grammar.Greek.Script.Word where
import Prelude hiding (Word)
import Control.Lens (makeLensesFor)
import GHC.Generics (Generic)
import Data.Data
import Data.Serialize (Serialize)
import Grammar.Greek.Script.Types
data WordAccent = WordAccent
{ accentValue :: BasicAccent
, accentPosition :: AccentPosition
, accentForce :: ForceAcute
, accentExtra :: ExtraAccents
}
deriving (Eq, Ord, Show, Generic, Data, Typeable)
instance Serialize WordAccent
makeLensesFor
[ ("accentValue", "_accentValue")
, ("accentPosition", "_accentPosition")
, ("accentForce", "_accentForce")
, ("accentExtra", "_accentExtra")
]
''WordAccent
data Syllable = Syllable
{ syllableConsonants :: [ConsonantRho]
, syllableVowel :: VocalicSyllable
}
deriving (Eq, Ord, Show, Generic, Data, Typeable)
instance Serialize Syllable
makeLensesFor
[ ("syllableConsonants", "_syllableConsonants")
, ("syllableVowel", "_syllableVowel")
]
''Syllable
data Word = Word
{ wordInitialAspiration :: InitialAspiration
, wordSyllables :: [Syllable]
, wordFinalConsonants :: [ConsonantRho]
, wordAccent :: Maybe WordAccent
, wordCrasis :: Crasis
, wordElision :: Elision
, wordMarkPreservation :: MarkPreservation
, wordDiaeresisConvention :: DiaeresisConvention
, wordCapitalization :: Capitalization
, wordPunctuation :: HasWordPunctuation
}
deriving (Eq, Ord, Show, Generic, Data, Typeable)
instance Serialize Word
makeLensesFor
[ ("wordInitialAspiration", "_wordInitialAspiration")
, ("wordSyllables", "_wordSyllables")
, ("wordFinalConsonants", "_wordFinalConsonants")
, ("wordAccent", "_wordAccent")
, ("wordCrasis", "_wordCrasis")
, ("wordElision", "_wordElision")
, ("wordMarkPreservation", "_wordMarkPreservation")
, ("wordDiaeresisConvention", "_wordDiaeresisConvention")
, ("wordCapitalization", "_wordCapitalization")
, ("wordPunctuation", "_wordPunctuation")
]
''Word
|
ancientlanguage/haskell-analysis
|
greek-script/src/Grammar/Greek/Script/Word.hs
|
mit
| 2,031 | 0 | 9 | 292 | 473 | 286 | 187 | 57 | 0 |
{-# LANGUAGE PatternGuards #-}
module BotNuno where
import LI11718
import OracleT4
import OracleT3
import OracleT2
import OracleT1
--import Test.QuickCheck.Gen
import Data.List
import Data.Maybe
--import Safe
--import Debug.Trace
bot :: Double -> Jogo -> Int -> Acao
bot t e i | p' == Nothing = Acao False True (trg>0) (trg<0) nit
| otherwise = Acao (v<vt) (v-vt>0.2) (trg>0) (trg<0) Nothing
where p = (carros e!!i)
vt = 1.6/(lookahead/3.5)
lookahead = 1.2*(7 - k_atrito (pista e))
p' = colide m (lookahead*t) (carros (e)!!i)
Mapa ((pi,pj),pe) m = mapa (e)
Peca tp _ = (m!!pj!!pi)
prc = percorre [] m (pi,pj) pe
whereAmI = dropWhile (\(_,i,_) -> i /= ponto2Pos (posicao p)) (prc++prc)
whereAmI' = dropWhile (\(_,i,_) -> i /= ponto2Pos (posicao p)) (tail whereAmI)
dr0 = dir (posicao p) (whereAmI!!1)
dr' = if length whereAmI' > 0 then dir (posicao p) (whereAmI'!!1) else dr0
trg0 = distRad (round $ direcao p) (round dr0)
trg1 = distRad (round $ direcao p) (round dr')
trg = if abs trg0 < abs trg1 then trg0 else trg1
(v,_) = componentsToArrow (velocidade p)
ntc = (round (direcao p)) `mod` 4
nit | ntc /= i = Just ntc
| otherwise = Nothing
dir :: Ponto -> (Peca,Posicao,Orientacao) -> Double
dir p0 (Peca t _,p,_) = snd $ componentsToArrow (p'.-.p0)
where p' = centroPeca t p
distRad :: Int -> Int -> Int
distRad r1 r2 = ((r2-r1) + 180) `mod` 360 - 180
|
hpacheco/HAAP
|
examples/plab/svn/2017li1g186/src/BotNuno.hs
|
mit
| 1,552 | 0 | 13 | 436 | 731 | 391 | 340 | 35 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Web.Router (
App(..),
resourcesApp
) where
import Data.Text
import Yesod
import InstaHuskee as IH
type AuthUrl = Text
-- Yesod App Foundation
--------------------------------------------------------------------------------
data App = App
mkYesod "App" [parseRoutes|
/ HomeR GET
/auth AuthR GET
/auth/instagram RedirectR GET
/dashboard DashboardR GET
|]
instance Yesod App
-- Handlers
-- type GHandler m a = HandlerT App m a
-- type Handler a = HandlerT App IO a
-- type Handler Html = HandlerT App IO Html
--------------------------------------------------------------------------------
getHomeR :: Handler Html
getHomeR = defaultLayout homeTemplate
getAuthR :: Handler Html
getAuthR = do
authUrl <- liftIO $ IH.getAuthURL
defaultLayout $ authTemplate authUrl
getDashboardR :: Handler Html
getDashboardR = defaultLayout dashboardTemplate
getRedirectR :: Handler Html
getRedirectR = do
code <- lookupGetParam "code"
case code of
Just c -> do
token <- liftIO $ IH.getAuthToken c
defaultLayout igAuthOkTemplate
Nothing -> do
defaultLayout igAuthFailTemplate
-- Templates (pull out into separate hamlet files!)
--------------------------------------------------------------------------------
homeTemplate :: Widget
homeTemplate = do
setTitle "Root"
toWidget [whamlet|
Routes:
<a href=@{AuthR}>Authentication
<a href=@{DashboardR}>Dashboard
|]
authTemplate :: AuthUrl -> Widget
authTemplate authUrl = do
setTitle "Authentication"
toWidget [whamlet|
<a href="#{authUrl}">Authenticate me!
|]
dashboardTemplate :: Widget
dashboardTemplate = do
setTitle "Dashboard"
toWidget[whamlet|
Dashboard
|]
igAuthOkTemplate :: Widget
igAuthOkTemplate = do
setTitle "Auth Success!"
toWidget[whamlet|You are successfully authenticated!|]
igAuthFailTemplate :: Widget
igAuthFailTemplate = do
setTitle "Auth Failure..."
  toWidget[whamlet|Authentication failed. Check logs for details.|]
|
dzotokan/instahuskee
|
src/Web/Router.hs
|
mit
| 2,250 | 0 | 15 | 490 | 354 | 190 | 164 | 51 | 2 |
main =
let
s5 = sqrt 5.0
fib n = round $ (((1.0 + s5) / 2.0) ** fromIntegral n) / s5 - (((1.0 - s5) / 2.0) ** fromIntegral n) / s5
in
print $ sum $ filter even $ takeWhile (< 4000000) [fib i | i <- [1..]]
|
mohsen3/crispy-goggles
|
ProjectEuler/problem02/B.hs
|
mit
| 223 | 0 | 18 | 68 | 133 | 68 | 65 | 5 | 1 |
{-# htermination nub :: Eq a => [a] -> [a] #-}
import List
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/List_nub_2.hs
|
mit
| 59 | 0 | 3 | 13 | 5 | 3 | 2 | 1 | 0 |
-- | Utilities for pretty printing.
-- Adapted from @Apia.Utils.PrettyPrint
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UnicodeSyntax #-}
module Utils.PrettyPrint
( module Text.PrettyPrint
, bquotes
, Pretty(pretty)
, prettyShow
-- , sspaces
, squotes
) where
import Text.PrettyPrint
-- | Wrap a document in ‘...’.
bquotes ∷ Doc → Doc
bquotes d = char '‘' <> d <> char '’'
-- -- | Wrap a document in spaces.
-- spaces ∷ Doc → Doc
-- spaces d = space <> d <> space
-- | Wrap a string in ‘...’.
squotes ∷ String → Doc
squotes = bquotes . text
-- -- | Wrap a string in spaces.
-- sspaces ∷ String → Doc
-- sspaces = spaces . text
-- | Use instead of 'show' when printing to the world.
prettyShow ∷ Pretty a ⇒ a → String
prettyShow = render . pretty
-- ------------------------------------------------------------------------------
-- | Pretty print type class.
class Pretty a where
pretty ∷ a → Doc
instance Pretty Doc where
pretty = id
instance Pretty String where
pretty = text
|
agomezl/tstp2agda
|
src/Utils/PrettyPrint.hs
|
mit
| 1,053 | 0 | 7 | 212 | 169 | 100 | 69 | 24 | 1 |
module Dama.Lexer.Lexer (Lexer, lex) where
import Prelude hiding (lex)
import Data.Char (isAlpha, isDigit, isLower, isUpper)
import Data.Monoid ((<>))
import Dama.Error
import Dama.Location
import Dama.Lexer.Token
type Lexer = LocList Char -> Either Error (LocList Token)
lex :: Lexer
lex ((l, c) :- cs)
| c == ' ' = lex cs
| c == '\n' = ((l, Newline) :-) <$> lex cs
| c == '(' = ((l, OpenParen) :-) <$> lex cs
| c == ')' = ((l, CloseParen) :-) <$> lex cs
| c == ':' = lexIdentifier l IdColon (c :) cs
| isFreeSymbol c = lexIdentifier l IdSymbol (c :) cs
| isLower c = lexIdentifier l IdLower (c :) cs
| isUpper c = lexIdentifier l IdUpper (c :) cs
| otherwise = Left . Error l $ "Unexpected character: " <> show c
lex (Nil l) = Right $ Nil l
lexIdentifier :: Location -> (String -> Token) -> (String -> String) -> Lexer
lexIdentifier s t a ((l, c) :- cs)
| c `elem` " \n()" = ((s, catchReserved . t $ a []) :-) <$> lex ((l, c) :- cs)
| isAlpha c || c == ':' || isFreeSymbol c || isDigit c = lexIdentifier s t (a . (c :)) cs
| otherwise = Left . Error l $ "Unexpected character: " <> show c
lexIdentifier s t a (Nil l) = Right $ (s, catchReserved . t $ a []) :- Nil l
catchReserved :: Token -> Token
catchReserved (IdSymbol "=") = Equals
catchReserved t = t
isFreeSymbol :: Char -> Bool
isFreeSymbol = (`elem` "!$%&*+-./<=>^")
|
tysonzero/dama
|
src/Dama/Lexer/Lexer.hs
|
mit
| 1,383 | 0 | 12 | 321 | 668 | 351 | 317 | 31 | 1 |
module AMore where
import Control.Applicative
import Data.Char (isAlpha, isAlphaNum, isSpace, isUpper)
import AParser
import SExpr hiding (ident, oneOrMore, spaces,
zeroOrMore)
------------------------------------------------------------
zeroOrMore :: Parser a -> Parser [a]
zeroOrMore p = oneOrMore p <|> pure []
oneOrMore :: Parser a -> Parser [a]
oneOrMore p = (:) <$> p <*> zeroOrMore p
-- tests:
-- runParser (zeroOrMore (satisfy isUpper)) "ABCdEfgH" == Just ("ABC","dEfgH")
-- runParser (oneOrMore (satisfy isUpper)) "ABCdEfgH" == Just ("ABC","dEfgH")
-- runParser (zeroOrMore (satisfy isUpper)) "abcdeFGh" == Just ("", "abcdeFGh")
-- runParser (oneOrMore (satisfy isUpper)) "abcdeFGh" == Nothing
------------------------------------------------------------
spaces :: Parser String
spaces = zeroOrMore (satisfy isSpace)
------------------------------------------------------------
ident :: Parser String
ident = (:) <$> satisfy isAlpha <*> zeroOrMore (satisfy isAlphaNum)
-- tests:
-- runParser ident "foobar baz" == Just ("foobar"," baz")
-- runParser ident "foo33fA" == Just ("foo33fA","")
-- runParser ident "2bad" == Nothing
-- runParser ident "" == Nothing
------------------------------------------------------------
parseAtom :: Parser Atom
parseAtom = N <$> posInt <|> I <$> ident
parseParens :: Parser a -> Parser a
parseParens exp = char '(' *> exp <* char ')'
parseSpaces :: Parser a -> Parser a
parseSpaces exp = spaces *> exp <* spaces
parseExprs :: Parser [SExpr]
parseExprs = parseParens $ oneOrMore parseSExpr
parseSExpr :: Parser SExpr
parseSExpr = parseSpaces $ A <$> parseAtom <|> Comb <$> parseExprs
-- tests
-- runParser (spaces *> posInt) " 345" == Just (345,"")
-- runParser parseSExpr "1" == Just (A (N 1),"")
-- runParser parseSExpr "(5)" == Just (Comb [A (N 5)],"")
-- runParser parseSExpr "(bar (foo) 3 5 874)"
-- runParser parseSExpr "(((lambda x (lambda y (plus x y))) 3) 5)"
-- runParser parseSExpr "( lots of ( spaces in ) this ( one ) )"
------------------------------------------------------------
|
sajith/cis194
|
hw11/AMore.hs
|
mit
| 2,177 | 0 | 8 | 423 | 350 | 193 | 157 | 24 | 1 |
{-# LANGUAGE NoImplicitPrelude, LambdaCase #-}
module Main (main) where
import Conflict (Conflict(..), parseConflicts, markerPrefix)
import qualified Control.Exception as E
import Control.Monad (when, unless, filterM)
import Data.Foldable (asum, traverse_)
import Data.List (isPrefixOf)
import Data.Maybe (mapMaybe)
import Environment (checkConflictStyle, openEditor, shouldUseColorByTerminal)
import qualified Opts
import Opts (Options(..))
import PPDiff (ppDiff, ColorEnable(..))
import Resolution (NewContent(..), resolveContent)
import SideDiff (getConflictDiffs, getConflictDiff2s)
import StrUtils (ensureNewline, stripNewline, unprefix)
import System.Directory (renameFile, removeFile, getCurrentDirectory)
import System.Exit (ExitCode(..), exitWith)
import System.FilePath ((<.>), makeRelative, joinPath, splitPath)
import qualified System.FilePath as FilePath
import System.IO (hPutStr, stderr)
import qualified System.PosixCompat.Files as PosixFiles
import System.Process (callProcess, readProcess, readProcessWithExitCode)
import Prelude.Compat
markerLine :: Char -> String -> String
markerLine c str = markerPrefix c ++ " " ++ str ++ "\n"
gitAdd :: FilePath -> IO ()
gitAdd fileName =
callProcess "git" ["add", "--", fileName]
dumpDiffs :: ColorEnable -> Options -> FilePath -> Int -> (Int, Conflict) -> IO ()
dumpDiffs colorEnable opts filePath count (idx, conflict) =
do
putStrLn $ unwords ["### Conflict", show idx, "of", show count]
when (shouldDumpDiffs opts) $ mapM_ dumpDiff $ getConflictDiffs conflict
when (shouldDumpDiff2 opts) $ dumpDiff2 $ getConflictDiff2s conflict
where
dumpDiff (side, (lineNo, marker), diff) =
do putStrLn $ concat
[filePath, ":", show lineNo, ":Diff", show side, ": ", marker]
putStr $ unlines $ map (ppDiff colorEnable) diff
dumpDiff2 ((lineNoA, markerA), (lineNoB, markerB), diff) =
do putStrLn $ concat [filePath, ":", show lineNoA, " <->", markerA]
putStrLn $ concat [filePath, ":", show lineNoB, ": ", markerB]
putStr $ unlines $ map (ppDiff colorEnable) diff
dumpAndOpenEditor :: ColorEnable -> Options -> FilePath -> [Conflict] -> IO ()
dumpAndOpenEditor colorEnable opts path conflicts =
do when (shouldDumpDiffs opts || shouldDumpDiff2 opts) $
mapM_ (dumpDiffs colorEnable opts path (length conflicts)) (zip [1..] conflicts)
openEditor opts path
overwrite :: FilePath -> String -> IO ()
overwrite fileName newContent =
do renameFile fileName bkup
writeFile fileName newContent
removeFile bkup
where
bkup = fileName <.> "bk"
resolve :: ColorEnable -> Options -> FilePath -> IO ()
resolve colorEnable opts fileName =
resolveContent . parseConflicts <$> readFile fileName
>>= \case
NewContent successes reductions failures newContent
| successes == 0 && allGood ->
do putStrLn $ fileName ++ ": No conflicts, git-adding"
gitAdd fileName
| successes == 0 && reductions == 0 ->
do putStrLn $ concat
[ fileName, ": Failed to resolve any of the "
, show failures, " conflicts" ]
doDump
| successes == 0 ->
do putStrLn $ concat
[ fileName, ": Reduced ", show reductions, " conflicts"]
overwrite fileName newContent
doDump
| otherwise ->
do putStrLn $ concat
[ fileName, ": Successfully resolved ", show successes
, " conflicts (failed to resolve " ++ show (reductions + failures) ++ " conflicts)"
, if allGood then ", git adding" else ""
]
overwrite fileName newContent
if allGood
then gitAdd fileName
else doDump
where
allGood = failures == 0 && reductions == 0
doDump =
dumpAndOpenEditor colorEnable opts fileName
[ conflict | Right conflict <- parseConflicts newContent ]
relativePath :: FilePath -> FilePath -> FilePath
relativePath base path
| rel /= path = rel
| revRel /= base =
joinPath $ replicate (length (splitPath revRel)) ".."
| otherwise = path
where
rel = makeRelative base path
revRel = makeRelative path base
(</>) :: FilePath -> FilePath -> FilePath
"." </> p = p
d </> p = d FilePath.</> p
isDirectory :: FilePath -> IO Bool
isDirectory x = PosixFiles.isDirectory <$> PosixFiles.getFileStatus x
withAllStageFiles ::
FilePath -> (FilePath -> Maybe FilePath -> Maybe FilePath -> IO b) -> IO b
withAllStageFiles path action =
do let stdin = ""
[baseTmpRaw, localTmpRaw, remoteTmpRaw] <-
take 3 . words <$>
readProcess "git" ["checkout-index", "--stage=all", "--", path] stdin
cdup <-
takeWhile (/= '\0') . stripNewline <$>
readProcess "git" ["rev-parse", "--show-cdup"] stdin
let maybePath "." = Nothing
maybePath p = Just (cdup </> p)
let mLocalTmp = maybePath localTmpRaw
mRemoteTmp = maybePath remoteTmpRaw
baseTmp = cdup </> baseTmpRaw
action baseTmp mLocalTmp mRemoteTmp
`E.finally`
do removeFile baseTmp
traverse_ removeFile mLocalTmp
traverse_ removeFile mRemoteTmp
deleteModifyConflictAddMarkers :: FilePath -> IO ()
deleteModifyConflictAddMarkers path =
withAllStageFiles path $ \baseTmp mLocalTmp mRemoteTmp ->
do baseContent <- readFile baseTmp
localContent <- maybe (return "") readFile mLocalTmp
remoteContent <- maybe (return "") readFile mRemoteTmp
overwrite path $
concat
[ markerLine '<' "LOCAL"
, ensureNewline localContent
, markerLine '|' "BASE"
, ensureNewline baseContent
, markerLine '=' ""
, ensureNewline remoteContent
, markerLine '>' "REMOTE"
]
deleteModifyConflictHandle :: FilePath -> IO ()
deleteModifyConflictHandle path =
do marked <- any (markerPrefix '<' `isPrefixOf`) . lines <$> readFile path
unless marked $
do putStrLn $ show path ++ " has a delete/modify conflict. Adding conflict markers"
deleteModifyConflictAddMarkers path
removeFileIfEmpty :: FilePath -> IO ()
removeFileIfEmpty path =
do isEmpty <- null <$> readFile path
when isEmpty $
do removeFile path
callProcess "git" ["add", "-u", "--", path]
getStatusPorcelain :: IO String
getStatusPorcelain =
do (statusCode, statusPorcelain, statusStderr) <-
readProcessWithExitCode "git" ["status", "--porcelain"] ""
when (statusCode /= ExitSuccess) $ do
-- Print git's error message. Usually -
-- "fatal: Not a git repository (or any of the parent directories): .git"
hPutStr stderr statusStderr
exitWith statusCode
return statusPorcelain
getGitRootDir :: IO FilePath
getGitRootDir =
do cwd <- getCurrentDirectory
relativePath cwd . stripNewline <$>
readProcess "git" ["rev-parse", "--show-toplevel"] ""
makeFilesMatchingPrefixes :: IO ([String] -> IO [FilePath])
makeFilesMatchingPrefixes =
do statusPorcelain <- getStatusPorcelain
rootDir <- getGitRootDir
let rootRelativeFiles =
filterM (fmap not . isDirectory) . map (rootDir </>)
let firstMatchingPrefix :: [String] -> String -> Maybe String
firstMatchingPrefix prefixes =
asum . traverse unprefix prefixes
let filesMatchingPrefixes :: [String] -> IO [FilePath]
filesMatchingPrefixes prefixes =
rootRelativeFiles . mapMaybe (firstMatchingPrefix prefixes)
$ lines statusPorcelain
pure filesMatchingPrefixes
mediateAll :: ColorEnable -> Options -> IO ()
mediateAll colorEnable opts =
do filesMatchingPrefixes <- makeFilesMatchingPrefixes
-- from git-diff manpage:
-- Added (A), Copied (C), Deleted (D), Modified (M), Renamed (R),
-- have their type (i.e. regular file, symlink, submodule, ...) changed (T),
-- are Unmerged (U), are Unknown (X), or have had their pairing Broken (B)
deleteModifyConflicts <- filesMatchingPrefixes ["DU ", "UD "]
mapM_ deleteModifyConflictHandle deleteModifyConflicts
filesMatchingPrefixes ["UU ", "AA ", "DA ", "AD ", "DU ", "UD "]
>>= mapM_ (resolve colorEnable opts)
     -- Heuristically delete files that were in a remove/modify conflict
-- and ended up with empty content
mapM_ removeFileIfEmpty deleteModifyConflicts
main :: IO ()
main =
do opts <- Opts.getOpts
colorEnable <-
case shouldUseColor opts of
Nothing -> shouldUseColorByTerminal
Just colorEnable -> return colorEnable
checkConflictStyle opts
case mergeSpecificFile opts of
Nothing -> mediateAll colorEnable opts
Just path -> resolve colorEnable opts path
|
ElastiLotem/resolve-trivial-conflicts
|
src/Main.hs
|
gpl-2.0
| 9,341 | 0 | 19 | 2,661 | 2,401 | 1,218 | 1,183 | 190 | 3 |
{-|
This is a simple example use of the Pepa library.
-}
module Main
( main )
where
{- Standard library modules imported -}
import System.Console.GetOpt
( OptDescr ( .. ) )
import System.Exit
( ExitCode ( .. )
, exitFailure
)
{- External Library Modules Imported -}
{- Local modules imported -}
import Language.Pepa.Syntax
( ParsedModel )
import Ipc.Ipc
( parseAndMain
, processPepaModel
, CliOptions
)
import Ipc.Cli
( getCliArgs
, Cli ( .. )
, CliOpt ( .. )
, toCli
, baseCliOptions
)
import Language.Pepa.MainControl
( MainControl )
{- End of module imports -}
main :: IO ()
main = getCliArgs >>= (processArgs . toCliIpc)
toCliIpc :: [ String ] -> Cli ()
toCliIpc = toCli pepaCheckVersion "pepacheck" pepaCheckOptions
pepaCheckOptions :: [ OptDescr ( CliOpt () ) ]
pepaCheckOptions = baseCliOptions
pepaCheckVersion :: String
pepaCheckVersion = "0.99"
-- Processing of the command-line arguments.
processArgs :: Cli () -> IO ()
processArgs (CliValid options files) =
do errorCodes <- mapM (processFile options) files
if null [ i | ExitFailure i <- errorCodes ]
then return ()
else exitFailure
processArgs (CliInfo _ _ infoString) =
do putStrLn pepaCheckBanner
putStrLn infoString
processArgs (CliError _ _ errorString) =
do putStrLn pepaCheckBanner
putStrLn errorString
pepaCheckBanner :: String
pepaCheckBanner = "This is pepacheck version " ++ pepaCheckVersion
processFile :: CliOptions a -> FilePath -> IO ExitCode
processFile options file =
parseAndMain options file processModel postAction
where
processModel :: ParsedModel -> MainControl ()
processModel model = (processPepaModel options model) >> return ()
postAction :: () -> IO ExitCode
postAction _ = return ExitSuccess
|
allanderek/ipclib
|
examples/PepaCheck.hs
|
gpl-2.0
| 1,828 | 0 | 12 | 404 | 468 | 250 | 218 | 50 | 2 |
{-# LANGUAGE TupleSections #-}
{-
Copyright (C) 2009 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- | Functions for parsing a LaTeX formula to a Haskell representation.
-}
module Text.TeXMath.Readers.TeX (readTeX)
where
import Data.List (intercalate)
import Control.Monad
import Data.Char (isDigit, isAscii, isLetter)
import qualified Data.Map as M
import Text.Parsec hiding (label)
import Text.Parsec.Error
import Text.Parsec.String
import Text.TeXMath.Types
import Control.Applicative ((<*), (*>), (<*>), (<$>), (<$), pure)
import qualified Text.TeXMath.Shared as S
import Text.TeXMath.Readers.TeX.Macros (applyMacros, parseMacroDefinitions)
import Text.TeXMath.Unicode.ToTeX (getSymbolType)
import Data.Maybe (fromMaybe, fromJust)
type TP = Parser
-- The parser
expr1 :: TP Exp
expr1 = choice
[ inbraces
, variable
, number
, text
, styled
, root
, mspace
, phantom
, boxed
, binary
, genfrac
, substack
, bareSubSup
, environment
, diacritical
, unicode
, ensuremath
, enclosure
, texSymbol
] <* ignorable
-- | Parse a formula, returning a list of 'Exp'.
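--
-- An illustrative example (the exact output shape is inferred from the
-- parsers below, not an official doctest):
--
-- > readTeX "\\frac{1}{2}"
-- > -- Right [EFraction NormalFrac (ENumber "1") (ENumber "2")]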
readTeX :: String -> Either String [Exp]
readTeX inp =
let (ms, rest) = parseMacroDefinitions inp in
either (Left . showParseError inp) (Right . id)
$ parse formula "formula" (applyMacros ms rest)
showParseError :: String -> ParseError -> String
showParseError inp pe =
snippet ++ "\n" ++ caretline ++
showErrorMessages "or" "unknown" "expecting" "unexpected" "eof"
(errorMessages pe)
where errln = sourceLine (errorPos pe)
errcol = sourceColumn (errorPos pe)
snipoffset = max 0 (errcol - 20)
inplns = lines inp
ln = if length inplns >= errln
then inplns !! (errln - 1)
else "" -- should not happen
snippet = take 40 $ drop snipoffset ln
caretline = replicate (errcol - snipoffset - 1) ' ' ++ "^"
ctrlseq :: String -> TP String
ctrlseq s = lexeme $ try $ do
result <- string ('\\':s)
case s of
[c] | not (isLetter c) -> return ()
_ -> (do pos <- getPosition
letter
setPosition pos
mzero <?> ("non-letter after \\" ++ s))
<|> return ()
return result
ignorable :: TP ()
ignorable = skipMany (comment <|> label <|> (skipMany1 space <?> "whitespace"))
comment :: TP ()
comment = char '%' *> skipMany (noneOf "\n") *> optional newline
label :: TP ()
label = ctrlseq "label" *> braces (skipMany (noneOf "}"))
unGrouped :: Exp -> [Exp]
unGrouped (EGrouped xs) = xs
unGrouped x = [x]
formula :: TP [Exp]
formula = unGrouped <$> (ignorable *> manyExp expr <* eof)
expr :: TP Exp
expr = do
optional (ctrlseq "displaystyle")
(a, convertible) <- try (braces operatorname) -- needed because macros add {}
<|> ((,False) <$> expr1)
<|> operatorname
limits <- limitsIndicator
subSup limits convertible a <|> superOrSubscripted limits convertible a <|> return a
-- | Parser for \operatorname command.
-- Returns a tuple of EMathOperator name and Bool depending on the flavor
-- of the command:
--
-- - True for a convertible operator (\operatorname*)
--
-- - False otherwise
operatorname :: TP (Exp, Bool)
operatorname = do
ctrlseq "operatorname"
convertible <- (char '*' >> spaces >> return True) <|> return False
op <- expToOperatorName <$> texToken
maybe mzero (\s -> return (EMathOperator s, convertible)) op
-- | Converts identifiers, symbols and numbers to a flat string.
-- Returns Nothing if the expression contains anything else.
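-- For example (illustrative, based on the cases handled below):
-- @expToOperatorName (EGrouped [EIdentifier "s", EIdentifier "i", EIdentifier "n"])@
-- yields @Just "sin"@, whereas an expression containing e.g. an 'EFraction'
-- yields @Nothing@.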
expToOperatorName :: Exp -> Maybe String
expToOperatorName e = case e of
EGrouped xs -> concat <$> mapM fl xs
_ -> fl e
where fl f = case f of
EIdentifier s -> Just s
-- handle special characters
ESymbol _ "\x2212" -> Just "-"
ESymbol _ "\x2032" -> Just "'"
ESymbol _ "\x2033" -> Just "''"
ESymbol _ "\x2034" -> Just "'''"
ESymbol _ "\x2057" -> Just "''''"
ESymbol _ "\x02B9" -> Just "'"
ESymbol _ s -> Just s
ENumber s -> Just s
_ -> Nothing
bareSubSup :: TP Exp
bareSubSup = subSup Nothing False (EIdentifier "")
<|> superOrSubscripted Nothing False (EIdentifier "")
limitsIndicator :: TP (Maybe Bool)
limitsIndicator =
(ctrlseq "limits" >> return (Just True))
<|> (ctrlseq "nolimits" >> return (Just False))
<|> return Nothing
binomCmd :: TP String
binomCmd = oneOfCommands (map fst binomCmds)
binomCmds :: [(String, Exp -> Exp -> Exp)]
binomCmds = [ ("\\choose", \x y ->
EDelimited "(" ")" [Right (EFraction NoLineFrac x y)])
, ("\\brack", \x y ->
EDelimited "[" "]" [Right (EFraction NoLineFrac x y)])
, ("\\brace", \x y ->
EDelimited "{" "}" [Right (EFraction NoLineFrac x y)])
, ("\\bangle", \x y ->
EDelimited "\x27E8" "\x27E9" [Right (EFraction NoLineFrac x y)])
]
genfrac :: TP Exp
genfrac = do
ctrlseq "genfrac"
openDelim <- braces $ option '(' ((char '\\' >> oneOf "{}") <|> anyChar)
closeDelim <- braces $ option ')' ((char '\\' >> oneOf "{}") <|> anyChar)
bar <- False <$ try (braces (string "0pt")) <|> True <$ texToken
displayStyle <- True <$ try (braces (char '0')) <|> False <$ texToken
x <- texToken
y <- texToken
let fracType = case (bar, displayStyle) of
(False, _) -> NoLineFrac
(True, True) -> DisplayFrac
_ -> NormalFrac
return $ EDelimited [openDelim] [closeDelim] [Right (EFraction fracType x y)]
substack :: TP Exp
substack = do
ctrlseq "substack"
formulas <- braces $ ignorable >> (manyExp expr) `sepEndBy` endLine
return $ EArray [AlignCenter] $ map (\x -> [[x]]) formulas
asGroup :: [Exp] -> Exp
asGroup [x] = x
asGroup xs = EGrouped xs
-- variant of many that is sensitive to \choose and other such commands
manyExp' :: Bool -> TP Exp -> TP Exp
manyExp' requireNonempty p = do
initial <- if requireNonempty
then many1 (notFollowedBy binomCmd >> p)
else many (notFollowedBy binomCmd >> p)
let withCmd :: String -> TP Exp
withCmd cmd =
case lookup cmd binomCmds of
Just f -> f <$> (asGroup <$> pure initial)
<*> (asGroup <$> many p)
Nothing -> fail $ "Unknown command " ++ cmd
(binomCmd >>= withCmd) <|> return (asGroup initial)
manyExp :: TP Exp -> TP Exp
manyExp = manyExp' False
many1Exp :: TP Exp -> TP Exp
many1Exp = manyExp' True
inbraces :: TP Exp
inbraces = braces (manyExp expr)
texToken :: TP Exp
texToken = texSymbol <|> inbraces <|> inbrackets <|> texChar
texChar :: TP Exp
texChar =
do
c <- noneOf "\n\t\r \\{}" <* spaces
return $ if isDigit c
then ENumber [c]
else EIdentifier [c]
inbrackets :: TP Exp
inbrackets = (brackets $ manyExp $ notFollowedBy (char ']') >> expr)
number :: TP Exp
number = lexeme $ ENumber <$> many1 digit
enclosure :: TP Exp
enclosure = basicEnclosure <|> scaledEnclosure <|> delimited
-- Expensive
basicEnclosure :: TP Exp
basicEnclosure = choice (map (\(s, v) -> symbol s >> return v) enclosures)
fence :: String -> TP String
fence cmd = do
symbol cmd
enc <- basicEnclosure <|> (try (symbol ".") >> return (ESymbol Open ""))
case enc of
ESymbol Open x -> return x
ESymbol Close x -> return x
_ -> mzero
middle :: TP String
middle = fence "\\middle"
right :: TP String
right = fence "\\right"
delimited :: TP Exp
delimited = do
openc <- try $ fence "\\left"
contents <- concat <$>
many (try $ ((:[]) . Left <$> middle)
<|> (map Right . unGrouped <$>
many1Exp (notFollowedBy right *> expr)))
closec <- right <|> return ""
return $ EDelimited openc closec contents
scaledEnclosure :: TP Exp
scaledEnclosure = do
cmd <- oneOfCommands (map fst S.scalers)
case S.getScalerValue cmd of
Just r -> EScaled r <$> basicEnclosure
Nothing -> mzero
endLine :: TP Char
endLine = try $ do
symbol "\\\\"
optional inbrackets -- can contain e.g. [1.0in] for a line height, not yet supported
optional $ ctrlseq "hline"
-- we don't represent the line, but it shouldn't crash parsing
return '\n'
arrayLine :: TP ArrayLine
arrayLine = notFollowedBy (ctrlseq "end" >> return '\n') >>
sepBy1 (unGrouped <$> manyExp (notFollowedBy endLine >> expr)) (symbol "&")
arrayAlignments :: TP [Alignment]
arrayAlignments = try $ do
as <- braces (many letter)
let letterToAlignment 'l' = AlignLeft
letterToAlignment 'c' = AlignCenter
letterToAlignment 'r' = AlignRight
letterToAlignment _ = AlignDefault
return $ map letterToAlignment as
environment :: TP Exp
environment = do
ctrlseq "begin"
name <- braces (oneOfStrings (M.keys environments) <* optional (char '*'))
spaces
case M.lookup name environments of
Just env -> do
result <- env
spaces
ctrlseq "end"
braces (string name <* optional (char '*'))
spaces
return result
Nothing -> mzero -- should not happen
environments :: M.Map String (TP Exp)
environments = M.fromList
[ ("array", stdarray)
, ("eqnarray", eqnarray)
, ("align", align)
, ("aligned", align)
, ("alignat", inbraces *> spaces *> align)
, ("alignedat", inbraces *> spaces *> align)
, ("flalign", flalign)
, ("flaligned", flalign)
, ("cases", cases)
, ("matrix", matrixWith "" "")
, ("smallmatrix", matrixWith "" "")
, ("pmatrix", matrixWith "(" ")")
, ("bmatrix", matrixWith "[" "]")
, ("Bmatrix", matrixWith "{" "}")
, ("vmatrix", matrixWith "\x2223" "\x2223")
, ("Vmatrix", matrixWith "\x2225" "\x2225")
, ("split", align)
, ("multline", gather)
, ("gather", gather)
, ("gathered", gather)
]
mbArrayAlignments :: TP (Maybe [Alignment])
mbArrayAlignments = option Nothing $ Just <$> arrayAlignments
alignsFromRows :: Alignment -> [ArrayLine] -> [Alignment]
alignsFromRows _ [] = []
alignsFromRows defaultAlignment (r:_) = replicate (length r) defaultAlignment
matrixWith :: String -> String -> TP Exp
matrixWith opendelim closedelim = do
mbaligns <- mbArrayAlignments
lines' <- sepEndBy1 arrayLine endLine
let aligns = fromMaybe (alignsFromRows AlignCenter lines') mbaligns
return $ if null opendelim && null closedelim
then EArray aligns lines'
else EDelimited opendelim closedelim [Right $ EArray aligns lines']
stdarray :: TP Exp
stdarray = do
mbaligns <- mbArrayAlignments
lines' <- sepEndBy1 arrayLine endLine
let aligns = fromMaybe (alignsFromRows AlignDefault lines') mbaligns
return $ EArray aligns lines'
gather :: TP Exp
gather = do
rows <- sepEndBy arrayLine endLine
return $ EArray (alignsFromRows AlignCenter rows) rows
eqnarray :: TP Exp
eqnarray = (EArray [AlignRight, AlignCenter, AlignLeft]) <$>
sepEndBy1 arrayLine endLine
align :: TP Exp
align = (EArray [AlignRight, AlignLeft]) <$> sepEndBy1 arrayLine endLine
flalign :: TP Exp
flalign = (EArray [AlignLeft, AlignRight]) <$> sepEndBy1 arrayLine endLine
cases :: TP Exp
cases = do
rs <- sepEndBy1 arrayLine endLine
return $ EDelimited "{" "" [Right $ EArray (alignsFromRows AlignDefault rs) rs]
variable :: TP Exp
variable = do
v <- letter
spaces
return $ EIdentifier [v]
isConvertible :: Exp -> Bool
isConvertible (EMathOperator x) = x `elem` convertibleOps
where convertibleOps = [ "lim","liminf","limsup","inf","sup"
, "min","max","Pr","det","gcd"
]
isConvertible (ESymbol Rel _) = True
isConvertible (ESymbol Bin _) = True
isConvertible (ESymbol Op x) = x `elem` convertibleSyms
where convertibleSyms = ["\x2211","\x220F","\x22C2",
"\x22C3","\x22C0","\x22C1","\x2A05","\x2A06",
"\x2210","\x2A01","\x2A02","\x2A00","\x2A04"]
isConvertible _ = False
-- check if sub/superscripts should always be under and over the expression
isUnderover :: Exp -> Bool
isUnderover (EOver _ _ (ESymbol Accent "\xFE37")) = True -- \overbrace
isUnderover (EOver _ _ (ESymbol Accent "\x23B4")) = True -- \overbracket
isUnderover (EUnder _ _ (ESymbol Accent "\xFE38")) = True -- \underbrace
isUnderover (EUnder _ _ (ESymbol Accent "\x23B5")) = True -- \underbracket
isUnderover (EOver _ _ (ESymbol Accent "\x23DE")) = True -- \overbrace
isUnderover (EUnder _ _ (ESymbol Accent "\x23DF")) = True -- \underbrace
isUnderover _ = False
subSup :: Maybe Bool -> Bool -> Exp -> TP Exp
subSup limits convertible a = try $ do
let sub1 = symbol "_" >> expr1
let sup1 = symbol "^" >> expr1
(b,c) <- try (do {m <- sub1; n <- sup1; return (m,n)})
<|> (do {n <- sup1; m <- sub1; return (m,n)})
return $ case limits of
Just True -> EUnderover False a b c
Nothing | convertible || isConvertible a -> EUnderover True a b c
| isUnderover a -> EUnderover False a b c
_ -> ESubsup a b c
superOrSubscripted :: Maybe Bool -> Bool -> Exp -> TP Exp
superOrSubscripted limits convertible a = try $ do
c <- oneOf "^_"
spaces
b <- expr
case c of
'^' -> return $ case limits of
Just True -> EOver False a b
Nothing
| convertible || isConvertible a -> EOver True a b
| isUnderover a -> EOver False a b
_ -> ESuper a b
'_' -> return $ case limits of
Just True -> EUnder False a b
Nothing
| convertible || isConvertible a -> EUnder True a b
| isUnderover a -> EUnder False a b
_ -> ESub a b
_ -> mzero
unicode :: TP Exp
unicode = lexeme $
do
c <- satisfy (not . isAscii)
return (ESymbol (getSymbolType c) [c])
ensuremath :: TP Exp
ensuremath = ctrlseq "ensuremath" *> inbraces
-- Note: cal and scr are treated the same way, since Unicode does not provide two distinct alphabets for them.
styleOps :: M.Map String ([Exp] -> Exp)
styleOps = M.fromList
[ ("\\mathrm", EStyled TextNormal)
, ("\\mathup", EStyled TextNormal)
, ("\\mbox", EStyled TextNormal)
, ("\\mathbf", EStyled TextBold)
, ("\\mathbfup", EStyled TextBold)
, ("\\mathit", EStyled TextItalic)
, ("\\mathtt", EStyled TextMonospace)
, ("\\texttt", EStyled TextMonospace)
, ("\\mathsf", EStyled TextSansSerif)
, ("\\mathsfup", EStyled TextSansSerif)
, ("\\mathbb", EStyled TextDoubleStruck)
, ("\\mathcal", EStyled TextScript)
, ("\\mathscr", EStyled TextScript)
, ("\\mathfrak", EStyled TextFraktur)
, ("\\mathbfit", EStyled TextBoldItalic)
, ("\\mathbfsfup", EStyled TextSansSerifBold)
, ("\\mathbfsfit", EStyled TextSansSerifBoldItalic)
, ("\\mathbfscr", EStyled TextBoldScript)
, ("\\mathbffrak", EStyled TextBoldFraktur)
, ("\\mathbfcal", EStyled TextBoldScript)
, ("\\mathsfit", EStyled TextSansSerifItalic)
]
diacritical :: TP Exp
diacritical = do
c <- oneOfCommands (map snd S.diacriticals)
case S.getDiacriticalCons c of
Just r -> r <$> texToken
Nothing -> mzero
phantom :: TP Exp
phantom = EPhantom <$> (ctrlseq "phantom" *> texToken)
boxed :: TP Exp
boxed = EBoxed <$> (ctrlseq "boxed" *> texToken)
text :: TP Exp
text = do
c <- oneOfCommands (M.keys textOps)
maybe mzero (<$> (bracedText <* spaces)) $ M.lookup c textOps
textOps :: M.Map String (String -> Exp)
textOps = M.fromList
[ ("\\textrm", (EText TextNormal))
, ("\\text", (EText TextNormal))
, ("\\textbf", (EText TextBold))
, ("\\textit", (EText TextItalic))
, ("\\texttt", (EText TextMonospace))
, ("\\textsf", (EText TextSansSerif))
]
styled :: TP Exp
styled = do
c <- oneOfCommands (M.keys styleOps)
case M.lookup c styleOps of
Just f -> do
x <- inbraces
return $ case x of
EGrouped xs -> f xs
_ -> f [x]
Nothing -> mzero
-- note: sqrt can be unary, \sqrt{2}, or binary, \sqrt[3]{2}
root :: TP Exp
root = do
ctrlseq "sqrt" <|> ctrlseq "surd"
(ERoot <$> inbrackets <*> texToken) <|> (ESqrt <$> texToken)
mspace :: TP Exp
mspace = do
ctrlseq "mspace"
lexeme $ char '{'
len <- many1 digit
lexeme $ string "mu"
lexeme $ char '}'
case reads len of
((n,[]):_) -> return $ ESpace (n/18)
_ -> mzero
binary :: TP Exp
binary = do
c <- oneOfCommands binops
a <- texToken
b <- texToken
case c of
"\\overset" -> return $ EOver False b a
"\\stackrel" -> return $ EOver False b a
"\\underset" -> return $ EUnder False b a
"\\frac" -> return $ EFraction NormalFrac a b
"\\tfrac" -> return $ EFraction InlineFrac a b
"\\dfrac" -> return $ EFraction DisplayFrac a b
"\\binom" -> return $ EDelimited "(" ")"
[Right (EFraction NoLineFrac a b)]
_ -> fail "Unrecognised binary operator"
where
binops = ["\\overset", "\\stackrel", "\\underset", "\\frac", "\\tfrac", "\\dfrac", "\\binom"]
texSymbol :: TP Exp
texSymbol = do
negated <- (try (ctrlseq "not") >> return True) <|> return False
sym <- operator <|> tSymbol
if negated then neg sym else return sym
oneOfCommands :: [String] -> TP String
oneOfCommands cmds = try $ do
cmd <- oneOfStrings cmds
case cmd of
['\\',c] | not (isLetter c) -> return ()
_ -> (do pos <- getPosition
letter
setPosition pos
mzero <?> ("non-letter after " ++ cmd))
<|> return ()
spaces
return cmd
oneOfStrings' :: (Char -> Char -> Bool) -> [String] -> TP String
oneOfStrings' _ [] = mzero
oneOfStrings' matches strs = try $ do
c <- anyChar
let strs' = [xs | (x:xs) <- strs, x `matches` c]
case strs' of
[] -> mzero
_ -> (c:) <$> oneOfStrings' matches strs'
<|> if "" `elem` strs'
then return [c]
else mzero
-- | Parses one of a list of strings. If the list contains
-- two strings one of which is a prefix of the other, the longer
-- string will be matched if possible.
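-- For example (illustrative):
--
-- > parse (oneOfStrings ["ab", "abc"]) "" "abcd"   -- Right "abc"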
oneOfStrings :: [String] -> TP String
oneOfStrings strs = oneOfStrings' (==) strs <??> (intercalate ", " $ map show strs)
-- | Like '(<?>)', but moves position back to the beginning of the parse
-- before reporting the error.
(<??>) :: Monad m => ParsecT s u m a -> String -> ParsecT s u m a
(<??>) p expected = do
pos <- getPosition
p <|> (setPosition pos >> mzero <?> expected)
infix 0 <??>
tSymbol :: TP Exp
tSymbol = do
sym <- oneOfCommands (M.keys symbols)
return $ fromJust (M.lookup sym symbols)
operator :: TP Exp
operator = do
sym <- lexeme (oneOfStrings $ M.keys operators)
return $ fromJust (M.lookup sym operators)
neg :: Exp -> TP Exp
neg (ESymbol Rel x) = ESymbol Rel `fmap`
case x of
"\x2282" -> return "\x2284"
"\x2283" -> return "\x2285"
"\x2286" -> return "\x2288"
"\x2287" -> return "\x2289"
"\x2208" -> return "\x2209"
_ -> mzero
neg _ = mzero
lexeme :: TP a -> TP a
lexeme p = p <* ignorable
braces :: TP a -> TP a
braces p = lexeme $ char '{' *> spaces *> p <* spaces <* char '}'
brackets :: TP a -> TP a
brackets p = lexeme $ char '[' *> spaces *> p <* spaces <* char ']'
symbol :: String -> TP String
symbol s = lexeme $ try $ string s
enclosures :: [(String, Exp)]
enclosures = [ ("(", ESymbol Open "(")
, (")", ESymbol Close ")")
, ("[", ESymbol Open "[")
, ("]", ESymbol Close "]")
, ("\\{", ESymbol Open "{")
, ("\\}", ESymbol Close "}")
, ("\\lbrack", ESymbol Open "[")
, ("\\lbrace", ESymbol Open "{")
, ("\\rbrack", ESymbol Close "]")
, ("\\rbrace", ESymbol Close "}")
, ("\\llbracket", ESymbol Open "\x27E6")
, ("\\rrbracket", ESymbol Close "\x27E7")
, ("\\langle", ESymbol Open "\x27E8")
, ("\\rangle", ESymbol Close "\x27E9")
, ("\\lfloor", ESymbol Open "\x230A")
, ("\\rfloor", ESymbol Close "\x230B")
, ("\\lceil", ESymbol Open "\x2308")
, ("\\rceil", ESymbol Close "\x2309")
, ("|", ESymbol Open "|")
, ("|", ESymbol Close "|")
, ("\\|", ESymbol Open "\x2225")
, ("\\|", ESymbol Close "\x2225")
, ("\\lvert", ESymbol Open "\x7C")
, ("\\rvert", ESymbol Close "\x7C")
, ("\\vert", ESymbol Close "\x7C")
, ("\\lVert", ESymbol Open "\x2225")
, ("\\rVert", ESymbol Close "\x2225")
, ("\\Vert", ESymbol Close "\x2016")
, ("\\ulcorner", ESymbol Open "\x231C")
, ("\\urcorner", ESymbol Close "\x231D")
]
operators :: M.Map String Exp
operators = M.fromList [
("+", ESymbol Bin "+")
, ("-", ESymbol Bin "\x2212")
, ("*", ESymbol Bin "*")
, ("@", ESymbol Ord "@")
, (",", ESymbol Pun ",")
, (".", ESymbol Ord ".")
, (";", ESymbol Pun ";")
, (":", ESymbol Rel ":")
, ("?", ESymbol Ord "?")
, (">", ESymbol Rel ">")
, ("<", ESymbol Rel "<")
, ("!", ESymbol Ord "!")
, ("'", ESymbol Ord "\x2032")
, ("''", ESymbol Ord "\x2033")
, ("'''", ESymbol Ord "\x2034")
, ("''''", ESymbol Ord "\x2057")
, ("=", ESymbol Rel "=")
, (":=", ESymbol Rel ":=")
, ("/", ESymbol Ord "/")
, ("~", ESpace (4/18)) ]
symbols :: M.Map String Exp
symbols = M.fromList
[ ("\\$", ESymbol Ord "$")
, ("\\%", ESymbol Ord "%")
, ("\\&", ESymbol Ord "&")
, ("\\_", ESymbol Ord "_")
, ("\\#", ESymbol Ord "#")
, ("\\^", ESymbol Ord "^")
, ("\\mid", ESymbol Bin "\x2223")
, ("\\colon", ESymbol Pun ":")
, ("\\parallel", ESymbol Rel "\x2225")
, ("\\backslash", ESymbol Bin "\x2216")
, ("\\setminus", ESymbol Bin "\\")
, ("\\times", ESymbol Bin "\x00D7")
, ("\\alpha", EIdentifier "\x03B1")
, ("\\beta", EIdentifier "\x03B2")
, ("\\chi", EIdentifier "\x03C7")
, ("\\delta", EIdentifier "\x03B4")
, ("\\Delta", ESymbol Op "\x0394")
, ("\\epsilon", EIdentifier "\x03F5")
, ("\\varepsilon", EIdentifier "\x03B5")
, ("\\eta", EIdentifier "\x03B7")
, ("\\gamma", EIdentifier "\x03B3")
, ("\\Gamma", ESymbol Op "\x0393")
, ("\\iota", EIdentifier "\x03B9")
, ("\\kappa", EIdentifier "\x03BA")
, ("\\lambda", EIdentifier "\x03BB")
, ("\\Lambda", ESymbol Op "\x039B")
, ("\\mu", EIdentifier "\x03BC")
, ("\\nu", EIdentifier "\x03BD")
, ("\\omega", EIdentifier "\x03C9")
, ("\\Omega", ESymbol Op "\x03A9")
, ("\\phi", EIdentifier "\x03D5")
, ("\\varphi", EIdentifier "\x03C6")
, ("\\Phi", ESymbol Op "\x03A6")
, ("\\pi", EIdentifier "\x03C0")
, ("\\Pi", ESymbol Op "\x03A0")
, ("\\psi", EIdentifier "\x03C8")
, ("\\Psi", ESymbol Ord "\x03A8")
, ("\\rho", EIdentifier "\x03C1")
, ("\\sigma", EIdentifier "\x03C3")
, ("\\Sigma", ESymbol Op "\x03A3")
, ("\\tau", EIdentifier "\x03C4")
, ("\\theta", EIdentifier "\x03B8")
, ("\\vartheta", EIdentifier "\x03D1")
, ("\\Theta", ESymbol Op "\x0398")
, ("\\upsilon", EIdentifier "\x03C5")
, ("\\Upsilon", EIdentifier "\x03A5")
, ("\\xi", EIdentifier "\x03BE")
, ("\\Xi", ESymbol Op "\x039E")
, ("\\zeta", EIdentifier "\x03B6")
, ("\\pm", ESymbol Bin "\x00B1")
, ("\\mp", ESymbol Bin "\x2213")
, ("\\triangleleft", ESymbol Bin "\x22B2")
, ("\\triangleright", ESymbol Bin "\x22B3")
, ("\\cdot", ESymbol Bin "\x22C5")
, ("\\star", ESymbol Bin "\x22C6")
, ("\\ast", ESymbol Bin "\x002A")
, ("\\times", ESymbol Bin "\x00D7")
, ("\\div", ESymbol Bin "\x00F7")
, ("\\circ", ESymbol Bin "\x2218")
, ("\\bullet", ESymbol Bin "\x2022")
, ("\\oplus", ESymbol Bin "\x2295")
, ("\\ominus", ESymbol Bin "\x2296")
, ("\\otimes", ESymbol Bin "\x2297")
, ("\\bigcirc", ESymbol Bin "\x25CB")
, ("\\oslash", ESymbol Bin "\x2298")
, ("\\odot", ESymbol Bin "\x2299")
, ("\\land", ESymbol Bin "\x2227")
, ("\\wedge", ESymbol Bin "\x2227")
, ("\\lor", ESymbol Bin "\x2228")
, ("\\vee", ESymbol Bin "\x2228")
, ("\\cap", ESymbol Bin "\x2229")
, ("\\cup", ESymbol Bin "\x222A")
, ("\\sqcap", ESymbol Bin "\x2293")
, ("\\sqcup", ESymbol Bin "\x2294")
, ("\\uplus", ESymbol Bin "\x228E")
, ("\\amalg", ESymbol Bin "\x2210")
, ("\\bigtriangleup", ESymbol Bin "\x25B3")
, ("\\bigtriangledown", ESymbol Bin "\x25BD")
, ("\\dag", ESymbol Bin "\x2020")
, ("\\dagger", ESymbol Bin "\x2020")
, ("\\ddag", ESymbol Bin "\x2021")
, ("\\ddagger", ESymbol Bin "\x2021")
, ("\\lhd", ESymbol Bin "\x22B2")
, ("\\rhd", ESymbol Bin "\x22B3")
, ("\\unlhd", ESymbol Bin "\x22B4")
, ("\\unrhd", ESymbol Bin "\x22B5")
, ("\\lt", ESymbol Rel "<")
, ("\\gt", ESymbol Rel ">")
, ("\\ne", ESymbol Rel "\x2260")
, ("\\neq", ESymbol Rel "\x2260")
, ("\\le", ESymbol Rel "\x2264")
, ("\\leq", ESymbol Rel "\x2264")
, ("\\leqslant", ESymbol Rel "\x2264")
, ("\\ge", ESymbol Rel "\x2265")
, ("\\geq", ESymbol Rel "\x2265")
, ("\\geqslant", ESymbol Rel "\x2265")
, ("\\equiv", ESymbol Rel "\x2261")
, ("\\ll", ESymbol Rel "\x226A")
, ("\\gg", ESymbol Rel "\x226B")
, ("\\doteq", ESymbol Rel "\x2250")
, ("\\prec", ESymbol Rel "\x227A")
, ("\\succ", ESymbol Rel "\x227B")
, ("\\preceq", ESymbol Rel "\x227C")
, ("\\succeq", ESymbol Rel "\x227D")
, ("\\subset", ESymbol Rel "\x2282")
, ("\\supset", ESymbol Rel "\x2283")
, ("\\subseteq", ESymbol Rel "\x2286")
, ("\\supseteq", ESymbol Rel "\x2287")
, ("\\nsubset", ESymbol Rel "\x2284")
, ("\\nsupset", ESymbol Rel "\x2285")
, ("\\nsubseteq", ESymbol Rel "\x2288")
, ("\\nsupseteq", ESymbol Rel "\x2289")
, ("\\sqsubset", ESymbol Rel "\x228F")
, ("\\sqsupset", ESymbol Rel "\x2290")
, ("\\sqsubseteq", ESymbol Rel "\x2291")
, ("\\sqsupseteq", ESymbol Rel "\x2292")
, ("\\sim", ESymbol Rel "\x223C")
, ("\\simeq", ESymbol Rel "\x2243")
, ("\\approx", ESymbol Rel "\x2248")
, ("\\cong", ESymbol Rel "\x2245")
, ("\\Join", ESymbol Rel "\x22C8")
, ("\\bowtie", ESymbol Rel "\x22C8")
, ("\\in", ESymbol Rel "\x2208")
, ("\\ni", ESymbol Rel "\x220B")
, ("\\owns", ESymbol Rel "\x220B")
, ("\\propto", ESymbol Rel "\x221D")
, ("\\vdash", ESymbol Rel "\x22A2")
, ("\\dashv", ESymbol Rel "\x22A3")
, ("\\models", ESymbol Rel "\x22A8")
, ("\\perp", ESymbol Rel "\x22A5")
, ("\\smile", ESymbol Rel "\x2323")
, ("\\frown", ESymbol Rel "\x2322")
, ("\\asymp", ESymbol Rel "\x224D")
, ("\\notin", ESymbol Rel "\x2209")
, ("\\gets", ESymbol Rel "\x2190")
, ("\\leftarrow", ESymbol Rel "\x2190")
, ("\\to", ESymbol Rel "\x2192")
, ("\\rightarrow", ESymbol Rel "\x2192")
, ("\\leftrightarrow", ESymbol Rel "\x2194")
, ("\\uparrow", ESymbol Rel "\x2191")
, ("\\downarrow", ESymbol Rel "\x2193")
, ("\\updownarrow", ESymbol Rel "\x2195")
, ("\\Leftarrow", ESymbol Rel "\x21D0")
, ("\\Rightarrow", ESymbol Rel "\x21D2")
, ("\\Leftrightarrow", ESymbol Rel "\x21D4")
, ("\\iff", ESymbol Rel "\x21D4")
, ("\\Uparrow", ESymbol Rel "\x21D1")
, ("\\Downarrow", ESymbol Rel "\x21D3")
, ("\\Updownarrow", ESymbol Rel "\x21D5")
, ("\\mapsto", ESymbol Rel "\x21A6")
, ("\\longleftarrow", ESymbol Rel "\x2190")
, ("\\longrightarrow", ESymbol Rel "\x2192")
, ("\\longleftrightarrow", ESymbol Rel "\x2194")
, ("\\Longleftarrow", ESymbol Rel "\x21D0")
, ("\\Longrightarrow", ESymbol Rel "\x21D2")
, ("\\Longleftrightarrow", ESymbol Rel "\x21D4")
, ("\\longmapsto", ESymbol Rel "\x21A6")
, ("\\sum", ESymbol Op "\x2211")
, ("\\prod", ESymbol Op "\x220F")
, ("\\bigcap", ESymbol Op "\x22C2")
, ("\\bigcup", ESymbol Op "\x22C3")
, ("\\bigwedge", ESymbol Op "\x22C0")
, ("\\bigvee", ESymbol Op "\x22C1")
, ("\\bigsqcap", ESymbol Op "\x2A05")
, ("\\bigsqcup", ESymbol Op "\x2A06")
, ("\\coprod", ESymbol Op "\x2210")
, ("\\bigoplus", ESymbol Op "\x2A01")
, ("\\bigotimes", ESymbol Op "\x2A02")
, ("\\bigodot", ESymbol Op "\x2A00")
, ("\\biguplus", ESymbol Op "\x2A04")
, ("\\int", ESymbol Op "\x222B")
, ("\\iint", ESymbol Op "\x222C")
, ("\\iiint", ESymbol Op "\x222D")
, ("\\oint", ESymbol Op "\x222E")
, ("\\prime", ESymbol Ord "\x2032")
, ("\\dots", ESymbol Ord "\x2026")
, ("\\ldots", ESymbol Ord "\x2026")
, ("\\cdots", ESymbol Ord "\x22EF")
, ("\\vdots", ESymbol Ord "\x22EE")
, ("\\ddots", ESymbol Ord "\x22F1")
, ("\\forall", ESymbol Op "\x2200")
, ("\\exists", ESymbol Op "\x2203")
, ("\\Re", ESymbol Ord "\x211C")
, ("\\Im", ESymbol Ord "\x2111")
, ("\\aleph", ESymbol Ord "\x2135")
, ("\\hbar", ESymbol Ord "\x210F")
, ("\\ell", ESymbol Ord "\x2113")
, ("\\wp", ESymbol Ord "\x2118")
, ("\\emptyset", ESymbol Ord "\x2205")
, ("\\infty", ESymbol Ord "\x221E")
, ("\\partial", ESymbol Ord "\x2202")
, ("\\nabla", ESymbol Ord "\x2207")
, ("\\triangle", ESymbol Ord "\x25B3")
, ("\\therefore", ESymbol Pun "\x2234")
, ("\\angle", ESymbol Ord "\x2220")
, ("\\diamond", ESymbol Op "\x22C4")
, ("\\Diamond", ESymbol Op "\x25C7")
, ("\\lozenge", ESymbol Op "\x25CA")
, ("\\neg", ESymbol Op "\x00AC")
, ("\\lnot", ESymbol Ord "\x00AC")
, ("\\bot", ESymbol Ord "\x22A5")
, ("\\top", ESymbol Ord "\x22A4")
, ("\\square", ESymbol Ord "\x25AB")
, ("\\Box", ESymbol Op "\x25A1")
, ("\\wr", ESymbol Ord "\x2240")
, ("\\!", ESpace (-3/18))
, ("\\,", ESpace (3/18))
, ("\\>", ESpace (4/18))
, ("\\:", ESpace (4/18))
, ("\\;", ESpace (5/18))
, ("\\ ", ESpace (4/18))
, ("\\quad", ESpace 1)
, ("\\qquad", ESpace 2)
, ("\\arccos", EMathOperator "arccos")
, ("\\arcsin", EMathOperator "arcsin")
, ("\\arctan", EMathOperator "arctan")
, ("\\arg", EMathOperator "arg")
, ("\\cos", EMathOperator "cos")
, ("\\cosh", EMathOperator "cosh")
, ("\\cot", EMathOperator "cot")
, ("\\coth", EMathOperator "coth")
, ("\\csc", EMathOperator "csc")
, ("\\deg", EMathOperator "deg")
, ("\\det", EMathOperator "det")
, ("\\dim", EMathOperator "dim")
, ("\\exp", EMathOperator "exp")
, ("\\gcd", EMathOperator "gcd")
, ("\\hom", EMathOperator "hom")
, ("\\inf", EMathOperator "inf")
, ("\\ker", EMathOperator "ker")
, ("\\lg", EMathOperator "lg")
, ("\\lim", EMathOperator "lim")
, ("\\liminf", EMathOperator "liminf")
, ("\\limsup", EMathOperator "limsup")
, ("\\ln", EMathOperator "ln")
, ("\\log", EMathOperator "log")
, ("\\max", EMathOperator "max")
, ("\\min", EMathOperator "min")
, ("\\Pr", EMathOperator "Pr")
, ("\\sec", EMathOperator "sec")
, ("\\sin", EMathOperator "sin")
, ("\\sinh", EMathOperator "sinh")
, ("\\sup", EMathOperator "sup")
, ("\\tan", EMathOperator "tan")
, ("\\tanh", EMathOperator "tanh")
]
-- text mode parsing
textual :: TP String
textual = regular <|> sps <|> ligature <|> textCommand <|> bracedText
<?> "text"
sps :: TP String
sps = " " <$ skipMany1 (oneOf " \t\n")
regular :: TP String
regular = many1 (noneOf "`'-~${}\\ \t")
ligature :: TP String
ligature = try ("\x2014" <$ string "---")
<|> try ("\x2013" <$ string "--")
<|> try ("\x201C" <$ string "``")
<|> try ("\x201D" <$ string "''")
<|> try ("\x2019" <$ string "'")
<|> try ("\x2018" <$ string "`")
<|> try ("\xA0" <$ string "~")
textCommand :: TP String
textCommand = do
cmd <- oneOfCommands (M.keys textCommands)
case M.lookup cmd textCommands of
Nothing -> fail ("Unknown control sequence " ++ cmd)
Just c -> c
bracedText :: TP String
bracedText = do
char '{'
inner <- concat <$> many textual
char '}'
return inner
tok :: TP Char
tok = (try $ char '{' *> spaces *> anyChar <* spaces <* char '}')
<|> anyChar
textCommands :: M.Map String (TP String)
textCommands = M.fromList
[ ("\\#", return "#")
, ("\\$", return "$")
, ("\\%", return "%")
, ("\\&", return "&")
, ("\\_", return "_")
, ("\\{", return "{")
, ("\\}", return "}")
, ("\\ldots", return "\x2026")
, ("\\textasciitilde", return "~")
, ("\\textasciicircum", return "^")
, ("\\textbackslash", return "\\")
, ("\\char", parseC)
, ("\\aa", return "å")
, ("\\AA", return "Å")
, ("\\ss", return "ß")
, ("\\o", return "ø")
, ("\\O", return "Ø")
, ("\\L", return "Ł")
, ("\\l", return "ł")
, ("\\ae", return "æ")
, ("\\AE", return "Æ")
, ("\\oe", return "œ")
, ("\\OE", return "Œ")
, ("\\`", option "`" $ grave <$> tok)
, ("\\'", option "'" $ acute <$> tok)
, ("\\^", option "^" $ circ <$> tok)
, ("\\~", option "~" $ tilde <$> tok)
, ("\\\"", option "\"" $ try $ umlaut <$> tok)
, ("\\.", option "." $ try $ dot <$> tok)
, ("\\=", option "=" $ try $ macron <$> tok)
, ("\\c", option "c" $ try $ cedilla <$> tok)
, ("\\v", option "v" $ try $ hacek <$> tok)
, ("\\u", option "u" $ try $ breve <$> tok)
, ("\\ ", return " ")
]
parseC :: TP String
parseC = try $ char '`' >> count 1 anyChar
-- the functions below taken from pandoc:
grave :: Char -> String
grave 'A' = "À"
grave 'E' = "È"
grave 'I' = "Ì"
grave 'O' = "Ò"
grave 'U' = "Ù"
grave 'a' = "à"
grave 'e' = "è"
grave 'i' = "ì"
grave 'o' = "ò"
grave 'u' = "ù"
grave c = [c]
acute :: Char -> String
acute 'A' = "Á"
acute 'E' = "É"
acute 'I' = "Í"
acute 'O' = "Ó"
acute 'U' = "Ú"
acute 'Y' = "Ý"
acute 'a' = "á"
acute 'e' = "é"
acute 'i' = "í"
acute 'o' = "ó"
acute 'u' = "ú"
acute 'y' = "ý"
acute 'C' = "Ć"
acute 'c' = "ć"
acute 'L' = "Ĺ"
acute 'l' = "ĺ"
acute 'N' = "Ń"
acute 'n' = "ń"
acute 'R' = "Ŕ"
acute 'r' = "ŕ"
acute 'S' = "Ś"
acute 's' = "ś"
acute 'Z' = "Ź"
acute 'z' = "ź"
acute c = [c]
circ :: Char -> String
circ 'A' = "Â"
circ 'E' = "Ê"
circ 'I' = "Î"
circ 'O' = "Ô"
circ 'U' = "Û"
circ 'a' = "â"
circ 'e' = "ê"
circ 'i' = "î"
circ 'o' = "ô"
circ 'u' = "û"
circ 'C' = "Ĉ"
circ 'c' = "ĉ"
circ 'G' = "Ĝ"
circ 'g' = "ĝ"
circ 'H' = "Ĥ"
circ 'h' = "ĥ"
circ 'J' = "Ĵ"
circ 'j' = "ĵ"
circ 'S' = "Ŝ"
circ 's' = "ŝ"
circ 'W' = "Ŵ"
circ 'w' = "ŵ"
circ 'Y' = "Ŷ"
circ 'y' = "ŷ"
circ c = [c]
tilde :: Char -> String
tilde 'A' = "Ã"
tilde 'a' = "ã"
tilde 'O' = "Õ"
tilde 'o' = "õ"
tilde 'I' = "Ĩ"
tilde 'i' = "ĩ"
tilde 'U' = "Ũ"
tilde 'u' = "ũ"
tilde 'N' = "Ñ"
tilde 'n' = "ñ"
tilde c = [c]
umlaut :: Char -> String
umlaut 'A' = "Ä"
umlaut 'E' = "Ë"
umlaut 'I' = "Ï"
umlaut 'O' = "Ö"
umlaut 'U' = "Ü"
umlaut 'a' = "ä"
umlaut 'e' = "ë"
umlaut 'i' = "ï"
umlaut 'o' = "ö"
umlaut 'u' = "ü"
umlaut c = [c]
dot :: Char -> String
dot 'C' = "Ċ"
dot 'c' = "ċ"
dot 'E' = "Ė"
dot 'e' = "ė"
dot 'G' = "Ġ"
dot 'g' = "ġ"
dot 'I' = "İ"
dot 'Z' = "Ż"
dot 'z' = "ż"
dot c = [c]
macron :: Char -> String
macron 'A' = "Ā"
macron 'E' = "Ē"
macron 'I' = "Ī"
macron 'O' = "Ō"
macron 'U' = "Ū"
macron 'a' = "ā"
macron 'e' = "ē"
macron 'i' = "ī"
macron 'o' = "ō"
macron 'u' = "ū"
macron c = [c]
cedilla :: Char -> String
cedilla 'c' = "ç"
cedilla 'C' = "Ç"
cedilla 's' = "ş"
cedilla 'S' = "Ş"
cedilla 't' = "ţ"
cedilla 'T' = "Ţ"
cedilla 'e' = "ȩ"
cedilla 'E' = "Ȩ"
cedilla 'h' = "ḩ"
cedilla 'H' = "Ḩ"
cedilla 'o' = "o̧"
cedilla 'O' = "O̧"
cedilla c = [c]
hacek :: Char -> String
hacek 'A' = "Ǎ"
hacek 'a' = "ǎ"
hacek 'C' = "Č"
hacek 'c' = "č"
hacek 'D' = "Ď"
hacek 'd' = "ď"
hacek 'E' = "Ě"
hacek 'e' = "ě"
hacek 'G' = "Ǧ"
hacek 'g' = "ǧ"
hacek 'H' = "Ȟ"
hacek 'h' = "ȟ"
hacek 'I' = "Ǐ"
hacek 'i' = "ǐ"
hacek 'j' = "ǰ"
hacek 'K' = "Ǩ"
hacek 'k' = "ǩ"
hacek 'L' = "Ľ"
hacek 'l' = "ľ"
hacek 'N' = "Ň"
hacek 'n' = "ň"
hacek 'O' = "Ǒ"
hacek 'o' = "ǒ"
hacek 'R' = "Ř"
hacek 'r' = "ř"
hacek 'S' = "Š"
hacek 's' = "š"
hacek 'T' = "Ť"
hacek 't' = "ť"
hacek 'U' = "Ǔ"
hacek 'u' = "ǔ"
hacek 'Z' = "Ž"
hacek 'z' = "ž"
hacek c = [c]
breve :: Char -> String
breve 'A' = "Ă"
breve 'a' = "ă"
breve 'E' = "Ĕ"
breve 'e' = "ĕ"
breve 'G' = "Ğ"
breve 'g' = "ğ"
breve 'I' = "Ĭ"
breve 'i' = "ĭ"
breve 'O' = "Ŏ"
breve 'o' = "ŏ"
breve 'U' = "Ŭ"
breve 'u' = "ŭ"
breve c = [c]
|
timtylin/scholdoc-texmath
|
src/Text/TeXMath/Readers/TeX.hs
|
gpl-2.0
| 39,826 | 0 | 19 | 11,857 | 13,169 | 6,970 | 6,199 | 1,047 | 11 |
module AVR.AVRState where
import qualified AVR.StatusReg as S
import Data.Bits
import Data.List (intercalate, transpose)
import Data.List.Split (chunksOf)
import Data.Maybe (fromMaybe)
import Text.Printf (printf)
import Data.Vector (Vector, (!), (//), (!?))
import qualified Data.Vector as V
import Data.Word (Word8, Word16)
import Control.Applicative
----------------
-- DATA TYPES --
----------------
type ProgramCounter = Word16
-- | The AVR has 32 general purpose registers
data RegNum = R0 | R1 | R2 | R3 |
R4 | R5 | R6 | R7 |
R8 | R9 | R10 | R11 |
R12 | R13 | R14 | R15 |
R16 | R17 | R18 | R19 |
R20 | R21 | R22 | R23 |
R24 | R25 | R26 | R27 |
R28 | R29 | R30 | R31
deriving (Eq, Enum, Show)
-- | The AVR utilizes the following register pairs for addressing
-- | W = R25:R24, X = R27:R26, Y = R29:R28, Z = R31:R30
data AddressRegNum = W | X | Y | Z
deriving (Eq, Enum, Show)
-- | Registers are 8 bits wide
type Reg = Word8
-- | Register pairs are 16 bits wide
type WideReg = Word16
-- | Represents the 32 general purpose registers
type RegFile = [Reg]
type IOAddress = Word8
type RamAddress = Word16
data AVRState = AVRState {
programCounter :: ProgramCounter,
regFile :: RegFile,
sreg :: S.StatusReg,
programMemory :: Vector Word16,
ioRegs :: Vector Word8,
ram :: Vector Word8,
skipInstruction :: Bool,
cycles :: Integer
} deriving (Show)
-- | The starting state of the processor, with the given program memory
initialState :: Vector Word16 -> AVRState
initialState pmem = AVRState {
programCounter = 0,
regFile = replicate 32 0x00,
sreg = S.empty,
programMemory = pmem,
ioRegs = V.replicate 64 0x00,
ram = V.replicate 256 0x00,
skipInstruction = False,
cycles = 0
}
getPC :: AVRState -> ProgramCounter
getPC = programCounter
setPC :: ProgramCounter -> AVRState -> AVRState
setPC pc state = state {programCounter = pc}
--------------------------------
-- REGISTER FILE MANIPULATION --
--------------------------------
-- | The register number which holds the lower byte of this address register
addressPairNum :: AddressRegNum -> RegNum
addressPairNum W = R24
addressPairNum X = R26
addressPairNum Y = R28
addressPairNum Z = R30
-- | Retrieves the value of a register
getReg :: RegNum -> AVRState -> Reg
getReg num (AVRState {regFile=regs}) = regs !! fromEnum num
-- | Retrieves a register pair, where the specified reg number represents the lower-byte of the pair
getRegPair :: RegNum -> AVRState -> WideReg
getRegPair num state = (rh `shiftL` 8) + rl
where
[rl, rh] = map (fromIntegral . flip getReg state) [num, succ num]
-- | Retrieves the value of an address register
getAddressReg :: AddressRegNum -> AVRState -> WideReg
getAddressReg = getRegPair . addressPairNum
-- | Sets a register
setReg :: RegNum -> Word8 -> AVRState -> AVRState
setReg num val state = state {regFile = left ++ [val] ++ right}
where
regs = regFile state
(left, _:right) = splitAt (fromEnum num) regs
-- | Sets a pair of registers
setRegPair :: RegNum -> Word16 -> AVRState -> AVRState
setRegPair num val = setReg rh high . setReg rl low
where
[rh, rl] = [succ num, num]
low = fromIntegral (val .&. 0x00FF)
high = fromIntegral (val `shiftR` 8)
-- | Sets the value of an address register
setAddressReg :: AddressRegNum -> Word16 -> AVRState -> AVRState
setAddressReg = setRegPair . addressPairNum
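-- A minimal usage sketch (illustrative only): the X pair lives in R27:R26,
-- so a value written through 'setAddressReg' reads back unchanged.
_exampleAddressPair :: WideReg
_exampleAddressPair =
  getAddressReg X (setAddressReg X 0x1234 (initialState V.empty))
-- _exampleAddressPair == 0x1234, i.e. R27 holds 0x12 and R26 holds 0x34.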
-- | Pretty prints a reg file as a table
prettyRegFile :: AVRState -> String
prettyRegFile state = unlines $ map (intercalate " | " . map showReg) rows
where
rows = transpose . chunksOf 8 . enumFrom $ R0
showReg num = printf "R%02d: %02X" (fromEnum num) (getReg num state)
prettyPrintRegs :: AVRState -> String
prettyPrintRegs = unlines . zipWith ($) funcs . repeat
where
funcs = [prettyRegFile,
printf "X : %04X" . getAddressReg X,
printf "Y : %04X" . getAddressReg Y,
printf "Z : %04X" . getAddressReg Z,
printf "SP: %04X" . getSP,
printf "PC: %04X" . getPC,
show . sreg
]
-------------------------
-- MEMORY MANIPULATION --
-------------------------
-- | Reads an IO register
readIOReg :: IOAddress -> AVRState -> Word8
readIOReg addr state = ioRegs state ! fromIntegral addr
-- | Writes to an IO register
writeIOReg :: IOAddress -> Word8 -> AVRState -> AVRState
writeIOReg addr val state = state {ioRegs = ioRegs'}
where
ioRegs' = ioRegs state // [(fromIntegral addr, val)]
-- | Reads a location in SRAM
readRam :: RamAddress -> AVRState -> Word8
readRam addr state = ram state ! fromIntegral addr
-- | Writes to a location in SRAM
writeRam :: RamAddress -> Word8 -> AVRState -> AVRState
writeRam addr val state = state {ram = ram'}
where
ram' = ram state // [(fromIntegral addr, val)]
-- | Helper function for accessing data memory space
accessDMem :: (RegNum -> a, IOAddress -> a, RamAddress -> a) -> Word16 -> a
accessDMem (regOp, ioRegOp, ramOp) addr
| addr < 32 = regOp rnum
| (addr - 32) < 64 = ioRegOp ioAddr
| otherwise = ramOp ramAddr
where
rnum = toEnum $ fromIntegral addr
ioAddr = fromIntegral $ addr - 32
ramAddr = fromIntegral $ addr - 96
-- | Reads a value from the data memory, which maps the register file, io regs, and SRAM
readDMem :: Word16 -> AVRState -> Word8
readDMem = accessDMem (getReg, readIOReg, readRam)
-- | Writes a value to the reg file, io regs, or ram, depending on the address
writeDMem :: Word16 -> Word8 -> AVRState -> AVRState
writeDMem = accessDMem (setReg, writeIOReg, writeRam)
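-- The mapping implemented by 'accessDMem', spelled out on a state @s@
-- (a sketch, not an exhaustive specification):
--
-- > readDMem  0 s == getReg R0 s      -- addresses  0..31  hit the register file
-- > readDMem 32 s == readIOReg 0 s    -- addresses 32..95  hit the I/O registers
-- > readDMem 96 s == readRam 0 s      -- addresses 96..    hit SRAM
-- | Reads a single byte from program memory; odd byte addresses select the
-- high byte of the containing 16-bit word.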
readPMem8 :: Word16 -> AVRState -> Word8
readPMem8 addr state = if testBit addr 0 then hi else lo
where
val = readPMem16 (addr `shiftR` 1) state
hi = fromIntegral $ val `shiftR` 8
lo = fromIntegral $ val .&. 0xFF
-- | Reads two bytes from program memory
readPMem16 :: Word16 -> AVRState -> Word16
readPMem16 addr state = fromMaybe 0xFFFF (programMemory state !? fromIntegral addr)
------------------------
-- STACK MANIPULATION --
------------------------
-- | The current stack pointer value
getSP :: AVRState -> Word16
getSP state = (sph `shiftL` 8) + spl
where
sph = fromIntegral $ readIOReg 0x3E state
spl = fromIntegral $ readIOReg 0x3D state
-- | Sets the stack pointer
setSP :: Word16 -> AVRState -> AVRState
setSP sp = writeIOReg 0x3E sph . writeIOReg 0x3D spl
where
sph = fromIntegral $ sp `shiftR` 8
spl = fromIntegral $ sp .&. 0x00FF
-- | Increments the stack pointer by one
incSP :: AVRState -> AVRState
incSP = setSP =<< (+1) . getSP
-- | Decrements the stack pointer by one
decSP :: AVRState -> AVRState
decSP = setSP =<< subtract 1 . getSP
-- | Pushes a value onto the stack, this also decrements the stack pointer
stackPush :: Word8 -> AVRState -> AVRState
stackPush val = decSP . (writeDMem' val =<< getSP)
where writeDMem' = flip writeDMem
-- | Pops a value off the stack, this also increments the stack pointer
stackPop :: AVRState -> AVRState
stackPop = incSP
-- | Looks at the value at the top of the stack
stackPeek :: AVRState -> Word8
stackPeek = readDMem =<< (+1) . getSP
-- | Pushes the PC onto the stack
stackPush16 :: Word16 -> AVRState -> AVRState
stackPush16 val = do
let lo = fromIntegral (val .&. 0xFF)
hi = fromIntegral (val `shiftR` 8)
stackPush hi . stackPush lo
-- | Looks at the PC saved on the stack
stackPeek16 :: AVRState -> Word16
stackPeek16 = do
hi <- fromIntegral <$> stackPeek
lo <- fromIntegral <$> (stackPeek . stackPop)
return (hi `shiftL` 8 + lo)
-- | Removes the PC from the stack
stackPop16 :: AVRState -> AVRState
stackPop16 = stackPop . stackPop
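-- A minimal usage sketch (illustrative only): after pointing the stack into
-- SRAM, a 16-bit push followed by a 16-bit peek gives back the pushed value.
_exampleStackRoundTrip :: Word16
_exampleStackRoundTrip =
  stackPeek16 (stackPush16 0x1234 (setSP 0x015F (initialState V.empty)))
-- _exampleStackRoundTrip == 0x1234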
-----------------------
-- UTILITY FUNCTIONS --
-----------------------
clearBits :: (Bits a) => [Int] -> a -> a
clearBits = foldl (.) id . map (flip clearBit)
onLow :: (Bits a, Integral a) => (Word8 -> Word8) -> a -> a
onLow func = (.|.) <$> clearBits [0..7] <*> fromIntegral . func . fromIntegral
onHigh :: (Bits a, Integral a) => (Word8 -> Word8) -> a -> a
onHigh func = (.|.) <$> clearBits [8..15] <*> (`shiftL` 8) . fromIntegral . func . fromIntegral . (`shiftR` 8)
|
dhrosa/haskell-avr-simulator
|
AVR/AVRState.hs
|
gpl-3.0
| 8,291 | 0 | 12 | 1,883 | 2,283 | 1,274 | 1,009 | -1 | -1 |
module QHaskell.Variable.Scoped
(Var(..),prd,inc) where
import QHaskell.MyPrelude
import qualified QHaskell.Nat.ADT as NA
data Var :: NA.Nat -> * where
Zro :: Var (NA.Suc n)
Suc :: Var n -> Var (NA.Suc n)
deriving instance Eq (Var n)
deriving instance Ord (Var n)
int :: Var n -> Word32
int Zro = 0
int (Suc x) = 1 + int x
instance Show (Var n) where
show v = show (int v)
prd :: Var (NA.Suc n) -> Var n
prd (Suc x) = x
prd _ = impossible
inc :: (Var n -> Var n') ->
Var (NA.Suc n) -> Var (NA.Suc n')
inc _ Zro = Zro
inc f (Suc x) = Suc (f x)
|
shayan-najd/QHaskell
|
QHaskell/Variable/Scoped.hs
|
gpl-3.0
| 583 | 0 | 10 | 155 | 316 | 163 | 153 | -1 | -1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Run.Scene.Plain.SoundScene
(
soundSceneNoise,
) where
import MyPrelude
import Game
import Game.Run.RunData
import Game.Grid.GridWorld.Node
import OpenAL
import OpenAL.Helpers
soundSceneNoise :: SoundScene -> UInt -> Float -> Node -> IO ()
soundSceneNoise sound ix pitch node = do
alSourceStop (soundSceneNoiseSrc sound)
alSourcei (soundSceneNoiseSrc sound) al_BUFFER
$ fI $ noisearrayAt (soundSceneNoiseBufs sound) ix
alSourcef (soundSceneNoiseSrc sound) al_PITCH $ rTF pitch
case node of
Node x y z -> alSource3f (soundSceneNoiseSrc sound) al_POSITION
(fI x) (fI y) (fI z)
alSourcePlay $ soundSceneNoiseSrc sound
|
karamellpelle/grid
|
source/Game/Run/Scene/Plain/SoundScene.hs
|
gpl-3.0
| 1,465 | 0 | 12 | 306 | 231 | 126 | 105 | 19 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.LiveChatModerators.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists moderators for a live chat.
--
-- /See:/ <https://developers.google.com/youtube/v3 YouTube Data API Reference> for @youtube.liveChatModerators.list@.
module Network.Google.Resource.YouTube.LiveChatModerators.List
(
-- * REST Resource
LiveChatModeratorsListResource
-- * Creating a Request
, liveChatModeratorsList
, LiveChatModeratorsList
-- * Request Lenses
, livPart
, livLiveChatId
, livPageToken
, livMaxResults
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.liveChatModerators.list@ method which the
-- 'LiveChatModeratorsList' request conforms to.
type LiveChatModeratorsListResource =
"youtube" :>
"v3" :>
"liveChat" :>
"moderators" :>
QueryParam "liveChatId" Text :>
QueryParam "part" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] LiveChatModeratorListResponse
-- | Lists moderators for a live chat.
--
-- /See:/ 'liveChatModeratorsList' smart constructor.
data LiveChatModeratorsList = LiveChatModeratorsList'
{ _livPart :: !Text
, _livLiveChatId :: !Text
, _livPageToken :: !(Maybe Text)
, _livMaxResults :: !(Textual Word32)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'LiveChatModeratorsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'livPart'
--
-- * 'livLiveChatId'
--
-- * 'livPageToken'
--
-- * 'livMaxResults'
liveChatModeratorsList
:: Text -- ^ 'livPart'
-> Text -- ^ 'livLiveChatId'
-> LiveChatModeratorsList
liveChatModeratorsList pLivPart_ pLivLiveChatId_ =
LiveChatModeratorsList'
{ _livPart = pLivPart_
, _livLiveChatId = pLivLiveChatId_
, _livPageToken = Nothing
, _livMaxResults = 5
}
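-- A minimal usage sketch (the chat id below is a placeholder, not a real
-- value): build the request with the smart constructor and adjust the
-- optional fields through the lenses, e.g.
--
-- > liveChatModeratorsList "id,snippet" "some-live-chat-id"
-- >   & livMaxResults .~ 50
--
-- and hand the resulting value to a request runner such as
-- 'Network.Google.send'.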
-- | The part parameter specifies the liveChatModerator resource parts that
-- the API response will include. Supported values are id and snippet.
livPart :: Lens' LiveChatModeratorsList Text
livPart = lens _livPart (\ s a -> s{_livPart = a})
-- | The liveChatId parameter specifies the YouTube live chat for which the
-- API should return moderators.
livLiveChatId :: Lens' LiveChatModeratorsList Text
livLiveChatId
= lens _livLiveChatId
(\ s a -> s{_livLiveChatId = a})
-- | The pageToken parameter identifies a specific page in the result set
-- that should be returned. In an API response, the nextPageToken and
-- prevPageToken properties identify other pages that could be retrieved.
livPageToken :: Lens' LiveChatModeratorsList (Maybe Text)
livPageToken
= lens _livPageToken (\ s a -> s{_livPageToken = a})
-- | The maxResults parameter specifies the maximum number of items that
-- should be returned in the result set.
livMaxResults :: Lens' LiveChatModeratorsList Word32
livMaxResults
= lens _livMaxResults
(\ s a -> s{_livMaxResults = a})
. _Coerce
instance GoogleRequest LiveChatModeratorsList where
type Rs LiveChatModeratorsList =
LiveChatModeratorListResponse
type Scopes LiveChatModeratorsList =
'["https://www.googleapis.com/auth/youtube",
"https://www.googleapis.com/auth/youtube.force-ssl",
"https://www.googleapis.com/auth/youtube.readonly"]
requestClient LiveChatModeratorsList'{..}
= go (Just _livLiveChatId) (Just _livPart)
_livPageToken
(Just _livMaxResults)
(Just AltJSON)
youTubeService
where go
= buildClient
(Proxy :: Proxy LiveChatModeratorsListResource)
mempty
|
rueshyna/gogol
|
gogol-youtube/gen/Network/Google/Resource/YouTube/LiveChatModerators/List.hs
|
mpl-2.0
| 4,667 | 0 | 16 | 1,091 | 574 | 339 | 235 | 89 | 1 |
-- yammat - Yet Another MateMAT
-- Copyright (C) 2015 Amedeo Molnár
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published
-- by the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( getApplicationDev
, appMain
, develMain
, makeFoundation
) where
import Control.Monad.Logger (liftLoc, runLoggingT)
import Database.Persist.Postgresql (createPostgresqlPool, pgConnStr,
pgPoolSize, runSqlPool)
import Import
import Language.Haskell.TH.Syntax (qLocation)
import Network.Wai.Handler.Warp (Settings, defaultSettings,
defaultShouldDisplayException,
runSettings, setHost,
setOnException, setPort)
import Network.Wai.Middleware.RequestLogger (Destination (Logger),
IPAddrSource (..),
OutputFormat (..), destination,
mkRequestLogger, outputFormat)
import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet,
toLogStr)
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Common
import Handler.Home
import Handler.Select
import Handler.Restock
import Handler.NewUser
import Handler.Buy
import Handler.Journal
import Handler.Payout
import Handler.Summary
import Handler.Modify
import Handler.CashCheck
import Handler.Avatar
import Handler.Barcode
import Handler.Transfer
import Handler.Supplier
import Handler.SupplierActions
import Handler.Demand
import Handler.Statistics
import Handler.Pinentry
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
mkYesodDispatch "App" resourcesApp
-- | This function allocates resources (such as a database connection pool),
-- performs initialization and returns a foundation datatype value. This is also
-- the place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeFoundation :: AppSettings -> IO App
makeFoundation appSettings = do
-- Some basic initializations: HTTP connection manager, logger, and static
-- subsite.
appHttpManager <- newManager
appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger
appStatic <-
(if appMutableStatic appSettings then staticDevel else static)
(appStaticDir appSettings)
-- We need a log function to create a connection pool. We need a connection
-- pool to create our foundation. And we need our foundation to get a
-- logging function. To get out of this loop, we initially create a
-- temporary foundation without a real connection pool, get a log function
-- from there, and then create the real foundation.
let mkFoundation appConnPool = App {..}
tempFoundation = mkFoundation $ error "connPool forced in tempFoundation"
logFunc = messageLoggerSource tempFoundation appLogger
-- Create the database connection pool
pool <- flip runLoggingT logFunc $ createPostgresqlPool
(pgConnStr $ appDatabaseConf appSettings)
(pgPoolSize $ appDatabaseConf appSettings)
-- Perform database migration using our application's logging settings.
runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc
-- Return the foundation
return $ mkFoundation pool
-- | Convert our foundation to a WAI Application by calling @toWaiAppPlain@ and
-- applying some additional middlewares.
makeApplication :: App -> IO Application
makeApplication foundation = do
logWare <- mkRequestLogger def
{ outputFormat =
if appDetailedRequestLogging $ appSettings foundation
then Detailed True
else Apache
(if appIpFromHeader $ appSettings foundation
then FromFallback
else FromSocket)
, destination = Logger $ loggerSet $ appLogger foundation
}
-- Create the WAI application and apply middlewares
appPlain <- toWaiAppPlain foundation
return $ logWare $ defaultMiddlewaresNoLogging appPlain
-- | Warp settings for the given foundation value.
warpSettings :: App -> Settings
warpSettings foundation =
setPort (appPort $ appSettings foundation)
$ setHost (appHost $ appSettings foundation)
$ setOnException (\_req e ->
when (defaultShouldDisplayException e) $ messageLoggerSource
foundation
(appLogger foundation)
$(qLocation >>= liftLoc)
"yesod"
LevelError
(toLogStr $ "Exception from Warp: " ++ show e))
defaultSettings
-- | For yesod devel, return the Warp settings and WAI Application.
getApplicationDev :: IO (Settings, Application)
getApplicationDev = do
settings <- loadYamlSettings [configSettingsYml] [] useEnv
foundation <- makeFoundation settings
app <- makeApplication foundation
wsettings <- getDevSettings $ warpSettings foundation
return (wsettings, app)
-- | main function for use by yesod devel
develMain :: IO ()
develMain = develMainHelper getApplicationDev
-- | The @main@ function for an executable running this site.
appMain :: IO ()
appMain = do
-- Get the settings from all relevant sources
settings <- loadYamlSettingsArgs
-- fall back to compile-time values, set to [] to require values at runtime
[configSettingsYmlValue]
-- allow environment variables to override
useEnv
-- Generate the foundation from the settings
foundation <- makeFoundation settings
-- Generate a WAI Application from the foundation
app <- makeApplication foundation
-- Run the application with Warp
runSettings (warpSettings foundation) app
|
nek0/yammat
|
Application.hs
|
agpl-3.0
| 6,689 | 0 | 16 | 1,705 | 891 | 491 | 400 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Millionaire where
import Control.Applicative
import Control.Monad
import Data.Aeson
import Data.ByteString.Lazy.UTF8 (fromString)
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import Data.Map (Map)
import qualified Data.Map as Map
data Question = Question { qText :: String
, qAnswer :: String
, qChoices :: Map String String
}
instance Show Question where
show (Question question _ choices) =
question ++ "\n\n" ++
intercalate "\n" (map showChoice $ Map.toList choices) ++ "\n"
where
showChoice :: (String, String) -> String
showChoice choice = fst choice ++ ") " ++ snd choice
instance FromJSON Question where
parseJSON (Object v) = Question <$> v .: "question"
<*> v .: "answer"
<*> v .: "choices"
parseJSON _ = mzero
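-- The parser above expects each question to be encoded as a JSON object of
-- the following shape (the content is only an illustration):
--
-- > { "question": "What is 2 + 2?"
-- > , "answer": "b"
-- > , "choices": { "a": "3", "b": "4", "c": "5", "d": "22" }
-- > }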
isRight :: Question -> String -> Bool
isRight (Question _ rightAnswer _) userAnswer = userAnswer == rightAnswer
getRight :: Question -> String
getRight (Question _ rightAnswer _) = rightAnswer
getQuestions :: FilePath -> IO [Question]
getQuestions filepath = do
questionsFile <- readFile filepath
let questions = (decode . fromString) questionsFile :: Maybe [Question]
return $ fromMaybe [] questions
askQuestion :: Question -> IO Bool
askQuestion question = do
print question
putStrLn "Answer: "
answer <- getLine
return $ isRight question answer
|
ayakovlenko/millionaire
|
src/main/Millionaire.hs
|
unlicense
| 1,534 | 0 | 12 | 404 | 443 | 230 | 213 | 39 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE PolyKinds #-}
module Type.Operators where
import Data.Typeable
import Prelude
import GHC.Exts (Constraint)
import GHC.TypeLits
import Type.Bool
import Type.Container
import Type.Wrapped
-- | The `$$` operator is just like the `$` operator, but with an even lower precedence level.
-- Unlike the value-level `$`, the type-level one has a precedence level of `infixr 1`
-- in order to be used in function arguments, like `edge :: Node $ Source a -> Node $ Target a -> a`.
-- The `$$` operator has higher precedence than `->`, so the above expression would not be valid when using it.
infixr 0 $$
type f $$ a = f a
infixr 1 $
type f $ a = f a
infixl 1 &
type a & f = f a
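-- A few illustrative expansions of the synonyms above:
--
-- > Maybe $ Either Int Bool  ~  Maybe (Either Int Bool)
-- > Int & Maybe              ~  Maybe Int
-- > f $$ a                   ~  f a   (binding even less tightly than $)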
|
wdanilo/typelevel
|
src/Type/Operators.hs
|
apache-2.0
| 878 | 0 | 5 | 193 | 105 | 67 | 38 | 20 | 0 |
{-
Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Camfort.Specification.Stencils.Annotation () where
import Camfort.Analysis.Annotations
import Camfort.Analysis.CommentAnnotator
import qualified Camfort.Specification.Stencils.Grammar as Gram
import qualified Language.Fortran.AST as F
import qualified Language.Fortran.Analysis as FA
{- *** Routines for associating annotations to ASTs -}
-- Instances for embedding parsed specifications into the AST
instance ASTEmbeddable (FA.Analysis Annotation) Gram.Specification where
annotateWithAST ann ast =
onPrev (\ann -> ann { stencilSpec = Just $ Left ast }) ann
instance Linkable (FA.Analysis Annotation) where
link ann (b@(F.BlDo {})) =
onPrev (\ann -> ann { stencilBlock = Just b }) ann
link ann (b@(F.BlStatement _ _ _ (F.StExpressionAssign {}))) =
onPrev (\ann -> ann { stencilBlock = Just b }) ann
link ann _ = ann
linkPU ann _ = ann
|
mrd/camfort
|
src/Camfort/Specification/Stencils/Annotation.hs
|
apache-2.0
| 1,592 | 0 | 13 | 283 | 262 | 152 | 110 | 18 | 0 |
module Main (main) where
import Control.Applicative ((<$>))
import Control.Error.Script (runScript,
scriptIO)
import Control.Error.Safe (tryHead)
import System.Environment (getArgs)
import Buster.Pool (newPool,
startPool,
stopPool)
import Buster.Config (installReloadHandler,
reloadConfig,
summonConfig,
setupConfigWatch)
import Buster.Types
import Buster.Logger
import Buster.Monitor
main :: IO ()
main = runScript $ do
args <- scriptIO getArgs
configFile <- tryHead "Specify a config file" args
scriptIO $ do
configWatch <- setupConfigWatch
debugM "Loading initial config"
reloadConfig configFile configWatch
debugM "Installing Signal Handlers"
installReloadHandler configFile configWatch
run configFile configWatch
--TODO: takeMVar is a leaky abstraction here
run :: FilePath -> ConfigWatch -> IO ()
run configFile configWatch = runScript $ scriptIO $ runWithConfig configFile configWatch =<< summonConfig configWatch
runWithConfig :: FilePath -> ConfigWatch -> Config -> IO ()
runWithConfig configFile configWatch cfg = do
stoppedPool <- newPool cfg
pool <- startPool stoppedPool
newCfg <- withMonitoring $ \inotify -> do
mayWatchDesc <- if configMonitor cfg
then
Just <$> installMonitor inotify configFile configWatch
else return Nothing
newCfg <- summonConfig configWatch
case mayWatchDesc of
Just desc -> uninstallMonitor configFile desc
_ -> return ()
return newCfg
stopPool pool
runWithConfig configFile configWatch newCfg
|
MichaelXavier/Buster
|
src/Buster/Main.hs
|
bsd-2-clause
| 1,808 | 0 | 16 | 555 | 412 | 203 | 209 | 44 | 3 |
{-# LANGUAGE OverloadedStrings
#-}
import System.Environment
import System.Process (runInteractiveProcess, waitForProcess, ProcessHandle)
import System.IO (Handle, stderr, stdout, stdin, hClose)
import Data.ByteString (ByteString, pack, hGetContents)
import qualified Data.ByteString
import Data.ByteString.Char8 (unpack, hPutStrLn)
import qualified Data.ByteString.Char8 as Char8
import Data.Word
import Control.Applicative
import Control.Monad
import qualified Text.Printf
import Data.IORef
import Data.List
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Data.ByteString.ShellEscape hiding (sh, bash)
-- It is best to implement the echo test with `printf':
--
-- . Some echo implementations always interpret backslash escapes like \3
-- and give us no way to turn it off. Dash is like this.
--
-- . GNU echo can not be made to simply ignore options like `--help'.
--
-- The `printf' implementations available to me -- Bash and Dash --
-- consistently ignore backslash escape sequences and any options that follow
-- the format argument.
--
printf :: (Shell t, Escape t) => t -> IO ByteString
printf escaped = do
(i, o, e, p) <- shell escaped cmd
exit <- waitForProcess p
hGetContents o <* mapM_ hClose [i, o, e]
where
cmd = "printf '%s' " ++ unpack (bytes escaped)
prop_echoBash :: ByteString -> Property
prop_echoBash = something_prop escapeBash
prop_echoSh :: ByteString -> Property
prop_echoSh = something_prop escapeSh
something_prop esc b = monadicIO $ do
assert =<< run (test printf b (esc b))
escapeSh :: ByteString -> Sh
escapeSh b = escape b
escapeBash :: ByteString -> Bash
escapeBash b = escape b
test cmd original escaped = do
b <- cmd escaped
Data.ByteString.appendFile "./lengths" (displayLength original)
Data.ByteString.appendFile "./chars" (Char8.unlines $ displayBytes original)
when (b /= original) $ do
err "Result differs from unescaped result:"
err "Escaped form:"
err (Char8.unwords . displayBytes $ bytes escaped)
err "Output:"
err (Char8.unwords . displayBytes $ b)
err "Original:"
err (Char8.unwords . displayBytes $ original)
return (b == original)
displayBytes = fmap Char8.pack
. fmap (\w -> Text.Printf.printf "0x%02x" w)
. Data.ByteString.unpack
displayLength = Char8.pack
. (\w -> Text.Printf.printf "%4d\n" w)
. Data.ByteString.length
class (Escape t) => Shell t where
shell :: t -> String -> IO (Handle, Handle, Handle, ProcessHandle)
instance Shell Sh where
shell _ = sh
instance Shell Bash where
shell _ = bash
sh s = runInteractiveProcess "sh" ["-c", s] Nothing Nothing
bash s = runInteractiveProcess "bash" ["-c", s] Nothing Nothing
instance Arbitrary ByteString where
arbitrary = do
bytes <- arbitrary :: Gen [NonZero Word8]
NonEmpty bytes' <- arbitrary :: Gen (NonEmptyList (NonZero Word8))
pack `fmap` elements
(fmap unNonZero `fmap` [bytes', bytes', bytes, bytes', bytes'])
where
unNonZero (NonZero t) = t
main = do
runSh <- newIORef True
runBash <- newIORef True
testsR <- newIORef 10000
let testsRwrite = writeIORef testsR
noBash = writeIORef runBash False
noSh = writeIORef runSh False
args <- getArgs
case (sort . nub) args of
["--bash", "--sh", d] -> testsRwrite (read d)
["--bash", "--sh" ] -> return ()
["--bash", d] -> noSh >> testsRwrite (read d)
[ "--sh", d] -> noBash >> testsRwrite (read d)
[ d] -> testsRwrite (read d)
[ ] -> return ()
_ -> error "Invalid arguments."
tests <- readIORef testsR
let msg = "Performing " ++ show tests ++ " tests."
qcArgs = Args Nothing tests tests 32 False
qc = quickCheckWith qcArgs
err "Tests are random ByteStrings, containing any byte but null."
runSh ?> do
err "Testing Sh escaping."
err (Char8.pack msg)
qc prop_echoSh
runBash ?> do
err "Testing Bash escaping."
err (Char8.pack msg)
qc prop_echoBash
err = hPutStrLn stderr
(?>) :: IORef Bool -> IO () -> IO ()
ref ?> action = readIORef ref >>= (`when` action)
|
solidsnack/shell-escape
|
test/Echo.hs
|
bsd-3-clause
| 4,956 | 0 | 14 | 1,721 | 1,282 | 655 | 627 | 99 | 7 |
#define IncludedshiftNewIndicesRight
#ifdef IncludedshiftIndicesRight
#else
#include "../Proofs/shiftIndicesRight.hs"
#endif
{-@ shiftNewIndicesRight
:: xi:RString
-> yi:RString
-> zi:RString
-> tg:{RString | stringLen tg <= stringLen yi }
-> { map (shiftStringRight tg xi (yi <+> zi)) (makeNewIndices yi zi tg) == makeNewIndices (xi <+> yi) zi tg }
@-}
shiftNewIndicesRight :: RString -> RString -> RString -> RString -> Proof
shiftNewIndicesRight xi yi zi tg
| stringLen tg < 2
= makeNewIndices (xi <+> yi) zi tg
==. N
==. map (shiftStringRight tg xi (yi <+> zi)) N
==. map (shiftStringRight tg xi (yi <+> zi)) (makeNewIndices yi zi tg)
*** QED
shiftNewIndicesRight xi yi zi tg
= makeNewIndices (xi <+> yi) zi tg
==. makeIndices ((xi <+> yi) <+> zi) tg
(maxInt (stringLen (xi <+> yi) - (stringLen tg - 1)) 0)
(stringLen (xi <+> yi) - 1 )
==. makeIndices (xi <+> (yi <+> zi)) tg
(stringLen xi + stringLen yi - stringLen tg + 1)
(stringLen xi + stringLen yi - 1 )
?stringAssoc xi yi zi
==. map (shiftStringRight tg xi (yi <+> zi)) (makeIndices (yi <+> zi) tg (stringLen yi - stringLen tg + 1) (stringLen yi - 1))
?shiftIndicesRight (stringLen yi - stringLen tg + 1)
(stringLen yi - 1)
xi
(yi <+> zi)
tg
==. map (shiftStringRight tg xi (yi <+> zi))
(makeIndices (yi <+> zi) tg
(maxInt (stringLen yi - (stringLen tg -1)) 0)
(stringLen yi -1))
==. map (shiftStringRight tg xi (yi <+> zi))
(makeNewIndices yi zi tg)
*** QED
|
nikivazou/verified_string_matching
|
src/Proofs/shiftNewIndicesRight.hs
|
bsd-3-clause
| 1,754 | 0 | 18 | 581 | 562 | 283 | 279 | 30 | 1 |
{-# LANGUAGE CPP #-}
module Lang where
import DynFlags
import Types
listLanguages :: Options -> IO String
#if __GLASGOW_HASKELL__ >= 700
listLanguages opt = return $ convert opt supportedLanguagesAndExtensions
#else
listLanguages opt = return $ convert opt supportedLanguages
#endif
|
conal/ghc-mod
|
Lang.hs
|
bsd-3-clause
| 286 | 0 | 6 | 42 | 43 | 25 | 18 | 6 | 1 |
module ProjectEuler.Problem038 (solution038) where
import Data.Digits
import Data.List
isPandigital :: [Integer] -> Bool
isPandigital = (== [1..9]) . sort
applyProperty :: Integer -> [Integer]
applyProperty = take 9 . concatMap (digits 10) . flip map [1..9] . (*)
genericSolution :: [Integer] -> Integer
genericSolution = unDigits 10 . last . filter isPandigital . map applyProperty
solution038 :: Integer
solution038 = genericSolution [1..10000]
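-- Worked example: for 192 the concatenated products with (1,2,3) are
-- 192, 384 and 576, giving 192384576, which is 1-9 pandigital, so
-- 'applyProperty' 192 yields exactly those nine digits and 192 survives
-- the 'isPandigital' filter.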
|
guillaume-nargeot/project-euler-haskell
|
src/ProjectEuler/Problem038.hs
|
bsd-3-clause
| 452 | 0 | 10 | 67 | 160 | 88 | 72 | 11 | 1 |
#!/usr/bin/env stack
-- stack --resolver lts-8.0 --install-ghc runghc --package text --package optparse-applicative
{-# LANGUAGE OverloadedStrings #-}
module BeGone where
import qualified Data.Char as Char
import qualified Data.Foldable as Foldable
import Data.Monoid
import Data.Text(Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import Options.Applicative
import System.IO
main :: IO ()
main = do
options <- execParser optionsParserInfo
withMaybeFile (inputFile options) ReadMode stdin $ \inputHandle ->
withMaybeFile (outputFile options) WriteMode stdout $ \outputHandle -> do
input <- Text.hGetContents inputHandle
Text.hPutStr outputHandle $ bracesBeGone options input
where
withMaybeFile Nothing _ def k = k def
withMaybeFile (Just filePath) mode def k = withFile filePath mode k
-------------------------------------------------------------------------------
-- * Command-line options and stuff
data Options = Options
{ inputFile :: Maybe FilePath
, outputFile :: Maybe FilePath
, tabWidth :: Int
, minLineWidth :: Int
, braceChars :: String
} deriving (Show)
optionsParserInfo :: ParserInfo Options
optionsParserInfo = info (helper <*> optionsParser)
$ fullDesc <> header "Braces Be Gone"
optionsParser :: Parser Options
optionsParser = Options
<$> optional (strArgument
$ metavar "FILE"
<> help "Input source FILE (default: stdin)"
<> action "file"
)
<*> optional (strOption
$ long "output"
<> short 'o'
<> metavar "FILE"
<> help "Write output to FILE (default: stdout)"
<> action "file"
)
<*> option auto
(long "tab-width"
<> metavar "TABWIDTH"
<> help "Count tab characters as TABWIDTH spaces (default: 8)"
<> value 8
)
<*> option auto
(long "min-line-width"
<> metavar "LINEWIDTH"
<> help "Align braces at least to LINEWIDTH (default: 0)"
<> value 0
)
<*> strOption
(long "brace-chars"
<> metavar "CHARS"
<> help "Use CHARS as braces (default: \"{};\")"
<> value "{};"
)
-------------------------------------------------------------------------------
-- * Here's where the magic happens
bracesBeGone :: Options -> Text -> Text
bracesBeGone options input = Text.unlines paddedLines
where
brokenLines = joinBraceLines $ breakUpLine options <$> Text.lines input
width = maximum $ visualWidth options . fst <$> brokenLines
paddedLines
= uncurry (pad options $ max (width + 1) $ minLineWidth options)
<$> brokenLines
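-- A small illustration with the default options (brace characters "{};",
-- minimum line width 0): the input lines
--
-- > foo();
-- > bar = baz;
--
-- come out with their trailing brace characters re-attached in one aligned
-- column:
--
-- > foo()     ;
-- > bar = baz ;
--
-- Lines consisting only of braces (for example a lone "}") are merged onto
-- the braces of the preceding code line by 'joinBraceLines' below.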
spanEnd :: (Char -> Bool) -> Text -> (Text, Text)
spanEnd p s = (Text.dropWhileEnd p s, Text.takeWhileEnd p s)
breakUpLine :: Options -> Text -> (Text, Text)
breakUpLine options line = (code, Text.filter (not . Char.isSpace) braces)
where
(code, braces)
= spanEnd (\c -> Char.isSpace c || c `elem` braceChars options) line
allSpaces :: Text -> Bool
allSpaces = Text.all Char.isSpace
joinBraceLines :: [(Text, Text)] -> [(Text, Text)]
joinBraceLines = reverse . Foldable.foldl' go []
where
go results (code, braces)
| allSpaces code
, not (allSpaces braces)
, (lastCode, lastBraces):results' <- results
, not (allSpaces lastCode)
= (lastCode, lastBraces <> braces) : results'
| otherwise
= (code, braces) : results
pad :: Options -> Int -> Text -> Text -> Text
pad options width code braces
| allSpaces braces = code
| otherwise
= code
<> Text.replicate (width - visualWidth options code) " "
<> braces
visualWidth :: Options -> Text -> Int
visualWidth options s = Text.length s + Text.count "\t" s * (tabWidth options - 1)
|
ollef/braces-be-gone
|
BracesBeGone.hs
|
bsd-3-clause
| 3,642 | 0 | 16 | 756 | 1,062 | 545 | 517 | 90 | 2 |
module Data.Git.Repository (
Repository (), -- | A git repository handle.
GitDir,
ObjectDir,
IndexFile,
WorkTree,
StartPath,
open,
openWithPaths,
openWithPathsAndObjectDatabase,
database,
discover,
index,
init,
initBare,
isHeadDetached,
isHeadOrphan,
isEmpty,
isBare,
path,
indexPath,
objectDatabasePath,
workingDirectoryPath,
config ) where
import Data.Git.Common
import Data.Git.Errors
import Data.Git.Helpers
import Bindings.Libgit2.Repository
import Bindings.Libgit2.Common
import System.FilePath
import Foreign.C.String
import Data.Git.Types
import Control.Monad
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Storable
import Foreign.Marshal
import Control.Exception
import Prelude hiding (init)
import Data.List hiding (init)
type GitDir = FilePath
type ObjectDir = FilePath
type IndexFile = FilePath
type WorkTree = FilePath
type StartPath = FilePath
open :: FilePath -> IO Repository
open fp = withCString fp $ \str -> ptrFunc (\pp -> c'git_repository_open pp str) toRepository
openWithPaths :: GitDir -> Maybe ObjectDir -> Maybe IndexFile -> Maybe WorkTree -> IO Repository
openWithPaths fp od ixf wt = withCString fp $ \str -> do
odcs <- maybeCStr od
ixfcs <- maybeCStr ixf
wtcs <- maybeCStr wt
repo <- ptrFunc (\ptr -> c'git_repository_open2 ptr str odcs ixfcs wtcs) toRepository
mapM_ free [odcs, ixfcs, wtcs]
return repo
openWithPathsAndObjectDatabase :: GitDir -> ObjectDB -> Maybe IndexFile -> Maybe WorkTree -> IO Repository
openWithPathsAndObjectDatabase gd odb mif mwt = withCString gd $ \gd' -> do
ixf <- maybeCStr mif
wt <- maybeCStr mwt
retVal <- ptrFunc (\pp -> c'git_repository_open3 pp gd' (odbPrim odb) ixf wt) toRepository
mapM_ free [ixf, wt]
return retVal
discover :: StartPath -> Bool -> [FilePath] -> IO (Maybe FilePath)
discover fp across ceils = withCString fp $ \start -> withCString ceils' $ \c -> do
buffer <- mallocArray c'GIT_PATH_MAX
result <- c'git_repository_discover buffer c'GIT_PATH_MAX start (fromBool across) c
retVal <- case result of
0 -> peekCString buffer >>= (return . Just)
_ -> return Nothing
free buffer
return retVal
where ceils' = intercalate [pathListSeparator] ceils
database :: Repository -> IO ObjectDB
database repo = withForeignPtr (repoPrim repo) $ \r -> c'git_repository_database r >>= toObjectDB
index :: Repository -> IO Index
index repo = withForeignPtr (repoPrim repo) $ \r -> ptrFunc (\ix -> c'git_repository_index ix r) toIndex
initHelper i fp = withCString fp $ \str -> ptrFunc (\ptr -> c'git_repository_init ptr str i) toRepository
init :: FilePath -> IO Repository
init = initHelper 0
initBare :: FilePath -> IO Repository
initBare = initHelper 1
isHeadDetached :: Repository -> IO Bool
isHeadDetached = boolHelper c'git_repository_head_detached . repoPrim
isHeadOrphan :: Repository -> IO Bool
isHeadOrphan = boolHelper c'git_repository_head_orphan . repoPrim
isEmpty :: Repository -> IO Bool
isEmpty = boolHelper c'git_repository_is_empty . repoPrim
isBare :: Repository -> IO Bool
isBare = boolHelper c'git_repository_is_bare . repoPrim
-- don't need to worry about handling CString here. It's (at least currently) a struct member.
pathHelper num repo = withForeignPtr (repoPrim repo) (\ptr -> c'git_repository_path ptr num) >>= peekCString
path :: Repository -> IO FilePath
path = pathHelper 0
indexPath :: Repository -> IO FilePath
indexPath = pathHelper 1
objectDatabasePath :: Repository -> IO FilePath
objectDatabasePath = pathHelper 2
workingDirectoryPath :: Repository -> IO FilePath
workingDirectoryPath = pathHelper 3
config :: Repository -> FilePath -> FilePath -> IO Config
config repo user system = withCString user $ \u -> withCString system $ \s ->
withForeignPtr (repoPrim repo) $ \r -> ptrFunc (\pp -> c'git_repository_config pp r u s) toConfig
|
iand675/hgit
|
Data/Git/Repository.hs
|
bsd-3-clause
| 3,881 | 0 | 17 | 658 | 1,195 | 616 | 579 | 103 | 2 |
{-#LANGUAGE Arrows, FlexibleInstances, MultiParamTypeClasses, FlexibleContexts,
UndecidableInstances, FunctionalDependencies, NoMonomorphismRestriction #-}
module Control.Arrow.Transformer.Automaton.Monad
(readerArrow, swapsnd, autoToMonad, co, monadToAuto,
pushError,popError,rstrength,
ArrowAddAutomaton (..), dispatch) where
import Control.Monad
import Control.Monad.Cont
import Control.Monad.State (MonadState (..))
import Control.Arrow
import Control.Arrow.Operations
import qualified Control.Arrow.Transformer as AT
import Control.Arrow.Transformer.All
import Control.Arrow.Transformer.Automaton.Maybe
import Data.Maybe
import qualified Data.Map as M
unAM (ArrowMonad f) = f
monadToAuto
:: (ArrowAddAutomaton a may a', ArrowApply a') =>
(i -> ContT (o, a i o) (ArrowMonad a') z) -> a i o
monadToAuto f = liftAutomaton (proc i ->
unAM ((f i) `runContT` (error "automaton ended")) -<< ())
co
:: (ArrowApply a', ArrowAddAutomaton a may a') =>
o -> ContT (o, a i o) (ArrowMonad a') i
co o = ContT (\fi ->
return (o, liftAutomaton (proc i -> unAM (fi i) -<< ())))
autoToMonad
:: (ArrowApply a', ArrowAddAutomaton a may a') =>
a i (Either o z)
-> i
-> ContT (o, a i o) (ArrowMonad a') z
autoToMonad f i = do
x <- lift $ ArrowMonad $ (proc () -> elimAutomaton f -< i)
case x of
(Right z,_) -> return z
(Left o,f') -> autoToMonad f' =<< co o
class (ArrowChoice a, ArrowChoice may, ArrowChoice a', ArrowApply a')
=> ArrowAddAutomaton a may a' | a -> a', a -> may, may -> a where
elimAutomaton :: a i o -> a' i (o, a i o)
liftAutomaton :: a' i (o, a i o) -> a i o
constantAutomaton :: a' i o -> a i o
toMaybeAutomaton :: a i o -> may i o
fromMaybeAutomaton :: may i o -> a i o
liftMaybeAutomaton :: a' i (o, Maybe (a i o)) -> may i o
elimMaybeAutomaton :: may i o -> a' i (o, Maybe (a i o))
instance (ArrowChoice a, ArrowApply a) =>
ArrowAddAutomaton (Automaton a) (MaybeAutomaton a) a where
elimAutomaton (Automaton f) = f
liftAutomaton = Automaton
constantAutomaton f = Automaton (f >>>
arr (flip (,) (constantAutomaton f)))
toMaybeAutomaton = maybeFromAutomaton
fromMaybeAutomaton = automatonFromMaybe
liftMaybeAutomaton = MaybeAutomaton
elimMaybeAutomaton = mAut
instance (Arrow a, Arrow may, Arrow a', ArrowAddAutomaton a may a')
=> ArrowAddAutomaton (StateArrow s a) (StateArrow s may) (StateArrow s a') where
elimAutomaton = autoState . elimAutomaton . runState
liftAutomaton = stateArrow . liftAutomaton . stateAuto
constantAutomaton = stateArrow . constantAutomaton . runState
toMaybeAutomaton = stateArrow . toMaybeAutomaton . runState
fromMaybeAutomaton = stateArrow . fromMaybeAutomaton . runState
liftMaybeAutomaton = error "not implemented yet"
elimMaybeAutomaton = error "not implemented yet"
instance (ArrowState s a, ArrowApply a) => (MonadState s (ArrowMonad a)) where
put s = ArrowMonad (proc () -> store -< s)
get = ArrowMonad fetch
instance (Arrow a, Arrow a', ArrowAddAutomaton a may a')
=> ArrowAddAutomaton (ReaderArrow r a) (ReaderArrow r may) (ReaderArrow r a') where
elimAutomaton = (>>> (second (arr readerArrow))) .
readerArrow . elimAutomaton . runReader
elimMaybeAutomaton = (>>> (second (arr (fmap readerArrow)))) .
readerArrow . elimMaybeAutomaton . runReader
liftAutomaton = readerArrow . liftAutomaton .
(>>> (second (arr runReader))) . runReader
liftMaybeAutomaton = readerArrow . liftMaybeAutomaton .
(>>> (second (arr (fmap runReader)))) . runReader
constantAutomaton = readerArrow . constantAutomaton . runReader
toMaybeAutomaton = readerArrow . toMaybeAutomaton . runReader
fromMaybeAutomaton = readerArrow . fromMaybeAutomaton . runReader
instance (ArrowChoice a, ArrowChoice may, ArrowChoice a', ArrowAddAutomaton a may a')
=> ArrowAddAutomaton (ErrorArrow ex a) (ErrorArrow ex may) (ErrorArrow ex a') where
elimAutomaton = pushError .
(>>> second (arr pushError) >>> arr rstrength)
. elimAutomaton . popError
elimMaybeAutomaton = pushError .
(>>> second (arr (fmap pushError)) >>> arr rstrength)
. elimMaybeAutomaton . popError
liftAutomaton f =
pushError $ liftAutomaton $
(>>> arr (revrstrength (liftAutomaton f))
>>> second (arr popError))
$ popError f
liftMaybeAutomaton f =
pushError $ liftMaybeAutomaton $
(>>> arr (revrstrength (Just $ fromMaybeAutomaton $ liftMaybeAutomaton f))
>>> second (arr (fmap popError)))
$ popError f
constantAutomaton = pushError . constantAutomaton . popError
toMaybeAutomaton = pushError . toMaybeAutomaton . popError
fromMaybeAutomaton = pushError . fromMaybeAutomaton . popError
dispatch = dispatch0 M.empty
dispatch0
:: (ArrowAddAutomaton a may a',
Ord k) =>
M.Map k (may i o) -> (k -> may i o) -> may (i, k) o
dispatch0 mp def = liftMaybeAutomaton $ proc (i,k) -> do
let f = fromMaybe (def k) (M.lookup k mp)
(o,f') <- app -< (elimMaybeAutomaton f,i)
case f' of
Nothing -> returnA -< (o, Nothing)
Just f' -> returnA -< (o, Just $ fromMaybeAutomaton $ dispatch0 (M.insert k (toMaybeAutomaton f') mp) def)
--Utility functions
swapsnd :: ((a, b), c) -> ((a, c), b)
swapsnd ~(~(x, y), z) = ((x, z), y)
rstrength :: (Either ex a, b) -> Either ex (a, b)
rstrength (Left ex, _) = Left ex
rstrength (Right a, b) = Right (a, b)
revrstrength :: b -> Either ex (a,b) -> (Either ex a, b)
revrstrength def (Left ex) = (Left ex, def)
revrstrength _ (Right (a,b)) = (Right a, b)
autoState :: (Arrow a, Arrow a') => a' (i,s) ((o,s), a (i,s) (o,s)) ->
StateArrow s a' i (o,StateArrow s a i o)
autoState f = stateArrow $ f >>> second (arr stateArrow) >>> arr swapsnd
stateAuto :: (Arrow a, Arrow a') => StateArrow s a' i (o,StateArrow s a i o) ->
a' (i,s) ((o,s), a (i,s) (o,s))
stateAuto f = runState (f >>> second (arr runState)) >>> arr swapsnd
--simulating the unexported data constructors for StateArrow,
--ReaderArrow, ErrorArrow
stateArrow :: (Arrow a) => a (t, s) (b, s) -> StateArrow s a t b
stateArrow f = proc i -> do
s <- fetch -< ()
(o,s') <- AT.lift f -< (i,s)
store -< s'
returnA -< o
readerArrow :: (Arrow a) => a (e,r) b -> ReaderArrow r a e b
readerArrow f = proc i -> do
r <- readState -< ()
AT.lift f -< (i,r)
popError :: (ArrowChoice a) => ErrorArrow ex a e b -> a e (Either ex b)
popError f = runError (f >>> arr Right) (arr snd >>> arr Left)
pushError :: (ArrowChoice a) => a e (Either ex b) -> ErrorArrow ex a e b
pushError f = (AT.lift f) >>> (raise ||| arr id)
|
jhp/on-a-horse
|
Control/Arrow/Transformer/Automaton/Monad.hs
|
bsd-3-clause
| 7,135 | 9 | 19 | 1,885 | 2,716 | 1,425 | 1,291 | 141 | 2 |
{-# language CPP #-}
-- No documentation found for Chapter "PipelineCreationFeedbackFlagBits"
module Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits ( pattern PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT
, pattern PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT
, pattern PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT
, PipelineCreationFeedbackFlags
, PipelineCreationFeedbackFlagBits( PIPELINE_CREATION_FEEDBACK_VALID_BIT
, PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT
, PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT
, ..
)
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import GHC.Show (showString)
import Numeric (showHex)
import Vulkan.Zero (Zero)
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Foreign.Storable (Storable)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Vulkan.Core10.FundamentalTypes (Flags)
-- No documentation found for TopLevel "VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT"
pattern PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = PIPELINE_CREATION_FEEDBACK_VALID_BIT
-- No documentation found for TopLevel "VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT"
pattern PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT
-- No documentation found for TopLevel "VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT"
pattern PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT
type PipelineCreationFeedbackFlags = PipelineCreationFeedbackFlagBits
-- | VkPipelineCreationFeedbackFlagBits - Bitmask specifying pipeline or
-- pipeline stage creation feedback
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_pipeline_creation_feedback VK_EXT_pipeline_creation_feedback>,
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_3 VK_VERSION_1_3>,
-- 'Vulkan.Core13.Promoted_From_VK_EXT_pipeline_creation_feedback.PipelineCreationFeedback',
-- 'Vulkan.Core13.Promoted_From_VK_EXT_pipeline_creation_feedback.PipelineCreationFeedbackCreateInfo',
-- 'PipelineCreationFeedbackFlags'
newtype PipelineCreationFeedbackFlagBits = PipelineCreationFeedbackFlagBits Flags
deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
-- | 'PIPELINE_CREATION_FEEDBACK_VALID_BIT' indicates that the feedback
-- information is valid.
pattern PIPELINE_CREATION_FEEDBACK_VALID_BIT = PipelineCreationFeedbackFlagBits 0x00000001
-- | 'PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT'
-- indicates that a readily usable pipeline or pipeline stage was found in
-- the @pipelineCache@ specified by the application in the pipeline
-- creation command.
--
-- An implementation /should/ set the
-- 'PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT' bit if
-- it was able to avoid the large majority of pipeline or pipeline stage
-- creation work by using the @pipelineCache@ parameter of
-- 'Vulkan.Core10.Pipeline.createGraphicsPipelines',
-- 'Vulkan.Extensions.VK_KHR_ray_tracing_pipeline.createRayTracingPipelinesKHR',
-- 'Vulkan.Extensions.VK_NV_ray_tracing.createRayTracingPipelinesNV', or
-- 'Vulkan.Core10.Pipeline.createComputePipelines'. When an implementation
-- sets this bit for the entire pipeline, it /may/ leave it unset for any
-- stage.
--
-- Note
--
-- Implementations are encouraged to provide a meaningful signal to
-- applications using this bit. The intention is to communicate to the
-- application that the pipeline or pipeline stage was created \"as fast as
-- it gets\" using the pipeline cache provided by the application. If an
-- implementation uses an internal cache, it is discouraged from setting
-- this bit as the feedback would be unactionable.
pattern PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT = PipelineCreationFeedbackFlagBits 0x00000002
-- | 'PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT' indicates
-- that the base pipeline specified by the @basePipelineHandle@ or
-- @basePipelineIndex@ member of the @Vk*PipelineCreateInfo@ structure was
-- used to accelerate the creation of the pipeline.
--
-- An implementation /should/ set the
-- 'PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT' bit if it
-- was able to avoid a significant amount of work by using the base
-- pipeline.
--
-- Note
--
-- While \"significant amount of work\" is subjective, implementations are
-- encouraged to provide a meaningful signal to applications using this
-- bit. For example, a 1% reduction in duration may not warrant setting
-- this bit, while a 50% reduction would.
pattern PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT = PipelineCreationFeedbackFlagBits 0x00000004
conNamePipelineCreationFeedbackFlagBits :: String
conNamePipelineCreationFeedbackFlagBits = "PipelineCreationFeedbackFlagBits"
enumPrefixPipelineCreationFeedbackFlagBits :: String
enumPrefixPipelineCreationFeedbackFlagBits = "PIPELINE_CREATION_FEEDBACK_"
showTablePipelineCreationFeedbackFlagBits :: [(PipelineCreationFeedbackFlagBits, String)]
showTablePipelineCreationFeedbackFlagBits =
[ (PIPELINE_CREATION_FEEDBACK_VALID_BIT , "VALID_BIT")
, (PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT, "APPLICATION_PIPELINE_CACHE_HIT_BIT")
, (PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT , "BASE_PIPELINE_ACCELERATION_BIT")
]
instance Show PipelineCreationFeedbackFlagBits where
showsPrec = enumShowsPrec enumPrefixPipelineCreationFeedbackFlagBits
showTablePipelineCreationFeedbackFlagBits
conNamePipelineCreationFeedbackFlagBits
(\(PipelineCreationFeedbackFlagBits x) -> x)
(\x -> showString "0x" . showHex x)
instance Read PipelineCreationFeedbackFlagBits where
readPrec = enumReadPrec enumPrefixPipelineCreationFeedbackFlagBits
showTablePipelineCreationFeedbackFlagBits
conNamePipelineCreationFeedbackFlagBits
PipelineCreationFeedbackFlagBits
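-- Usage sketch (an added illustration, not part of this generated module):
-- in a module of its own, the application-cache-hit and base-pipeline bits
-- can be tested with the derived 'Bits' instance. The function names are
-- hypothetical; the flags value would come from a PipelineCreationFeedback
-- record filled in by the implementation.
--
--   import Data.Bits ((.&.), zeroBits)
--   import Vulkan.Core13.Enums.PipelineCreationFeedbackFlagBits
--
--   usedApplicationCache :: PipelineCreationFeedbackFlags -> Bool
--   usedApplicationCache flags =
--     flags .&. PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT /= zeroBits
--
--   usedBasePipeline :: PipelineCreationFeedbackFlags -> Bool
--   usedBasePipeline flags =
--     flags .&. PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT /= zeroBits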
| expipiplus1/vulkan | src/Vulkan/Core13/Enums/PipelineCreationFeedbackFlagBits.hs | bsd-3-clause | 7,019 | 1 | 10 | 1,468 | 462 | 293 | 169 | -1 | -1 |
{-# LANGUAGE ViewPatterns, TupleSections #-}
module Evaluator.Evaluate where
import Evaluator.Syntax
import Core.Syntax
import Renaming
import Utilities
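-- Overview (added annotation, not in the original file): 'eval' unrolls a
-- renamed term into a 'Chain' of machine steps. Values end the chain with an
-- 'Answer' and variables with a 'Question'; applications, primops and case
-- expressions push a continuation frame ('Push ... `Wagon` ...'), while
-- letrec allocates its bindings ('Allocate'). Once a value reaches a pushed
-- frame, 'resumeEvaluationContextFrame' dispatches to the matching rule:
-- beta reduction ('apply'), case selection ('scrutinise'), primitive
-- application ('primop') or heap update ('update').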
var :: IdSupply -> InVar -> Chain
var ids in_x = Caboose (ids, Question in_x)
eval :: IdSupply -> In Term -> Chain
eval ids (rn, Var x) = var ids (mkInVar rn x)
eval ids (rn, Value v) = Caboose (ids, Answer (rn, v))
eval ids (rn, App e1 x2) = Push (Apply (mkInVar rn x2)) `Wagon` eval ids (rn, e1)
eval ids (rn, PrimOp pop (x:xs)) = Push (PrimApply pop [] (map (mkInVar rn) xs)) `Wagon` var ids (mkInVar rn x)
eval ids (rn, Case x alts) = Push (Scrutinise (rn, alts)) `Wagon` var ids (mkInVar rn x)
eval ids (rn, LetRec (unzip -> (xs, es)) e)
= Allocate (map (mkInVar rn') xs `zip` map (rn',) es) `Wagon` eval ids' (rn', e)
where (ids', rn', _xs') = renameBinders ids rn xs
resumeEvaluationContextFrame :: IdSupply -> EvaluationContextFrame -> In Value -> Chain
resumeEvaluationContextFrame ids (Apply in_x2) in_v = apply ids in_v in_x2
resumeEvaluationContextFrame ids (Scrutinise in_alts) in_v = scrutinise ids in_v in_alts
resumeEvaluationContextFrame ids (PrimApply pop in_vs in_xs) in_v = primop ids pop in_vs in_v in_xs
resumeEvaluationContextFrame ids (Update in_x) in_v = update ids in_x in_v
apply :: IdSupply -> In Value -> InVar -> Chain
apply ids (rn, Lambda x e_body) in_x2 = eval ids (insertRenaming x (outvar in_x2) rn, e_body)
scrutinise :: IdSupply -> In Value -> In [Alt] -> Chain
scrutinise ids (_, Literal l) (rn_alts, alts) = eval ids (head [(rn_alts, alt_e) | (LiteralAlt alt_l, alt_e) <- alts, alt_l == l])
scrutinise ids (rn_v, Data dc xs) (rn_alts, alts) = eval ids (head [(insertRenamings (alt_xs `zip` map (rename rn_v) xs) rn_alts, alt_e) | (DataAlt alt_dc alt_xs, alt_e) <- alts, alt_dc == dc])
primop :: IdSupply -> PrimOp -> [In Value] -> In Value -> [InVar] -> Chain
primop ids pop [(_, Literal l1)] (_, Literal l2) [] = Caboose (ids, Answer (emptyRenaming, Literal (f pop l1 l2)))
where f pop = case pop of Add -> (+); Subtract -> (-); Multiply -> (*); Divide -> div
primop ids pop in_vs in_v (in_x:in_xs) = Push (PrimApply pop (in_vs ++ [in_v]) in_xs) `Wagon` var ids in_x
update :: IdSupply -> InVar -> In Value -> Chain
update ids in_x in_v = Allocate [(in_x, second Value in_v)] `Wagon` Caboose (ids, Answer in_v)
| batterseapower/core-haskell | Evaluator/Evaluate.hs | bsd-3-clause | 2,409 | 0 | 15 | 495 | 1,108 | 584 | 524 | 33 | 4 |
module XML (
module XML.Server,
module XML.Character,
module XML.Eve
) where
import XML.Server
import XML.Character
import XML.Eve
| Frefreak/Gideon | src/XML.hs | bsd-3-clause | 148 | 0 | 5 | 34 | 39 | 25 | 14 | 7 | 0 |