code (string, 5 to 1.03M) | repo_name (string, 5 to 90) | path (string, 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
-- This file is formatted with Ormolu
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Types
( Named (..),
NamedIcon,
IconInfo,
Icon (..),
SyntaxNode (..),
NodeName (..),
Port (..),
NameAndPort (..),
Connection,
Edge (..),
EdgeOption (..),
Drawing (..),
IDState (..),
SpecialQDiagram,
SpecialBackend,
SpecialNum,
SgNamedNode,
IngSyntaxGraph,
LikeApplyFlavor (..),
CaseOrMultiIfTag (..),
Labeled (..),
EmbedDirection (..),
EmbedInfo (..),
AnnotatedGraph,
NodeInfo (..),
Embedder (..),
mkEmbedder,
EmbedderSyntaxNode,
)
where
import qualified Data.Graph.Inductive as ING
import qualified Data.IntMap as IM
import Data.Set (Set, empty)
import Data.Typeable (Typeable)
import Diagrams.Prelude (Any, IsName, Path, QDiagram, Renderable, V2)
import Diagrams.TwoD.Text (Text)
newtype NodeName = NodeName Int deriving (Typeable, Eq, Ord, Show)
instance IsName NodeName
data Named a = Named {naName :: NodeName, naVal :: a}
deriving (Show, Eq, Ord, Functor)
type NamedIcon = Named Icon
data Labeled a = Labeled {laValue :: a, laLabel :: String}
deriving (Show, Eq, Ord)
instance Functor Labeled where
fmap f (Labeled value str) = Labeled (f value) str
instance Applicative Labeled where
pure x = Labeled x ""
(Labeled f fStr) <*> (Labeled x xStr) = Labeled (f x) (fStr <> xStr)
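-- Illustrative note (added): the Applicative instance applies the wrapped function
-- and concatenates the labels, e.g.
--
-- > Labeled (+1) "inc" <*> Labeled 41 "x"  ==  Labeled 42 "incx"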
type IconInfo = IM.IntMap Icon
-- | A datatype that represents an icon.
-- The TextBoxIcon's data is the text that appears in the text box.
data Icon
= TextBoxIcon String
| MultiIfIcon
Int -- Number of alternatives
| LambdaIcon
[String] -- Parameter labels
(Maybe NodeName) -- Function body expression
(Set NodeName) -- Nodes inside the lambda
| CaseIcon Int
| CaseResultIcon
| BindTextBoxIcon String
| NestedApply
LikeApplyFlavor -- apply or compose
(Maybe NodeName) -- The function for apply, or the argument for compose
[Maybe NodeName] -- list of arguments or functions
| NestedPApp
(Labeled (Maybe NamedIcon)) -- Data constructor
[Labeled (Maybe NamedIcon)] -- Arguments
| NestedCaseIcon [Maybe NodeName]
| NestedMultiIfIcon [Maybe NodeName]
deriving (Show, Eq, Ord)
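-- Illustrative note (added): a nested application such as @f x y@ could be
-- represented as
-- @NestedApply ApplyNodeFlavor (Just (NodeName 0)) [Just (NodeName 1), Just (NodeName 2)]@,
-- where the node names are hypothetical.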
data LikeApplyFlavor = ApplyNodeFlavor | ComposeNodeFlavor
deriving (Show, Eq, Ord)
data CaseOrMultiIfTag = CaseTag | MultiIfTag deriving (Show, Eq, Ord)
-- TODO The full edge does not need to be included, just the port.
data Embedder a = Embedder
{ -- | Set of embedded nodes
emEmbedded :: Set (NodeName, Edge),
emNode :: a
}
deriving (Show, Eq, Ord, Functor)
mkEmbedder :: a -> Embedder a
mkEmbedder = Embedder empty
type EmbedderSyntaxNode = Embedder SyntaxNode
type SgNamedNode = Named EmbedderSyntaxNode
-- TODO remove Ints from SyntaxNode data constructors.
data SyntaxNode
= -- Function application, composition, and applying to a composition
-- The list of nodes is unordered (replace with a map?)
ApplyNode LikeApplyFlavor Int
| PatternApplyNode String [Labeled (Maybe SgNamedNode)]
| NameNode String -- Identifiers or symbols
| BindNameNode String
| LiteralNode String -- Literal values like the string "Hello World"
| FunctionDefNode -- Function definition (ie. lambda expression)
[String] -- Parameter labels
(Set NodeName) -- Nodes inside the lambda
| CaseResultNode -- TODO remove caseResultNode
| CaseOrMultiIfNode CaseOrMultiIfTag Int
deriving (Show, Eq, Ord)
newtype Port = Port Int deriving (Typeable, Eq, Ord, Show)
instance IsName Port
data NameAndPort = NameAndPort NodeName (Maybe Port) deriving (Show, Eq, Ord)
type Connection = (NameAndPort, NameAndPort)
-- TODO Consider removing EdgeOption since it's unused.
data EdgeOption = EdgeInPattern deriving (Show, Eq, Ord)
-- | An Edge has a name of the source icon, and its optional port number,
-- and the name of the destination icon, and its optional port number.
data Edge = Edge
{ edgeOptions :: [EdgeOption],
edgeConnection :: Connection
}
deriving (Show, Eq, Ord)
-- | A drawing is a map from names to Icons, a list of edges,
-- and a map of names to subDrawings
data Drawing = Drawing [NamedIcon] [Edge] deriving (Show, Eq)
-- | IDState is an Abstract Data Type that is used as a state whose value is a
-- unique id.
newtype IDState = IDState Int deriving (Eq, Show)
type SpecialNum n =
(Floating n, RealFrac n, RealFloat n, Typeable n, Show n, Enum n)
-- Note that SpecialBackend is a constraint synonym, not a type synonym.
type SpecialBackend b n =
(SpecialNum n, Renderable (Path V2 n) b, Renderable (Text n) b)
type SpecialQDiagram b n = QDiagram b V2 n Any
type IngSyntaxGraph gr = gr SgNamedNode Edge
data EmbedDirection
= EdEmbedFrom -- The tail
| EdEmbedTo -- The head
deriving (Show, Eq)
-- A Nothing eiEmbedDir means the edge is not embedded.
data EmbedInfo a = EmbedInfo {eiEmbedDir :: Maybe EmbedDirection, eiVal :: a}
deriving (Show, Eq, Functor)
type AnnotatedGraph gr = gr (NodeInfo SgNamedNode) (EmbedInfo Edge)
data NodeInfo a = NodeInfo
{ niParent :: Maybe ING.Node,
niVal :: a
}
deriving (Show, Eq, Functor, Ord)
| rgleichman/glance | app/Types.hs | gpl-3.0 | 5,269 | 0 | 10 | 1,078 | 1,305 | 769 | 536 | 124 | 1 |
module Frame.Data
( Payload
, getPayload
, mkPayload
, putPayload
, endStreamF
, isEndStream
, paddedF
, isPadded
, getData
) where
import qualified Data.Binary.Get as Get
import qualified Data.Binary.Put as Put
import qualified Frame.Internal.Padding as Padding
import Control.Monad.Except(ExceptT)
import Control.Monad.Trans.Class(lift)
import Data.Binary.Get(Get)
import Data.Binary.Put(Put)
import Data.ByteString.Lazy(ByteString)
import Frame.Internal.Padding(PaddingDesc(..))
import ProjectPrelude
import ErrorCodes
endStreamF :: FrameFlags
endStreamF = 0x1
isEndStream :: FrameFlags -> Bool
isEndStream f = testFlag f endStreamF
paddedF :: FrameFlags
paddedF = 0x8
isPadded :: FrameFlags -> Bool
isPadded f = testFlag f paddedF
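-- Illustrative note (added, assuming 'testFlag' from ProjectPrelude performs a
-- bitwise test of the flag): a DATA frame whose flags are 0x9 has both
-- END_STREAM (0x1) and PADDED (0x8) set, so 'isEndStream 0x9' and 'isPadded 0x9'
-- are both True.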
data Payload = Payload {
pPadding :: Maybe PaddingDesc,
pData :: ByteString
}
getPayload :: FrameLength -> FrameFlags -> StreamId -> ExceptT ConnError Get Payload
getPayload fLength flags _ = do
(fLength, paddingLength) <- Padding.getLength fLength flags
pData <- lift $ Get.getLazyByteString $
fromIntegral (maybe fLength ((fLength -) . fromIntegral) paddingLength)
pPadding <- lift $ Padding.getPadding paddingLength
return $ Payload { pPadding, pData }
putPayload :: Payload -> Put
putPayload Payload { pPadding, pData } = do
Padding.putLength pPadding
Put.putLazyByteString pData
Padding.putPadding pPadding
mkPayload :: ByteString -> Payload
mkPayload pData = Payload { pPadding = Nothing, pData }
getData :: Payload -> ByteString
getData = pData
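-- A minimal usage sketch (added; assumes 'Padding.putLength' and 'Padding.putPadding'
-- emit nothing for a 'Nothing' padding descriptor): a payload built with 'mkPayload'
-- carries no padding, so serializing it yields exactly the data bytes.
--
-- > Put.runPut (putPayload (mkPayload "hello"))  -- "hello" (with OverloadedStrings)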
| authchir/SoSe17-FFP-haskell-http2-server | src/Frame/Data.hs | gpl-3.0 | 1,534 | 0 | 14 | 240 | 447 | 253 | 194 | -1 | -1 |
import System.IO
import Control.Monad
main = main' 0
main' counter = do
p <- hIsEOF stdin
  -- swapping the line above to isEOF would require an extra import
unless p $ do
x <- getLine
let newLength = length x
let ellipsis = if newLength > 50 then "..." else ""
-- display shows line count starting at 1
  -- need to allow, say, a 3-digit field (001) with leading-zero suppression
let message = show (counter + 1) ++ " " ++ show newLength ++ " " ++ take 50 x ++ ellipsis
putStrLn message
main' (succ counter)
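-- Example session (illustrative): given the stdin lines "hello" and an
-- 80-character line, the program prints
--   1 5 hello
--   2 80 <the first 50 characters of the line>...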
| xpika/line-size | Main.hs | gpl-3.0 | 503 | 0 | 20 | 114 | 155 | 73 | 82 | 12 | 2 |
{-|
Module : Data.Map.KDMap
Description : KD map data structure for storing spike amplitudes.
Undocumented, will be replaced by kdt package.
Copyright : (c) 2015 Greg Hale, Shea Levy
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : GHC, Linux
-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Data.Map.KDMap where
import Control.Applicative
import Data.Maybe (maybeToList)
import Data.Monoid
import qualified Data.List as L
import Data.Ord (comparing)
import qualified Data.Foldable as F
data KDMap k a = KDEmpty
| KDLeaf k a Depth
| KDBranch k a Depth (KDMap k a) (KDMap k a)
deriving (Eq, Show)
newtype Weight = Weight {unWeight :: Double}
deriving (Show, Eq, Num, Enum, Fractional,Real,Ord)
newtype Depth = Depth {unDepth :: Int}
deriving (Eq,Show,Num,Enum,Integral,Real,Ord)
class KDKey k where
pointD :: k -> Depth -> Double
pointW :: k -> Weight
pointSize :: k -> Depth
pointDDistSq :: k -> k -> Depth -> Double
pointDDistSq a b i = (pointD b i - pointD a i)^(2::Int)
pointDistSq :: k -> k -> Double
pointDistSq a b = sum $ map (pointDDistSq a b)
[0..pointSize a - 1]
dimOrder :: k -> k -> Depth -> Ordering
dimOrder a b n = compare (pointD a n) (pointD b n)
dSucc :: k -> Depth -> Depth
dPred :: k -> Depth -> Depth
instance F.Foldable (KDMap k) where
foldr _ z KDEmpty = z
foldr f z (KDLeaf _ a _) = f a z
foldr f z (KDBranch _ v _ kdLeft kdRight) =
F.foldr f (f v (F.foldr f z kdLeft)) kdRight
data Point2 = Point2 {p2x :: Double
,p2y :: Double
,p2w :: Weight
}
deriving (Show, Eq)
data Point4 = Point4 {p4a :: Double
,p4b :: Double
,p4c :: Double
,p4d :: Double
,p4w :: Weight
}
deriving (Show, Eq)
instance KDKey Point2 where
pointD p 0 = p2x p
pointD p 1 = p2y p
pointD _ n = error $ "Point2 out of bounds index: " ++ show n
pointSize _ = 2
pointW p = p2w p
dSucc p d = succ d `mod` fromIntegral (pointSize p)
dPred p d = pred d `mod` fromIntegral (pointSize p)
instance KDKey Point4 where
pointD p 0 = p4a p
pointD p 1 = p4b p
pointD p 2 = p4c p
pointD p 3 = p4d p
pointD _ n = error $ "Point4 out of bounds index: " ++ show n
pointSize p = 4
pointW p = p4w p
dSucc p d = succ d `mod` fromIntegral (pointSize p)
dPred p d = pred d `mod` fromIntegral (pointSize p)
instance Monoid Point2 where
mempty = Point2 0 0 0
a `mappend` b = Point2 x' y' w'
where w' = p2w a + p2w b
aFrac = realToFrac $ p2w a / w' :: Double
bFrac = realToFrac $ p2w b / w' :: Double
x' = p2x a * aFrac + p2x b * bFrac :: Double
y' = p2y a * aFrac + p2y b * bFrac :: Double
instance Monoid Point4 where
mempty = Point4 0 0 0 0 0
a `mappend` b = Point4 a' b' c' d' w'
where w' = p4w a + p4w b
aFrac = realToFrac $ p4w a / w' :: Double
bFrac = realToFrac $ p4w b / w'
x' n = pointD a n * aFrac + pointD b n * bFrac
(a', b', c', d') = (x' 0, x' 1, x' 2, x' 3)
toList :: KDMap k a -> [(k,a)]
toList KDEmpty = []
toList (KDLeaf k a _) = [(k,a)]
toList (KDBranch k a _ kdLeft kdRight) = (k,a) : toList kdLeft ++ toList kdRight
closer :: (Eq k, KDKey k) => Maybe (k,a) -> Maybe (k,a) -> k -> Maybe (k,a)
closer Nothing Nothing _ = Nothing
closer a Nothing _ = a
closer Nothing b _ = b
closer (Just optA@(kA,_)) (Just optB@(kB,_)) k
| pointDistSq kA k < pointDistSq kB k = Just optA
| otherwise = Just optB
add :: (Eq k, Eq a, Monoid a, KDKey k, Monoid k) => KDMap k a -> Double -> k -> a -> KDMap k a
add m thresh k a = case closest k m of
Nothing -> insert 0 k a m
Just (k',a') -> if pointDistSq k k' <= thresh^(2::Int)
then insert 0 (k <> k') (a <> a') . delete k' $ m
else insert 0 k a m
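-- Descriptive note (added): 'add' merges the new point into its nearest existing key
-- when that key lies within 'thresh' (Euclidean distance), combining keys and values
-- with their Monoid instances; otherwise it inserts the point as a fresh entry.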
delete :: (Eq k, KDKey k) => k -> KDMap k a -> KDMap k a
delete _ KDEmpty = KDEmpty
delete k m@(KDLeaf k' _ _)
| k == k' = KDEmpty
| otherwise = m
delete k (KDBranch k' a' d kdLeft kdRight)
| k == k' = fromListWithDepth d (toList kdLeft ++ toList kdRight)
| otherwise = case dimOrder k k' d of
EQ -> KDBranch k' a' d (delete k kdLeft) (delete k kdRight)
LT -> KDBranch k' a' d (delete k kdLeft) kdRight
GT -> KDBranch k' a' d kdLeft (delete k kdRight)
closest :: (Eq a, Eq k, KDKey k) => k -> KDMap k a -> Maybe (k,a)
closest _ KDEmpty = Nothing
closest _ (KDLeaf k' a' _) = Just (k',a')
closest k (KDBranch k' a' d' kdLeft kdRight) = case dimOrder k k' d' of
LT -> findNearest kdLeft kdRight
_ -> findNearest kdRight kdLeft
where
findNearest treeA treeB =
let mainCandidates = case closest k treeA of
Nothing -> [(k',a')]
Just (k'',a'') -> [(k',a'),(k'',a'')]
otherCandidates
| (pointDistSq k k') >= (pointD k d' - pointD k' d')^(2::Int) =
maybeToList (closest k treeB)
| otherwise = []
in Just $ L.minimumBy (comparing (pointDistSq k . fst))
(mainCandidates ++ otherCandidates)
------------------------------------------------------------------------------
allInRange :: (Eq a, Eq k, KDKey k) => Double -> k -> KDMap k a -> [(k,a)]
allInRange distThreshold k =
filter (\p -> pointDistSq k (fst p) < distThreshold^(2::Int)) .
allInRangeAux distThreshold k
allInRangeAux :: (Eq a, Eq k, KDKey k) => Double -> k
-> KDMap k a -> [(k,a)]
allInRangeAux _ _ KDEmpty = []
allInRangeAux distThreshold k' (KDLeaf k a d)
| pointDDistSq k' k d <= distThreshold^(2::Int) = [(k,a)]
| otherwise = []
allInRangeAux distThreshold k' (KDBranch k a d treeL treeR)
| pointDDistSq k' k d <= distThreshold^(2::Int) =
[(k,a)]
++ allInRangeAux distThreshold k' treeL
++ allInRangeAux distThreshold k' treeR
| otherwise = []
------------------------------------------------------------------------------
isValid :: (Eq k, KDKey k,Show a,Show k) => KDMap k a -> Bool
isValid KDEmpty = True
isValid (KDLeaf _ _ _) = True
isValid (KDBranch k _ d kdLeft kdRight) =
thisValid && isValid kdLeft && isValid kdRight
where thisValid = all (\(k',_) -> dimOrder k' k d == LT) (toList kdLeft)
&&
all (\(k',_) -> dimOrder k' k d /= LT) (toList kdRight)
fromListWithDepth :: (KDKey k) => Depth -> [(k,a)] -> KDMap k a
fromListWithDepth _ [] = KDEmpty
fromListWithDepth d [(k,a)] = KDLeaf k a d
fromListWithDepth d ps@((k,_):_) = node'
where
psSort = L.sortBy (comparing (flip pointD d . fst)) ps
medInd = L.length psSort `div` 2
(kMed,aMed) = psSort !! medInd
kdLeft = fromListWithDepth (dSucc k d) (take medInd psSort)
kdRight = fromListWithDepth (dSucc k d) (drop (medInd + 1) psSort)
node' = KDBranch kMed aMed d kdLeft kdRight
keys :: KDMap k a -> [k]
keys KDEmpty = []
keys (KDLeaf k _ _ ) = [k]
keys (KDBranch k _ _ kdLeft kdRight) = k : (keys kdLeft ++ keys kdRight)
insert :: (Eq k, KDKey k) => Depth -> k -> a -> KDMap k a -> KDMap k a
insert d k a KDEmpty = KDLeaf k a d
insert _ k a (KDLeaf k' a' d')
| k == k' = KDLeaf k a d'
| otherwise = case dimOrder k k' d' of
LT -> KDBranch k' a' d' (KDLeaf k a (dSucc k d')) KDEmpty
_ -> KDBranch k' a' d' KDEmpty (KDLeaf k a (dSucc k d'))
insert _ k a (KDBranch k' a' d' kdLeft kdRight)
| k == k' = KDBranch k a d' kdLeft kdRight
| otherwise = case dimOrder k k' d' of
LT -> KDBranch k' a' d' (insert (dSucc k d') k a kdLeft) kdRight
_ -> KDBranch k' a' d' kdLeft (insert (dSucc k d') k a kdRight)
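-- A minimal usage sketch (added; the points below are hypothetical): build a small
-- 2-D map and query the nearest key to a probe point.
examplePoints :: [(Point2, Char)]
examplePoints = [(Point2 0 0 1, 'a'), (Point2 3 4 1, 'b'), (Point2 (-2) 1 1, 'c')]

exampleMap :: KDMap Point2 Char
exampleMap = fromListWithDepth 0 examplePoints

-- Expected to be Just (Point2 0 0 1, 'a'), since (0,0) is nearest to (1,1).
exampleNearest :: Maybe (Point2, Char)
exampleNearest = closest (Point2 1 1 1) exampleMap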
| imalsogreg/arte-ephys | tetrode-ephys/src/Data/Map/KDMap.hs | gpl-3.0 | 7,926 | 0 | 18 | 2,353 | 3,455 | 1,766 | 1,689 | 173 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.CloudSearch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Cloud Search provides cloud-based search capabilities over Google
-- Workspace data. The Cloud Search API allows indexing of non-Google
-- Workspace data into Cloud Search.
--
-- /See:/ <https://developers.google.com/cloud-search/docs/guides/ Cloud Search API Reference>
module Network.Google.CloudSearch
(
-- * Service Configuration
cloudSearchService
-- * OAuth Scopes
, cloudSearchSettingsQueryScope
, cloudSearchStatsIndexingScope
, cloudSearchIndexingScope
, cloudSearchScope
, cloudSearchSettingsIndexingScope
, cloudSearchQueryScope
, cloudSearchDebugScope
, cloudSearchStatsScope
, cloudSearchSettingsScope
-- * API Declaration
, CloudSearchAPI
-- * Resources
-- ** cloudsearch.debug.datasources.items.checkAccess
, module Network.Google.Resource.CloudSearch.Debug.Datasources.Items.CheckAccess
-- ** cloudsearch.debug.datasources.items.searchByViewUrl
, module Network.Google.Resource.CloudSearch.Debug.Datasources.Items.SearchByViewURL
-- ** cloudsearch.debug.datasources.items.unmappedids.list
, module Network.Google.Resource.CloudSearch.Debug.Datasources.Items.UnmAppedids.List
-- ** cloudsearch.debug.identitysources.items.listForunmappedidentity
, module Network.Google.Resource.CloudSearch.Debug.Identitysources.Items.ListForunmAppedidentity
-- ** cloudsearch.debug.identitysources.unmappedids.list
, module Network.Google.Resource.CloudSearch.Debug.Identitysources.UnmAppedids.List
-- ** cloudsearch.indexing.datasources.deleteSchema
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.DeleteSchema
-- ** cloudsearch.indexing.datasources.getSchema
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.GetSchema
-- ** cloudsearch.indexing.datasources.items.delete
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Delete
-- ** cloudsearch.indexing.datasources.items.deleteQueueItems
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.DeleteQueueItems
-- ** cloudsearch.indexing.datasources.items.get
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Get
-- ** cloudsearch.indexing.datasources.items.index
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Index
-- ** cloudsearch.indexing.datasources.items.list
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.List
-- ** cloudsearch.indexing.datasources.items.poll
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Poll
-- ** cloudsearch.indexing.datasources.items.push
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Push
-- ** cloudsearch.indexing.datasources.items.unreserve
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Unreserve
-- ** cloudsearch.indexing.datasources.items.upload
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Upload
-- ** cloudsearch.indexing.datasources.updateSchema
, module Network.Google.Resource.CloudSearch.Indexing.Datasources.UpdateSchema
-- ** cloudsearch.media.upload
, module Network.Google.Resource.CloudSearch.Media.Upload
-- ** cloudsearch.operations.get
, module Network.Google.Resource.CloudSearch.Operations.Get
-- ** cloudsearch.operations.lro.list
, module Network.Google.Resource.CloudSearch.Operations.Lro.List
-- ** cloudsearch.query.search
, module Network.Google.Resource.CloudSearch.Query.Search
-- ** cloudsearch.query.sources.list
, module Network.Google.Resource.CloudSearch.Query.Sources.List
-- ** cloudsearch.query.suggest
, module Network.Google.Resource.CloudSearch.Query.Suggest
-- ** cloudsearch.settings.datasources.create
, module Network.Google.Resource.CloudSearch.Settings.Datasources.Create
-- ** cloudsearch.settings.datasources.delete
, module Network.Google.Resource.CloudSearch.Settings.Datasources.Delete
-- ** cloudsearch.settings.datasources.get
, module Network.Google.Resource.CloudSearch.Settings.Datasources.Get
-- ** cloudsearch.settings.datasources.list
, module Network.Google.Resource.CloudSearch.Settings.Datasources.List
-- ** cloudsearch.settings.datasources.update
, module Network.Google.Resource.CloudSearch.Settings.Datasources.Update
-- ** cloudsearch.settings.getCustomer
, module Network.Google.Resource.CloudSearch.Settings.GetCustomer
-- ** cloudsearch.settings.searchapplications.create
, module Network.Google.Resource.CloudSearch.Settings.SearchApplications.Create
-- ** cloudsearch.settings.searchapplications.delete
, module Network.Google.Resource.CloudSearch.Settings.SearchApplications.Delete
-- ** cloudsearch.settings.searchapplications.get
, module Network.Google.Resource.CloudSearch.Settings.SearchApplications.Get
-- ** cloudsearch.settings.searchapplications.list
, module Network.Google.Resource.CloudSearch.Settings.SearchApplications.List
-- ** cloudsearch.settings.searchapplications.reset
, module Network.Google.Resource.CloudSearch.Settings.SearchApplications.Reset
-- ** cloudsearch.settings.searchapplications.update
, module Network.Google.Resource.CloudSearch.Settings.SearchApplications.Update
-- ** cloudsearch.settings.updateCustomer
, module Network.Google.Resource.CloudSearch.Settings.UpdateCustomer
-- ** cloudsearch.stats.getIndex
, module Network.Google.Resource.CloudSearch.Stats.GetIndex
-- ** cloudsearch.stats.getQuery
, module Network.Google.Resource.CloudSearch.Stats.GetQuery
-- ** cloudsearch.stats.getSession
, module Network.Google.Resource.CloudSearch.Stats.GetSession
-- ** cloudsearch.stats.getUser
, module Network.Google.Resource.CloudSearch.Stats.GetUser
-- ** cloudsearch.stats.index.datasources.get
, module Network.Google.Resource.CloudSearch.Stats.Index.Datasources.Get
-- ** cloudsearch.stats.query.searchapplications.get
, module Network.Google.Resource.CloudSearch.Stats.Query.SearchApplications.Get
-- ** cloudsearch.stats.session.searchapplications.get
, module Network.Google.Resource.CloudSearch.Stats.Session.SearchApplications.Get
-- ** cloudsearch.stats.user.searchapplications.get
, module Network.Google.Resource.CloudSearch.Stats.User.SearchApplications.Get
-- * Types
-- ** QueryInterpretationReason
, QueryInterpretationReason (..)
-- ** DriveMimeTypeRestrictType
, DriveMimeTypeRestrictType (..)
-- ** DriveFollowUpRestrictType
, DriveFollowUpRestrictType (..)
-- ** ItemContent
, ItemContent
, itemContent
, icHash
, icContentFormat
, icContentDataRef
, icInlineContent
-- ** CustomerUserStats
, CustomerUserStats
, customerUserStats
, cusThirtyDaysActiveUsersCount
, cusDate
, cusOneDayActiveUsersCount
, cusSevenDaysActiveUsersCount
-- ** Photo
, Photo
, photo
, pURL
-- ** SearchItemsByViewURLRequest
, SearchItemsByViewURLRequest
, searchItemsByViewURLRequest
, sibvurDebugOptions
, sibvurPageToken
, sibvurViewURL
-- ** SearchApplication
, SearchApplication
, searchApplication
, saDataSourceRestrictions
, saOperationIds
, saDefaultFacetOptions
, saScoringConfig
, saName
, saSourceConfig
, saQueryInterpretationConfig
, saDisplayName
, saEnableAuditLog
, saDefaultSortOptions
-- ** BooleanOperatorOptions
, BooleanOperatorOptions
, booleanOperatorOptions
, booOperatorName
-- ** Status
, Status
, status
, sDetails
, sCode
, sMessage
-- ** EnumPropertyOptions
, EnumPropertyOptions
, enumPropertyOptions
, epoPossibleValues
, epoOrderedRanking
, epoOperatorOptions
-- ** ItemStatusCode
, ItemStatusCode (..)
-- ** UnreserveItemsRequest
, UnreserveItemsRequest
, unreserveItemsRequest
, uirQueue
, uirDebugOptions
, uirConnectorName
-- ** DateOperatorOptions
, DateOperatorOptions
, dateOperatorOptions
, dooOperatorName
, dooLessThanOperatorName
, dooGreaterThanOperatorName
-- ** GetDataSourceIndexStatsResponse
, GetDataSourceIndexStatsResponse
, getDataSourceIndexStatsResponse
, gdsisrStats
-- ** ObjectDisplayOptions
, ObjectDisplayOptions
, objectDisplayOptions
, odoMetalines
, odoObjectDisplayLabel
-- ** QueryItem
, QueryItem
, queryItem
, qiIsSynthetic
-- ** CustomerQueryStats
, CustomerQueryStats
, customerQueryStats
, cqsQueryCountByStatus
, cqsDate
-- ** ListUnmAppedIdentitiesResponse
, ListUnmAppedIdentitiesResponse
, listUnmAppedIdentitiesResponse
, luairNextPageToken
, luairUnmAppedIdentities
-- ** DeleteQueueItemsRequest
, DeleteQueueItemsRequest
, deleteQueueItemsRequest
, dqirQueue
, dqirDebugOptions
, dqirConnectorName
-- ** SearchResult
, SearchResult
, searchResult
, srDebugInfo
, srSnippet
, srURL
, srClusteredResults
, srMetadata
, srTitle
-- ** SpellResult
, SpellResult
, spellResult
, srSuggestedQuery
-- ** PeopleSuggestion
, PeopleSuggestion
, peopleSuggestion
, psPerson
-- ** QuerySource
, QuerySource
, querySource
, qsShortName
, qsDisplayName
, qsSource
, qsOperators
-- ** ListOperationsResponse
, ListOperationsResponse
, listOperationsResponse
, lorNextPageToken
, lorOperations
-- ** SuggestResponse
, SuggestResponse
, suggestResponse
, srSuggestResults
-- ** GetSearchApplicationQueryStatsResponse
, GetSearchApplicationQueryStatsResponse
, getSearchApplicationQueryStatsResponse
, gsaqsrStats
-- ** RepositoryError
, RepositoryError
, repositoryError
, reHTTPStatusCode
, reType
, reErrorMessage
-- ** QueryInterpretationInterpretationType
, QueryInterpretationInterpretationType (..)
-- ** HTMLPropertyOptions
, HTMLPropertyOptions
, htmlPropertyOptions
, hpoRetrievalImportance
, hpoOperatorOptions
-- ** PropertyDefinition
, PropertyDefinition
, propertyDefinition
, pdIsSuggestable
, pdEnumPropertyOptions
, pdHTMLPropertyOptions
, pdObjectPropertyOptions
, pdIsReturnable
, pdTimestampPropertyOptions
, pdIntegerPropertyOptions
, pdIsWildcardSearchable
, pdName
, pdIsRepeatable
, pdDoublePropertyOptions
, pdDisplayOptions
, pdTextPropertyOptions
, pdIsSortable
, pdIsFacetable
, pdBooleanPropertyOptions
, pdDatePropertyOptions
-- ** SortOptions
, SortOptions
, sortOptions
, soSortOrder
, soOperatorName
-- ** ObjectPropertyOptions
, ObjectPropertyOptions
, objectPropertyOptions
, opoSubobjectProperties
-- ** InteractionType
, InteractionType (..)
-- ** QueryOperatorType
, QueryOperatorType (..)
-- ** SearchResponse
, SearchResponse
, searchResponse
, sSpellResults
, sFacetResults
, sDebugInfo
, sResults
, sHasMoreResults
, sResultCounts
, sResultCountExact
, sResultCountEstimate
, sQueryInterpretation
, sStructuredResults
, sErrorInfo
-- ** SuggestResult
, SuggestResult
, suggestResult
, sPeopleSuggestion
, sQuerySuggestion
, sSuggestedQuery
, sSource
-- ** TextValues
, TextValues
, textValues
, tvValues
-- ** DriveLocationRestrict
, DriveLocationRestrict
, driveLocationRestrict
, dlrType
-- ** ListQuerySourcesResponse
, ListQuerySourcesResponse
, listQuerySourcesResponse
, lqsrNextPageToken
, lqsrSources
-- ** DoubleOperatorOptions
, DoubleOperatorOptions
, doubleOperatorOptions
, dOperatorName
-- ** Operation
, Operation
, operation
, oDone
, oError
, oResponse
, oName
, oMetadata
-- ** Person
, Person
, person
, pEmailAddresses
, pPersonNames
, pPhotos
, pName
, pObfuscatedId
-- ** CompositeFilter
, CompositeFilter
, compositeFilter
, cfSubFilters
, cfLogicOperator
-- ** ResultDisplayLine
, ResultDisplayLine
, resultDisplayLine
, rdlFields
-- ** DoubleValues
, DoubleValues
, doubleValues
, dvValues
-- ** Snippet
, Snippet
, snippet
, sMatchRanges
, sSnippet
-- ** TextOperatorOptions
, TextOperatorOptions
, textOperatorOptions
, tooOperatorName
, tooExactMatchWithOperator
-- ** EnumPropertyOptionsOrderedRanking
, EnumPropertyOptionsOrderedRanking (..)
-- ** QueryInterpretationOptions
, QueryInterpretationOptions
, queryInterpretationOptions
, qioDisableNlInterpretation
, qioEnableVerbatimMode
, qioDisableSupplementalResults
-- ** ResetSearchApplicationRequest
, ResetSearchApplicationRequest
, resetSearchApplicationRequest
, rsarDebugOptions
-- ** IntegerPropertyOptionsOrderedRanking
, IntegerPropertyOptionsOrderedRanking (..)
-- ** ItemMetadata
, ItemMetadata
, itemMetadata
, imSourceRepositoryURL
, imHash
, imObjectType
, imContainerName
, imInteractions
, imMimeType
, imUpdateTime
, imKeywords
, imContextAttributes
, imTitle
, imContentLanguage
, imSearchQualityMetadata
, imCreateTime
-- ** FilterOptions
, FilterOptions
, filterOptions
, foObjectType
, foFilter
-- ** StructuredResult
, StructuredResult
, structuredResult
, srPerson
-- ** ProcessingError
, ProcessingError
, processingError
, peFieldViolations
, peCode
, peErrorMessage
-- ** PushItemType
, PushItemType (..)
-- ** ListItemNamesForUnmAppedIdentityResponse
, ListItemNamesForUnmAppedIdentityResponse
, listItemNamesForUnmAppedIdentityResponse
, linfuairNextPageToken
, linfuairItemNames
-- ** CompositeFilterLogicOperator
, CompositeFilterLogicOperator (..)
-- ** ItemACL
, ItemACL
, itemACL
, iaOwners
, iaReaders
, iaACLInheritanceType
, iaInheritACLFrom
, iaDeniedReaders
-- ** Value
, Value
, value
, vIntegerValue
, vTimestampValue
, vDoubleValue
, vStringValue
, vDateValue
, vBooleanValue
-- ** FieldViolation
, FieldViolation
, fieldViolation
, fvField
, fvDescription
-- ** SourceScoringConfigSourceImportance
, SourceScoringConfigSourceImportance (..)
-- ** Metaline
, Metaline
, metaline
, mProperties
-- ** SearchApplicationSessionStats
, SearchApplicationSessionStats
, searchApplicationSessionStats
, sassSearchSessionsCount
, sassDate
-- ** FacetBucket
, FacetBucket
, facetBucket
, fbValue
, fbCount
, fbPercentage
-- ** StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- ** AuditLoggingSettings
, AuditLoggingSettings
, auditLoggingSettings
, alsProject
, alsLogDataReadActions
, alsLogDataWriteActions
, alsLogAdminReadActions
-- ** RetrievalImportanceImportance
, RetrievalImportanceImportance (..)
-- ** TimestampOperatorOptions
, TimestampOperatorOptions
, timestampOperatorOptions
, tOperatorName
, tLessThanOperatorName
, tGreaterThanOperatorName
-- ** ProcessingErrorCode
, ProcessingErrorCode (..)
-- ** GetSearchApplicationSessionStatsResponse
, GetSearchApplicationSessionStatsResponse
, getSearchApplicationSessionStatsResponse
, gsassrStats
-- ** IntegerOperatorOptions
, IntegerOperatorOptions
, integerOperatorOptions
, iooOperatorName
, iooLessThanOperatorName
, iooGreaterThanOperatorName
-- ** SortOptionsSortOrder
, SortOptionsSortOrder (..)
-- ** QuerySuggestion
, QuerySuggestion
, querySuggestion
-- ** ListSearchApplicationsResponse
, ListSearchApplicationsResponse
, listSearchApplicationsResponse
, lsarNextPageToken
, lsarSearchApplications
-- ** ResultDebugInfo
, ResultDebugInfo
, resultDebugInfo
, rdiFormattedDebugInfo
-- ** QueryCountByStatus
, QueryCountByStatus
, queryCountByStatus
, qcbsCount
, qcbsStatusCode
-- ** ItemCountByStatus
, ItemCountByStatus
, itemCountByStatus
, icbsCount
, icbsStatusCode
-- ** TimestampPropertyOptions
, TimestampPropertyOptions
, timestampPropertyOptions
, tpoOperatorOptions
-- ** ResultCounts
, ResultCounts
, resultCounts
, rcSourceResultCounts
-- ** FreshnessOptions
, FreshnessOptions
, freshnessOptions
, foFreshnessDuration
, foFreshnessProperty
-- ** DebugOptions
, DebugOptions
, debugOptions
, doEnableDebugging
-- ** IntegerPropertyOptions
, IntegerPropertyOptions
, integerPropertyOptions
, ipoMaximumValue
, ipoOrderedRanking
, ipoMinimumValue
, ipoOperatorOptions
-- ** DataSourceRestriction
, DataSourceRestriction
, dataSourceRestriction
, dsrFilterOptions
, dsrSource
-- ** Schema
, Schema
, schema
, sObjectDefinitions
, sOperationIds
-- ** GetSearchApplicationUserStatsResponse
, GetSearchApplicationUserStatsResponse
, getSearchApplicationUserStatsResponse
, gsausrStats
-- ** DriveTimeSpanRestrict
, DriveTimeSpanRestrict
, driveTimeSpanRestrict
, dtsrType
-- ** ResultDisplayField
, ResultDisplayField
, resultDisplayField
, rdfProperty
, rdfOperatorName
, rdfLabel
-- ** UpdateSchemaRequest
, UpdateSchemaRequest
, updateSchemaRequest
, usrValidateOnly
, usrSchema
, usrDebugOptions
-- ** Date
, Date
, date
, dDay
, dYear
, dMonth
-- ** IndexingDatasourcesItemsDeleteMode
, IndexingDatasourcesItemsDeleteMode (..)
-- ** DisplayedProperty
, DisplayedProperty
, displayedProperty
, dpPropertyName
-- ** FacetResult
, FacetResult
, facetResult
, frSourceName
, frBuckets
, frObjectType
, frOperatorName
-- ** DriveMimeTypeRestrict
, DriveMimeTypeRestrict
, driveMimeTypeRestrict
, dmtrType
-- ** StructuredDataObject
, StructuredDataObject
, structuredDataObject
, sdoProperties
-- ** Media
, Media
, media
, mResourceName
-- ** ItemStructuredData
, ItemStructuredData
, itemStructuredData
, isdHash
, isdObject
-- ** DriveFollowUpRestrict
, DriveFollowUpRestrict
, driveFollowUpRestrict
, dfurType
-- ** DateValues
, DateValues
, dateValues
, dValues
-- ** SearchApplicationUserStats
, SearchApplicationUserStats
, searchApplicationUserStats
, sausThirtyDaysActiveUsersCount
, sausDate
, sausOneDayActiveUsersCount
, sausSevenDaysActiveUsersCount
-- ** NamedProperty
, NamedProperty
, namedProperty
, npDoubleValues
, npTextValues
, npDateValues
, npName
, npBooleanValue
, npObjectValues
, npHTMLValues
, npEnumValues
, npTimestampValues
, npIntegerValues
-- ** DebugIdentitysourcesUnmAppedidsListResolutionStatusCode
, DebugIdentitysourcesUnmAppedidsListResolutionStatusCode (..)
-- ** MatchRange
, MatchRange
, matchRange
, mrStart
, mrEnd
-- ** ResponseDebugInfo
, ResponseDebugInfo
, responseDebugInfo
, rFormattedDebugInfo
-- ** ListDataSourceResponse
, ListDataSourceResponse
, listDataSourceResponse
, ldsrNextPageToken
, ldsrSources
-- ** SearchRequest
, SearchRequest
, searchRequest
, srSortOptions
, srDataSourceRestrictions
, srQueryInterpretationOptions
, srStart
, srQuery
, srFacetOptions
, srContextAttributes
, srPageSize
, srRequestOptions
-- ** Name
, Name
, name
, nDisplayName
-- ** SourceResultCount
, SourceResultCount
, sourceResultCount
, srcHasMoreResults
, srcResultCountExact
, srcResultCountEstimate
, srcSource
-- ** PollItemsRequestStatusCodesItem
, PollItemsRequestStatusCodesItem (..)
-- ** SearchApplicationQueryStats
, SearchApplicationQueryStats
, searchApplicationQueryStats
, saqsQueryCountByStatus
, saqsDate
-- ** SourceConfig
, SourceConfig
, sourceConfig
, scCrowdingConfig
, scScoringConfig
, scSource
-- ** UnmAppedIdentityResolutionStatusCode
, UnmAppedIdentityResolutionStatusCode (..)
-- ** ListItemsResponse
, ListItemsResponse
, listItemsResponse
, lirNextPageToken
, lirItems
-- ** ScoringConfig
, ScoringConfig
, scoringConfig
, scDisablePersonalization
, scDisableFreshness
-- ** StartUploadItemRequest
, StartUploadItemRequest
, startUploadItemRequest
, suirDebugOptions
, suirConnectorName
-- ** UploadItemRef
, UploadItemRef
, uploadItemRef
, uirName
-- ** PushItemRequest
, PushItemRequest
, pushItemRequest
, pirDebugOptions
, pirConnectorName
, pirItem
-- ** GetCustomerQueryStatsResponse
, GetCustomerQueryStatsResponse
, getCustomerQueryStatsResponse
, gcqsrStats
-- ** DoublePropertyOptions
, DoublePropertyOptions
, doublePropertyOptions
, dpoOperatorOptions
-- ** QueryOperator
, QueryOperator
, queryOperator
, qoIsSuggestable
, qoIsReturnable
, qoObjectType
, qoIsRepeatable
, qoOperatorName
, qoIsSortable
, qoIsFacetable
, qoDisplayName
, qoLessThanOperatorName
, qoType
, qoEnumValues
, qoGreaterThanOperatorName
-- ** Xgafv
, Xgafv (..)
-- ** PollItemsResponse
, PollItemsResponse
, pollItemsResponse
, pirItems
-- ** SourceScoringConfig
, SourceScoringConfig
, sourceScoringConfig
, sscSourceImportance
-- ** EmailAddress
, EmailAddress
, emailAddress
, eaEmailAddress
-- ** RetrievalImportance
, RetrievalImportance
, retrievalImportance
, riImportance
-- ** DataSourceIndexStats
, DataSourceIndexStats
, dataSourceIndexStats
, dsisItemCountByStatus
, dsisDate
-- ** SourceCrowdingConfig
, SourceCrowdingConfig
, sourceCrowdingConfig
, sccNumSuggestions
, sccNumResults
-- ** QueryInterpretationConfig
, QueryInterpretationConfig
, queryInterpretationConfig
, qicForceDisableSupplementalResults
, qicForceVerbatimMode
-- ** Interaction
, Interaction
, interaction
, iInteractionTime
, iPrincipal
, iType
-- ** Principal
, Principal
, principal
, pUserResourceName
, pGroupResourceName
, pGsuitePrincipal
-- ** IndexItemRequest
, IndexItemRequest
, indexItemRequest
, iirMode
, iirDebugOptions
, iirConnectorName
, iirItem
, iirIndexItemOptions
-- ** RepositoryErrorType
, RepositoryErrorType (..)
-- ** TextPropertyOptions
, TextPropertyOptions
, textPropertyOptions
, tRetrievalImportance
, tOperatorOptions
-- ** EnumValuePair
, EnumValuePair
, enumValuePair
, evpIntegerValue
, evpStringValue
-- ** DriveLocationRestrictType
, DriveLocationRestrictType (..)
-- ** Metadata
, Metadata
, metadata
, mObjectType
, mOwner
, mMimeType
, mUpdateTime
, mDisplayOptions
, mSource
, mCreateTime
, mFields
-- ** UpdateDataSourceRequest
, UpdateDataSourceRequest
, updateDataSourceRequest
, udsrDebugOptions
, udsrSource
-- ** DataSource
, DataSource
, dataSource
, dsShortName
, dsItemsVisibility
, dsOperationIds
, dsDisableServing
, dsIndexingServiceAccounts
, dsDisableModifications
, dsName
, dsDisplayName
-- ** HTMLValues
, HTMLValues
, htmlValues
, hvValues
-- ** HTMLOperatorOptions
, HTMLOperatorOptions
, htmlOperatorOptions
, hooOperatorName
-- ** Item
, Item
, item
, iStatus
, iItemType
, iPayload
, iStructuredData
, iQueue
, iContent
, iName
, iVersion
, iMetadata
, iACL
-- ** Source
, Source
, source
, sName
, sPredefinedSource
-- ** PropertyDisplayOptions
, PropertyDisplayOptions
, propertyDisplayOptions
, pdoDisplayLabel
-- ** ObjectValues
, ObjectValues
, objectValues
, ovValues
-- ** ObjectOptions
, ObjectOptions
, objectOptions
, ooFreshnessOptions
, ooDisplayOptions
-- ** FacetOptions
, FacetOptions
, facetOptions
, fSourceName
, fObjectType
, fNumFacetBuckets
, fOperatorName
-- ** SuggestRequest
, SuggestRequest
, suggestRequest
, sDataSourceRestrictions
, sQuery
, sRequestOptions
-- ** Filter
, Filter
, filter'
, fCompositeFilter
, fValueFilter
-- ** ObjectDefinition
, ObjectDefinition
, objectDefinition
, odName
, odOptions
, odPropertyDefinitions
-- ** ItemItemType
, ItemItemType (..)
-- ** ItemCountByStatusStatusCode
, ItemCountByStatusStatusCode (..)
-- ** VPCSettings
, VPCSettings
, vpcSettings
, vsProject
-- ** PollItemsRequest
, PollItemsRequest
, pollItemsRequest
, pQueue
, pDebugOptions
, pConnectorName
, pStatusCodes
, pLimit
-- ** QueryInterpretation
, QueryInterpretation
, queryInterpretation
, qiInterpretedQuery
, qiReason
, qiInterpretationType
-- ** UnmAppedIdentity
, UnmAppedIdentity
, unmAppedIdentity
, uaiResolutionStatusCode
, uaiExternalIdentity
-- ** OperationMetadata
, OperationMetadata
, operationMetadata
, omAddtional
-- ** CustomerIndexStats
, CustomerIndexStats
, customerIndexStats
, cisItemCountByStatus
, cisDate
-- ** PushItem
, PushItem
, pushItem
, piRepositoryError
, piContentHash
, piStructuredDataHash
, piPayload
, piQueue
, piMetadataHash
, piType
-- ** CustomerSessionStats
, CustomerSessionStats
, customerSessionStats
, cssSearchSessionsCount
, cssDate
-- ** ErrorMessage
, ErrorMessage
, errorMessage
, emSource
, emErrorMessage
-- ** BooleanPropertyOptions
, BooleanPropertyOptions
, booleanPropertyOptions
, bpoOperatorOptions
-- ** ValueFilter
, ValueFilter
, valueFilter
, vfValue
, vfOperatorName
-- ** EnumOperatorOptions
, EnumOperatorOptions
, enumOperatorOptions
, eooOperatorName
-- ** ContextAttribute
, ContextAttribute
, contextAttribute
, caValues
, caName
-- ** DatePropertyOptions
, DatePropertyOptions
, datePropertyOptions
, dOperatorOptions
-- ** EnumValues
, EnumValues
, enumValues
, evValues
-- ** GSuitePrincipal
, GSuitePrincipal
, gSuitePrincipal
, gspGsuiteGroupEmail
, gspGsuiteUserEmail
, gspGsuiteDomain
-- ** IndexItemOptions
, IndexItemOptions
, indexItemOptions
, iioAllowUnknownGsuitePrincipals
-- ** SearchQualityMetadata
, SearchQualityMetadata
, searchQualityMetadata
, sqmQuality
-- ** ItemContentContentFormat
, ItemContentContentFormat (..)
-- ** TimestampValues
, TimestampValues
, timestampValues
, tValues
-- ** ErrorInfo
, ErrorInfo
, errorInfo
, eiErrorMessages
-- ** ResultDisplayMetadata
, ResultDisplayMetadata
, resultDisplayMetadata
, rdmMetalines
, rdmObjectTypeLabel
-- ** OperationResponse
, OperationResponse
, operationResponse
, orAddtional
-- ** IntegerValues
, IntegerValues
, integerValues
, ivValues
-- ** GetCustomerSessionStatsResponse
, GetCustomerSessionStatsResponse
, getCustomerSessionStatsResponse
, gcssrStats
-- ** SearchItemsByViewURLResponse
, SearchItemsByViewURLResponse
, searchItemsByViewURLResponse
, sibvurNextPageToken
, sibvurItems
-- ** SourcePredefinedSource
, SourcePredefinedSource (..)
-- ** GetCustomerIndexStatsResponse
, GetCustomerIndexStatsResponse
, getCustomerIndexStatsResponse
, gcisrStats
-- ** RestrictItem
, RestrictItem
, restrictItem
, riDriveLocationRestrict
, riDriveTimeSpanRestrict
, riDriveMimeTypeRestrict
, riDriveFollowUpRestrict
, riSearchOperator
-- ** DriveTimeSpanRestrictType
, DriveTimeSpanRestrictType (..)
-- ** IndexItemRequestMode
, IndexItemRequestMode (..)
-- ** CustomerSettings
, CustomerSettings
, customerSettings
, csAuditLoggingSettings
, csVPCSettings
-- ** ItemACLACLInheritanceType
, ItemACLACLInheritanceType (..)
-- ** CheckAccessResponse
, CheckAccessResponse
, checkAccessResponse
, carHasAccess
-- ** RequestOptions
, RequestOptions
, requestOptions
, roLanguageCode
, roDebugOptions
, roTimeZone
, roSearchApplicationId
-- ** ItemStatus
, ItemStatus
, itemStatus
, isProcessingErrors
, isCode
, isRepositoryErrors
-- ** GetCustomerUserStatsResponse
, GetCustomerUserStatsResponse
, getCustomerUserStatsResponse
, gcusrStats
) where
import Network.Google.Prelude
import Network.Google.CloudSearch.Types
import Network.Google.Resource.CloudSearch.Debug.Datasources.Items.CheckAccess
import Network.Google.Resource.CloudSearch.Debug.Datasources.Items.SearchByViewURL
import Network.Google.Resource.CloudSearch.Debug.Datasources.Items.UnmAppedids.List
import Network.Google.Resource.CloudSearch.Debug.Identitysources.Items.ListForunmAppedidentity
import Network.Google.Resource.CloudSearch.Debug.Identitysources.UnmAppedids.List
import Network.Google.Resource.CloudSearch.Indexing.Datasources.DeleteSchema
import Network.Google.Resource.CloudSearch.Indexing.Datasources.GetSchema
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Delete
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.DeleteQueueItems
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Get
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Index
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.List
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Poll
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Push
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Unreserve
import Network.Google.Resource.CloudSearch.Indexing.Datasources.Items.Upload
import Network.Google.Resource.CloudSearch.Indexing.Datasources.UpdateSchema
import Network.Google.Resource.CloudSearch.Media.Upload
import Network.Google.Resource.CloudSearch.Operations.Get
import Network.Google.Resource.CloudSearch.Operations.Lro.List
import Network.Google.Resource.CloudSearch.Query.Search
import Network.Google.Resource.CloudSearch.Query.Sources.List
import Network.Google.Resource.CloudSearch.Query.Suggest
import Network.Google.Resource.CloudSearch.Settings.Datasources.Create
import Network.Google.Resource.CloudSearch.Settings.Datasources.Delete
import Network.Google.Resource.CloudSearch.Settings.Datasources.Get
import Network.Google.Resource.CloudSearch.Settings.Datasources.List
import Network.Google.Resource.CloudSearch.Settings.Datasources.Update
import Network.Google.Resource.CloudSearch.Settings.GetCustomer
import Network.Google.Resource.CloudSearch.Settings.SearchApplications.Create
import Network.Google.Resource.CloudSearch.Settings.SearchApplications.Delete
import Network.Google.Resource.CloudSearch.Settings.SearchApplications.Get
import Network.Google.Resource.CloudSearch.Settings.SearchApplications.List
import Network.Google.Resource.CloudSearch.Settings.SearchApplications.Reset
import Network.Google.Resource.CloudSearch.Settings.SearchApplications.Update
import Network.Google.Resource.CloudSearch.Settings.UpdateCustomer
import Network.Google.Resource.CloudSearch.Stats.GetIndex
import Network.Google.Resource.CloudSearch.Stats.GetQuery
import Network.Google.Resource.CloudSearch.Stats.GetSession
import Network.Google.Resource.CloudSearch.Stats.GetUser
import Network.Google.Resource.CloudSearch.Stats.Index.Datasources.Get
import Network.Google.Resource.CloudSearch.Stats.Query.SearchApplications.Get
import Network.Google.Resource.CloudSearch.Stats.Session.SearchApplications.Get
import Network.Google.Resource.CloudSearch.Stats.User.SearchApplications.Get
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Cloud Search API service.
type CloudSearchAPI =
SettingsDatasourcesListResource :<|>
SettingsDatasourcesGetResource
:<|> SettingsDatasourcesCreateResource
:<|> SettingsDatasourcesDeleteResource
:<|> SettingsDatasourcesUpdateResource
:<|> SettingsSearchApplicationsListResource
:<|> SettingsSearchApplicationsGetResource
:<|> SettingsSearchApplicationsCreateResource
:<|> SettingsSearchApplicationsResetResource
:<|> SettingsSearchApplicationsDeleteResource
:<|> SettingsSearchApplicationsUpdateResource
:<|> SettingsGetCustomerResource
:<|> SettingsUpdateCustomerResource
:<|> StatsUserSearchApplicationsGetResource
:<|> StatsQuerySearchApplicationsGetResource
:<|> StatsIndexDatasourcesGetResource
:<|> StatsSessionSearchApplicationsGetResource
:<|> StatsGetQueryResource
:<|> StatsGetUserResource
:<|> StatsGetIndexResource
:<|> StatsGetSessionResource
:<|> DebugDatasourcesItemsUnmAppedidsListResource
:<|> DebugDatasourcesItemsCheckAccessResource
:<|> DebugDatasourcesItemsSearchByViewURLResource
:<|> DebugIdentitysourcesUnmAppedidsListResource
:<|>
DebugIdentitysourcesItemsListForunmAppedidentityResource
:<|> MediaUploadResource
:<|> QuerySourcesListResource
:<|> QuerySuggestResource
:<|> QuerySearchResource
:<|> OperationsLroListResource
:<|> OperationsGetResource
:<|> IndexingDatasourcesItemsListResource
:<|> IndexingDatasourcesItemsUnreserveResource
:<|> IndexingDatasourcesItemsGetResource
:<|> IndexingDatasourcesItemsDeleteQueueItemsResource
:<|> IndexingDatasourcesItemsPollResource
:<|> IndexingDatasourcesItemsUploadResource
:<|> IndexingDatasourcesItemsIndexResource
:<|> IndexingDatasourcesItemsPushResource
:<|> IndexingDatasourcesItemsDeleteResource
:<|> IndexingDatasourcesGetSchemaResource
:<|> IndexingDatasourcesUpdateSchemaResource
:<|> IndexingDatasourcesDeleteSchemaResource
| brendanhay/gogol | gogol-cloudsearch/gen/Network/Google/CloudSearch.hs | mpl-2.0 | 35,762 | 0 | 47 | 7,597 | 3,865 | 2,782 | 1,083 | 926 | 0 |
{-# LANGUAGE TemplateHaskell, LambdaCase #-}
module Model.Identity.Types
( Identity(..)
, MonadHasIdentity
, extractFromIdentifiedSessOrDefault
, identityVerf
, identityAdmin
, identitySuperuser
) where
import qualified Data.ByteString as BS
import Has (Has(..), MonadHas)
import Model.Id.Types
import Model.Permission.Types
import Model.Party.Types
import Model.Token.Types
-- | Who is making the request that we are handling?
data Identity
= NotLoggedIn
-- ^ User may have an identity, but they have not established it yet
| IdentityNotNeeded
-- ^ We don't care what the user's identity is.
-- Used mainly for BackgroundContext, but also used when
-- running unprotected routes
| Identified Session
-- ^ An actual human user on a web browser. One of the other two return values
-- for 'determineIdentity'.
| ReIdentified SiteAuth
-- ^ Speculation: used in video conversion when sending results from the
-- compute cluster back to the system. Used as a 'su' to run actions as the
-- account that created the upload asset, instead of the anonymous account submitting the result?
-- | Get the SiteAuth for the Identity, which corresponds to what privileges the Identity has
-- within the site as well as which Party/Account the Identity is
instance Has SiteAuth Identity where
view (Identified Session{ sessionAccountToken = AccountToken{ tokenAccount = t } }) = t
view (ReIdentified a) = a
view IdentityNotNeeded = nobodySiteAuth
view NotLoggedIn = nobodySiteAuth
instance Has Party Identity where
view = view . (view :: Identity -> SiteAuth)
instance Has (Id Party) Identity where
view = view . (view :: Identity -> SiteAuth)
instance Has Access Identity where
view = view . (view :: Identity -> SiteAuth)
-- | Used by an action that will reference the actor's identity in order to authorize the action being performed.
-- In some cases, this identity simply hasn't been established or was not resolved because the
-- context indicated that an identity wasn't needed.
type MonadHasIdentity c m = (MonadHas Identity c m, Has SiteAuth c, Has Party c, Has (Id Party) c, Has Access c)
-- | Extract a value from part of a session for Identified, otherwise use the default value
extractFromIdentifiedSessOrDefault :: a -> (Session -> a) -> Identity -> a
extractFromIdentifiedSessOrDefault z f = \case
Identified sess -> f sess
NotLoggedIn -> z
IdentityNotNeeded -> z
ReIdentified _ -> z
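-- A minimal usage sketch (added; hypothetical helper): pull the SiteAuth out of an
-- Identified session, mirroring the Has instance above.
identitySiteAuthMaybe :: Identity -> Maybe SiteAuth
identitySiteAuthMaybe =
    extractFromIdentifiedSessOrDefault Nothing (Just . tokenAccount . sessionAccountToken)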
-- | Extract the secure token for state changing action, only available for logged in session identity
identityVerf :: Identity -> Maybe BS.ByteString
identityVerf = extractFromIdentifiedSessOrDefault Nothing (Just . sessionVerf)
identitySuperuserFor :: (Access -> Permission) -> Identity -> Bool
identitySuperuserFor f (Identified t) = sessionSuperuser t && f (view t) == PermissionADMIN
identitySuperuserFor _ (ReIdentified _) = True
identitySuperuserFor _ _ = False
identityAdmin :: Identity -> Bool
identityAdmin = identitySuperuserFor accessMember
identitySuperuser :: Identity -> Bool
identitySuperuser = identitySuperuserFor accessPermission
| databrary/databrary | src/Model/Identity/Types.hs | agpl-3.0 | 3,108 | 0 | 14 | 538 | 553 | 310 | 243 | -1 | -1 |
import Test.HUnit
import System.Exit
import Algebra.Graph ( Graph, vertexCount, empty, vertex, vertices, edge, edges, overlay
, edgeList)
import Data.List (nub)
import TopoGraph
import TopoList(assertTopoListEq)
oneV = vertex 0
oneE = edge 0 1
loop = edge 0 0
island = vertices [0, 1]
path2 = edges [(0, 1), (1, 2)]
loop2 = edges [(0, 1), (1, 0)]
converge2 = edges [(0, 1), (2, 1)]
diverge2 = edges [(0, 1), (0, 2)]
v_loop_v = overlay loop $ edges [(1, 0), (0, 2)]
v_loop2_v = overlay (edges [(2, 0), (1, 3)]) loop2
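-- Fixture shapes (descriptive comments added): 'loop' is a self-edge on 0, 'loop2' is
-- the two-cycle 0 <-> 1, 'converge2' points 0 and 2 at 1, 'diverge2' points 0 at both
-- 1 and 2, and the v_loop* graphs attach extra vertices to those cycles.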
testIsTopo :: (Int -> Graph Int -> Bool) -> Graph Int -> [Bool] -> Test
testIsTopo f g goldens
= TestCase $ assertEqual "" goldens (map (\n -> f n g) nodes)
where nodes = take (1 + vertexCount g) (iterate (+1) 0)
testIsTopoFirst = testIsTopo isTopoFirst
isTopoFirstTests = TestList [
testIsTopoFirst empty [True],
testIsTopoFirst oneV [True, True],
testIsTopoFirst oneE [True, False, True],
testIsTopoFirst loop [False, True],
testIsTopoFirst island [True, True, True],
testIsTopoFirst path2 [True, False, False, True],
testIsTopoFirst loop2 [False, False, True],
testIsTopoFirst converge2 [True, False, True, True],
testIsTopoFirst diverge2 [True, False, False, True],
testIsTopoFirst v_loop_v [False, True, False, True],
testIsTopoFirst v_loop2_v [False, False, True, False, True]]
testIsTopoLast = testIsTopo isTopoLast
isTopoLastTests = TestList [
testIsTopoLast empty [True],
testIsTopoLast oneV [True, True],
testIsTopoLast oneE [False, True, True],
testIsTopoLast loop [False, True],
testIsTopoLast island [True, True, True],
testIsTopoLast path2 [False, False, True, True],
testIsTopoLast loop2 [False, False, True],
testIsTopoLast converge2 [False, True, False, True],
testIsTopoLast diverge2 [False, True, True, True],
testIsTopoLast v_loop_v [False, False, True, True],
testIsTopoLast v_loop2_v [False, False, False, True, True]]
testFindTopo :: (Graph Int -> Maybe Int) -> Graph Int -> [Int] -> Test
testFindTopo f g candidates =
TestCase (case result of
Just a -> assertBool msg (a `elem` candidates)
Nothing -> assertBool msg (candidates == []))
where result = f g
msg = "Found " ++ show result ++ ", but candidates are " ++ show candidates
testFindTopoFirst = testFindTopo findTopoFirst
testFindTopoLast = testFindTopo findTopoLast
testFindTopoFirstTrue = testFindTopo (findTopoFirstTrue even)
testFindTopoLastTrue = testFindTopo (findTopoLastTrue even)
findTopoFirstTests =
TestList [ testFindTopoFirst empty []
, testFindTopoFirst oneV [0]
, testFindTopoFirst oneE [0]
, testFindTopoFirst loop [0]
, testFindTopoFirst island [0, 1]
, testFindTopoFirst path2 [0]
, testFindTopoFirst loop2 []
, testFindTopoFirst converge2 [0, 2]
, testFindTopoFirst diverge2 [0]
, testFindTopoFirst v_loop_v [1]
, testFindTopoFirst v_loop2_v [2]]
findTopoFirstTrueTests =
TestList [ testFindTopoFirstTrue empty []
, testFindTopoFirstTrue oneV [0]
, testFindTopoFirstTrue oneE [0]
, testFindTopoFirstTrue loop [0]
, testFindTopoFirstTrue island [0]
, testFindTopoFirstTrue path2 [0]
, testFindTopoFirstTrue loop2 []
, testFindTopoFirstTrue converge2 [0]
, testFindTopoFirstTrue diverge2 [0]
, testFindTopoFirstTrue v_loop_v []
, testFindTopoFirstTrue v_loop2_v [2]]
findTopoLastTests =
TestList [ testFindTopoLast empty []
, testFindTopoLast oneV [0]
, testFindTopoLast oneE [1]
, testFindTopoLast loop [0]
, testFindTopoLast island [0, 1]
, testFindTopoLast path2 [2]
, testFindTopoLast loop2 []
, testFindTopoLast converge2 [1]
, testFindTopoLast diverge2 [1, 2]
, testFindTopoLast v_loop_v [2]
, testFindTopoLast v_loop2_v [3]]
findTopoLastTrueTests =
TestList [ testFindTopoLastTrue empty []
, testFindTopoLastTrue oneV [0]
, testFindTopoLastTrue oneE []
, testFindTopoLastTrue loop [0]
, testFindTopoLastTrue island [0]
, testFindTopoLastTrue path2 [2]
, testFindTopoLastTrue loop2 []
, testFindTopoLastTrue converge2 []
, testFindTopoLastTrue diverge2 [2]
, testFindTopoLastTrue v_loop_v [2]
, testFindTopoLastTrue v_loop2_v []]
testTopoFoldl :: Graph Int -> [(Int, Int)] -> [(Int, Int)] -> Test
testTopoFoldl g topoEdges cyclicEdges =
let (topoList, cyclicGraph) = topoFoldl (flip (:)) [] g
in TestCase (do assertEqual "Duplicated topoList" (nub topoList) topoList
assertTopoListEq topoList topoEdges
assertEqual "" cyclicEdges (edgeList cyclicGraph))
topoFoldlTests = TestList [ testTopoFoldl empty [] []
, testTopoFoldl oneV [(0, 0)] []
, testTopoFoldl oneE [(1, 0)] []
, testTopoFoldl loop [(0, 0)] []
, testTopoFoldl island [(0, 0), (1, 1)] []
, testTopoFoldl path2 [(2, 1), (1, 0)] []
, testTopoFoldl loop2 [] [(0, 1), (1, 0)]
, testTopoFoldl converge2 [(1, 0), (1, 2)] []
, testTopoFoldl diverge2 [(1, 0), (2, 0)] []
, testTopoFoldl v_loop_v [(2, 0), (0, 1)] []
, testTopoFoldl v_loop2_v [(2, 2)] [(0, 1), (1, 0), (1, 3)]
]
testTopoFoldr :: Graph Int -> [(Int, Int)] -> [(Int, Int)] -> Test
testTopoFoldr g topoEdges cyclicEdges =
let (topoList, cyclicGraph) = topoFoldr (:) [] g
in TestCase (do assertEqual "Duplicated topoList" (nub topoList) topoList
assertTopoListEq topoList topoEdges
assertEqual "" cyclicEdges (edgeList cyclicGraph))
topoFoldrTests = TestList [ testTopoFoldr empty [] []
, testTopoFoldr oneV [(0, 0)] []
, testTopoFoldr oneE [(0, 1)] []
, testTopoFoldr loop [(0, 0)] []
, testTopoFoldr island [(0, 0), (1, 1)] []
, testTopoFoldr path2 [(0, 1), (1, 2)] []
, testTopoFoldr loop2 [] [(0, 1), (1, 0)]
, testTopoFoldr converge2 [(0, 1), (2, 1)] []
, testTopoFoldr diverge2 [(0, 1), (0, 2)] []
, testTopoFoldr v_loop_v [(1, 0), (0, 2)] []
, testTopoFoldr v_loop2_v [(3, 3)] [(0, 1), (1, 0), (2, 0)]]
allTests = TestList [ TestLabel "isTopoFirst" $ isTopoFirstTests
, TestLabel "isTopoLast" $ isTopoLastTests
, TestLabel "findTopoFirst" $ findTopoFirstTests
, TestLabel "findTopoFirstTrue" $ findTopoFirstTrueTests
, TestLabel "findTopoLast" $ findTopoLastTests
, TestLabel "findTopoLastTrue" $ findTopoLastTrueTests
, TestLabel "topoFoldlTests" $ topoFoldlTests
, TestLabel "topoFoldrTests" $ topoFoldrTests
]
main = do counts <- runTestTT allTests
if errors counts > 0 || failures counts > 0
then exitFailure
else exitSuccess
| yjwen/hada | test/TestTopoGraph.hs | lgpl-3.0 | 7,473 | 0 | 13 | 2,254 | 2,516 | 1,396 | 1,120 | 152 | 2 |
{-# LANGUAGE GADTs #-}
module Network.Haskoin.Crypto.ExtendedKeys
( XPubKey(..)
, XPrvKey(..)
, ChainCode
, KeyIndex
, DerivationException(..)
, makeXPrvKey
, deriveXPubKey
, prvSubKey
, pubSubKey
, hardSubKey
, xPrvIsHard
, xPubIsHard
, xPrvChild
, xPubChild
, xPubID
, xPrvID
, xPubFP
, xPrvFP
, xPubAddr
, xPubExport
, xPrvExport
, xPubImport
, xPrvImport
, xPrvWif
-- Helpers
, prvSubKeys
, pubSubKeys
, hardSubKeys
, deriveAddr
, deriveAddrs
, deriveMSAddr
, deriveMSAddrs
, cycleIndex
-- Derivation paths
, DerivPathI(..)
, HardOrGeneric
, GenericOrSoft
, DerivPath
, AllHardPath
, SoftPath
, Bip32PathIndex (..)
, derivePath
, derivePubPath
, toHard
, toSoft
, toGeneric
, (++/)
, pathToStr
-- Derivation path parsing
, XKey(..)
, ParsedPath(..)
, parsePath
, parseHard
, parseSoft
, applyPath
, derivePathAddr
, derivePathAddrs
, derivePathMSAddr
, derivePathMSAddrs
, concatBip32Segments
) where
import Control.DeepSeq (NFData, rnf)
import Control.Monad (mzero, guard, unless, (<=<))
import Control.Exception (Exception, throw)
import qualified Crypto.Secp256k1 as EC
import Data.Aeson (Value(String), FromJSON, ToJSON, parseJSON, toJSON, withText)
import Data.Binary (Binary, get, put)
import Data.Binary.Get (Get, getWord8, getWord32be)
import Data.Binary.Put (Put, putWord8, putWord32be)
import Data.Word (Word8, Word32)
import Data.Bits (setBit, testBit, clearBit)
import Data.List.Split (splitOn)
import Data.Maybe (fromMaybe)
import Data.String (IsString, fromString)
import Data.String.Conversions (cs)
import Data.Typeable (Typeable)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS (append, take)
import Text.Read (readPrec, parens, lexP, pfail)
import qualified Text.Read as Read (Lexeme(Ident, String))
import Network.Haskoin.Util
import Network.Haskoin.Constants
import Network.Haskoin.Script.Parser
import Network.Haskoin.Crypto.Keys
import Network.Haskoin.Crypto.Hash
import Network.Haskoin.Crypto.Base58
import Data.List (foldl')
{- See BIP32 for details: https://en.bitcoin.it/wiki/BIP_0032 -}
-- | A derivation exception is thrown in the very unlikely event that a
-- derivation is invalid.
data DerivationException = DerivationException String
deriving (Eq, Read, Show, Typeable)
instance Exception DerivationException
type ChainCode = Hash256
type KeyIndex = Word32
-- | Data type representing an extended BIP32 private key. An extended key
-- is a node in a tree of key derivations. It has a depth in the tree, a
-- parent node and an index to differentiate it from other siblings.
data XPrvKey = XPrvKey
{ xPrvDepth :: !Word8 -- ^ Depth in the tree of key derivations.
, xPrvParent :: !Word32 -- ^ Fingerprint of the parent key.
, xPrvIndex :: !KeyIndex -- ^ Key derivation index.
, xPrvChain :: !ChainCode -- ^ Chain code.
, xPrvKey :: !PrvKeyC -- ^ The private key of this extended key node.
} deriving (Eq)
-- TODO: Test
instance Show XPrvKey where
showsPrec d k = showParen (d > 10) $
showString "XPrvKey " . shows (xPrvExport k)
-- TODO: Test
instance Read XPrvKey where
readPrec = parens $ do
Read.Ident "XPrvKey" <- lexP
Read.String str <- lexP
maybe pfail return $ xPrvImport $ cs str
-- TODO: Test
instance IsString XPrvKey where
fromString =
fromMaybe e . xPrvImport . cs
where
e = error "Could not decode extended private key"
instance NFData XPrvKey where
rnf (XPrvKey d p i c k) =
rnf d `seq` rnf p `seq` rnf i `seq` rnf c `seq` rnf k
instance ToJSON XPrvKey where
toJSON = String . cs . xPrvExport
instance FromJSON XPrvKey where
parseJSON = withText "xprvkey" $ maybe mzero return . xPrvImport . cs
-- | Data type representing an extended BIP32 public key.
data XPubKey = XPubKey
{ xPubDepth :: !Word8 -- ^ Depth in the tree of key derivations.
, xPubParent :: !Word32 -- ^ Fingerprint of the parent key.
, xPubIndex :: !KeyIndex -- ^ Key derivation index.
, xPubChain :: !ChainCode -- ^ Chain code.
, xPubKey :: !PubKeyC -- ^ The public key of this extended key node.
} deriving (Eq)
-- TODO: Test
instance Show XPubKey where
showsPrec d k = showParen (d > 10) $
showString "XPubKey " . shows (xPubExport k)
-- TODO: Test
instance Read XPubKey where
readPrec = parens $ do
Read.Ident "XPubKey" <- lexP
Read.String str <- lexP
maybe pfail return $ xPubImport $ cs str
-- TODO: Test
instance IsString XPubKey where
fromString =
fromMaybe e . xPubImport . cs
where
e = error "Could not import extended public key"
instance NFData XPubKey where
rnf (XPubKey d p i c k) =
rnf d `seq` rnf p `seq` rnf i `seq` rnf c `seq` rnf k
instance ToJSON XPubKey where
toJSON = String . cs . xPubExport
instance FromJSON XPubKey where
parseJSON = withText "xpubkey" $ maybe mzero return . xPubImport . cs
-- | Build a BIP32 compatible extended private key from a bytestring. This will
-- produce a root node (depth=0 and parent=0).
makeXPrvKey :: ByteString -> XPrvKey
makeXPrvKey bs =
XPrvKey 0 0 0 c k
where
(p, c) = split512 $ hmac512 "Bitcoin seed" bs
k = fromMaybe err $ makePrvKeyC <$> EC.secKey (getHash256 p)
err = throw $ DerivationException "Invalid seed"
-- | Derive an extended public key from an extended private key. This function
-- will preserve the depth, parent, index and chaincode fields of the extended
-- private key.
deriveXPubKey :: XPrvKey -> XPubKey
deriveXPubKey (XPrvKey d p i c k) = XPubKey d p i c (derivePubKey k)
-- | Compute a private, soft child key derivation. A private soft derivation
-- will allow the equivalent extended public key to derive the public key for
-- this child. Given a parent key /m/ and a derivation index /i/, this function
-- will compute m\/i\/.
--
-- Soft derivations allow for more flexibility such as read-only wallets.
-- However, care must be taken not to leak both the parent extended public key
-- and one of the extended child private keys, as this would compromise the
-- extended parent private key.
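--
-- A minimal usage sketch (the seed and the let-bound names below are
-- illustrative, not part of this module):
--
-- > let m     = makeXPrvKey seedBytes -- seedBytes :: ByteString (assumed)
-- >     m0    = prvSubKey m 0         -- the soft child m/0
-- >     pubM0 = deriveXPubKey m0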
prvSubKey :: XPrvKey -- ^ Extended parent private key
-> KeyIndex -- ^ Child derivation index
-> XPrvKey -- ^ Extended child private key
prvSubKey xkey child
| child >= 0 && child < 0x80000000 =
XPrvKey (xPrvDepth xkey + 1) (xPrvFP xkey) child c k
| otherwise = error "Invalid child derivation index"
where
pK = xPubKey $ deriveXPubKey xkey
msg = BS.append (encode' pK) (encode' child)
(a, c) = split512 $ hmac512 (encode' $ xPrvChain xkey) msg
k = fromMaybe err $ tweakPrvKeyC (xPrvKey xkey) a
err = throw $ DerivationException "Invalid prvSubKey derivation"
-- | Compute a public, soft child key derivation. Given a parent key /M/
-- and a derivation index /i/, this function will compute M\/i\/.
pubSubKey :: XPubKey -- ^ Extended Parent public key
-> KeyIndex -- ^ Child derivation index
-> XPubKey -- ^ Extended child public key
pubSubKey xKey child
| child >= 0 && child < 0x80000000 =
XPubKey (xPubDepth xKey + 1) (xPubFP xKey) child c pK
| otherwise = error "Invalid child derivation index"
where
msg = BS.append (encode' $ xPubKey xKey) (encode' child)
(a, c) = split512 $ hmac512 (encode' $ xPubChain xKey) msg
pK = fromMaybe err $ tweakPubKeyC (xPubKey xKey) a
err = throw $ DerivationException "Invalid pubSubKey derivation"
-- | Compute a hard child key derivation. Hard derivations can only be computed
-- for private keys. Hard derivations do not allow the parent public key to
-- derive the child public keys. However, they are safer as a breach of the
-- parent public key and child private keys does not lead to a breach of the
-- parent private key. Given a parent key /m/ and a derivation index /i/, this
-- function will compute m\/i'\/.
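--
-- For instance, the hard child m\/1'\/ of an (illustrative) parent key @m@
-- would be obtained as:
--
-- > hardSubKey m 1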
hardSubKey :: XPrvKey -- ^ Extended Parent private key
-> KeyIndex -- ^ Child derivation index
-> XPrvKey -- ^ Extended child private key
hardSubKey xkey child
| child >= 0 && child < 0x80000000 =
XPrvKey (xPrvDepth xkey + 1) (xPrvFP xkey) i c k
| otherwise = error "Invalid child derivation index"
where
i = setBit child 31
msg = BS.append (bsPadPrvKey $ xPrvKey xkey) (encode' i)
(a, c) = split512 $ hmac512 (encode' $ xPrvChain xkey) msg
k = fromMaybe err $ tweakPrvKeyC (xPrvKey xkey) a
err = throw $ DerivationException "Invalid hardSubKey derivation"
-- | Returns True if the extended private key was derived through a hard
-- derivation.
xPrvIsHard :: XPrvKey -> Bool
xPrvIsHard k = testBit (xPrvIndex k) 31
-- | Returns True if the extended public key was derived through a hard
-- derivation.
xPubIsHard :: XPubKey -> Bool
xPubIsHard k = testBit (xPubIndex k) 31
-- | Returns the derivation index of this extended private key without the hard
-- bit set.
xPrvChild :: XPrvKey -> KeyIndex
xPrvChild k = clearBit (xPrvIndex k) 31
-- | Returns the derivation index of this extended public key without the hard
-- bit set.
xPubChild :: XPubKey -> KeyIndex
xPubChild k = clearBit (xPubIndex k) 31
-- | Computes the key identifier of an extended private key.
xPrvID :: XPrvKey -> Hash160
xPrvID = xPubID . deriveXPubKey
-- | Computes the key identifier of an extended public key.
xPubID :: XPubKey -> Hash160
xPubID = hash160 . getHash256 . hash256 . encode' . xPubKey
-- | Computes the key fingerprint of an extended private key.
xPrvFP :: XPrvKey -> Word32
xPrvFP = decode' . BS.take 4 . getHash160 . xPrvID
-- | Computes the key fingerprint of an extended public key.
xPubFP :: XPubKey -> Word32
xPubFP = decode' . BS.take 4 . getHash160 . xPubID
-- | Compute the 'Address' of an extended public key.
xPubAddr :: XPubKey -> Address
xPubAddr = pubKeyAddr . xPubKey
-- | Exports an extended private key to the BIP32 key export format (base 58).
xPrvExport :: XPrvKey -> ByteString
xPrvExport = encodeBase58Check . encode'
-- | Exports an extended public key to the BIP32 key export format (base 58).
xPubExport :: XPubKey -> ByteString
xPubExport = encodeBase58Check . encode'
-- | Decodes a BIP32 encoded extended private key. This function will fail if
-- invalid base 58 characters are detected or if the checksum fails.
xPrvImport :: ByteString -> Maybe XPrvKey
xPrvImport = decodeToMaybe <=< decodeBase58Check
-- | Decodes a BIP32 encoded extended public key. This function will fail if
-- invalid base 58 characters are detected or if the checksum fails.
xPubImport :: ByteString -> Maybe XPubKey
xPubImport = decodeToMaybe <=< decodeBase58Check
-- | Export an extended private key to WIF (Wallet Import Format).
xPrvWif :: XPrvKey -> ByteString
xPrvWif = toWif . xPrvKey
instance Binary XPrvKey where
get = do
ver <- getWord32be
unless (ver == extSecretPrefix) $ fail $
"Get: Invalid version for extended private key"
dep <- getWord8
par <- getWord32be
idx <- getWord32be
chn <- get
prv <- getPadPrvKey
return $ XPrvKey dep par idx chn prv
put k = do
putWord32be extSecretPrefix
putWord8 $ xPrvDepth k
putWord32be $ xPrvParent k
putWord32be $ xPrvIndex k
put $ xPrvChain k
putPadPrvKey $ xPrvKey k
instance Binary XPubKey where
get = do
ver <- getWord32be
unless (ver == extPubKeyPrefix) $ fail $
"Get: Invalid version for extended public key"
dep <- getWord8
par <- getWord32be
idx <- getWord32be
chn <- get
pub <- get
return $ XPubKey dep par idx chn pub
put k = do
putWord32be extPubKeyPrefix
putWord8 $ xPubDepth k
putWord32be $ xPubParent k
putWord32be $ xPubIndex k
put $ xPubChain k
put $ xPubKey k
{- Derivation helpers -}
-- | Cyclic list of all private soft child key derivations of a parent key
-- starting from an offset index.
prvSubKeys :: XPrvKey -> KeyIndex -> [(XPrvKey, KeyIndex)]
prvSubKeys k = map (\i -> (prvSubKey k i, i)) . cycleIndex
-- | Cyclic list of all public soft child key derivations of a parent key
-- starting from an offset index.
pubSubKeys :: XPubKey -> KeyIndex -> [(XPubKey, KeyIndex)]
pubSubKeys k = map (\i -> (pubSubKey k i, i)) . cycleIndex
-- | Cyclic list of all hard child key derivations of a parent key starting
-- from an offset index.
hardSubKeys :: XPrvKey -> KeyIndex -> [(XPrvKey, KeyIndex)]
hardSubKeys k = map (\i -> (hardSubKey k i, i)) . cycleIndex
-- | Derive an address from a public key and an index. The derivation type
-- is a public, soft derivation.
deriveAddr :: XPubKey -> KeyIndex -> (Address, PubKeyC)
deriveAddr k i =
(xPubAddr key, xPubKey key)
where
key = pubSubKey k i
-- | Cyclic list of all addresses derived from a public key starting from an
-- offset index. The derivation types are public, soft derivations.
deriveAddrs :: XPubKey -> KeyIndex -> [(Address, PubKeyC, KeyIndex)]
deriveAddrs k =
map f . cycleIndex
where
f i = let (a, key) = deriveAddr k i in (a, key, i)
-- | Derive a multisig address from a list of public keys, the number of
-- required signatures (m) and a derivation index. The derivation type is a
-- public, soft derivation.
deriveMSAddr :: [XPubKey] -> Int -> KeyIndex -> (Address, RedeemScript)
deriveMSAddr keys m i =
(scriptAddr rdm, rdm)
where
rdm = sortMulSig $ PayMulSig k m
k = map (toPubKeyG . xPubKey . flip pubSubKey i) keys
-- | Cyclic list of all multisig addresses derived from a list of public keys,
-- a number of required signatures (m) and starting from an offset index. The
-- derivation type is a public, soft derivation.
deriveMSAddrs :: [XPubKey] -> Int -> KeyIndex
-> [(Address, RedeemScript, KeyIndex)]
deriveMSAddrs keys m =
map f . cycleIndex
where
f i = let (a, rdm) = deriveMSAddr keys m i in (a, rdm, i)
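-- | Infinite cyclic list of derivation indices, starting at the given offset
-- and wrapping around after 0x7fffffff. For example:
--
-- > take 3 (cycleIndex 5) == [5, 6, 7]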
cycleIndex :: KeyIndex -> [KeyIndex]
cycleIndex i
| i == 0 = cycle [0..0x7fffffff]
| i < 0x80000000 = cycle $ [i..0x7fffffff] ++ [0..(i-1)]
| otherwise = error $ "cycleIndex: invalid index " ++ (show i)
{- Derivation Paths -}
data Hard
data Generic
data Soft
type AllHardPath = DerivPathI Hard
type DerivPath = DerivPathI Generic
type SoftPath = DerivPathI Soft
class HardOrGeneric a
instance HardOrGeneric Hard
instance HardOrGeneric Generic
class GenericOrSoft a
instance GenericOrSoft Generic
instance GenericOrSoft Soft
-- | Data type representing a derivation path. Two constructors are provided
-- for specifying soft or hard derivations. The path /0/1'/2 for example can be
-- expressed as Deriv :/ 0 :| 1 :/ 2. The HardOrGeneric and GenericOrSoft type
-- classes are used to constrain the valid values for the phantom type t. If
-- you mix hard (:|) and soft (:/) paths, the only valid type for t is Generic.
-- Otherwise, t can be Hard if you only have hard derivation or Soft if you
-- only have soft derivations.
--
-- Using this type is as easy as writing the required derivation, as in these
-- examples:
-- Deriv :/ 0 :/ 1 :/ 2 :: SoftPath
-- Deriv :| 0 :| 1 :| 2 :: AllHardPath
-- Deriv :| 0 :/ 1 :/ 2 :: DerivPath
data DerivPathI t where
(:|) :: HardOrGeneric t => !(DerivPathI t) -> !KeyIndex -> DerivPathI t
(:/) :: GenericOrSoft t => !(DerivPathI t) -> !KeyIndex -> DerivPathI t
Deriv :: DerivPathI t
instance NFData (DerivPathI t) where
rnf p = case p of
next :| i -> rnf i `seq` rnf next
next :/ i -> rnf i `seq` rnf next
Deriv -> ()
instance Eq (DerivPathI t) where
(nextA :| iA) == (nextB :| iB) = iA == iB && nextA == nextB
(nextA :/ iA) == (nextB :/ iB) = iA == iB && nextA == nextB
Deriv == Deriv = True
_ == _ = False
-- TODO: Test
pathToStr :: DerivPathI t -> String
pathToStr p =
case p of
next :| i -> concat [ pathToStr next, "/", show i, "'" ]
next :/ i -> concat [ pathToStr next, "/", show i ]
Deriv -> ""
toHard :: DerivPathI t -> Maybe AllHardPath
toHard p = case p of
next :| i -> (:| i) <$> toHard next
Deriv -> Just Deriv
_ -> Nothing
toSoft :: DerivPathI t -> Maybe SoftPath
toSoft p = case p of
next :/ i -> (:/ i) <$> toSoft next
Deriv -> Just Deriv
_ -> Nothing
toGeneric :: DerivPathI t -> DerivPath
toGeneric p = case p of
next :/ i -> (toGeneric next) :/ i
next :| i -> (toGeneric next) :| i
Deriv -> Deriv
-- | Append two derivation paths together. The result will be a mixed
-- derivation path.
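--
-- For example, appending a soft path onto a hard path gives a generic path:
--
-- > ((Deriv :| 1) :: AllHardPath) ++/ ((Deriv :/ 2) :: SoftPath)
--
-- which evaluates to @Deriv :| 1 :/ 2 :: DerivPath@.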
(++/) :: DerivPathI t1 -> DerivPathI t2 -> DerivPath
(++/) p1 p2 =
go id (toGeneric p2) $ toGeneric p1
where
go f p = case p of
next :/ i -> go (f . (:/ i)) $ toGeneric next
next :| i -> go (f . (:| i)) $ toGeneric next
_ -> f
-- | Derive a private key from a derivation path
derivePath :: DerivPathI t -> XPrvKey -> XPrvKey
derivePath path key =
go id path $ key
where
-- Build the full derivation function starting from the end
go f p = case p of
next :| i -> go (f . flip hardSubKey i) next
next :/ i -> go (f . flip prvSubKey i) next
_ -> f
-- | Derive a public key from a soft derivation path
derivePubPath :: SoftPath -> XPubKey -> XPubKey
derivePubPath path key =
go id path $ key
where
-- Build the full derivation function starting from the end
go f p = case p of
next :/ i -> go (f . flip pubSubKey i) next
_ -> f
-- TODO: Test
instance Show DerivPath where
showsPrec d p = showParen (d > 10) $
showString "DerivPath " . shows (pathToStr p)
-- TODO: Test
instance Show AllHardPath where
showsPrec d p = showParen (d > 10) $
showString "AllHardPath " . shows (pathToStr p)
-- TODO: Test
instance Show SoftPath where
showsPrec d p = showParen (d > 10) $
showString "SoftPath " . shows (pathToStr p)
-- TODO: Test
instance Read DerivPath where
readPrec = parens $ do
Read.Ident "DerivPath" <- lexP
Read.String str <- lexP
maybe pfail (return . getParsedPath) $ parsePath str
-- TODO: Test
instance Read AllHardPath where
readPrec = parens $ do
Read.Ident "AllHardPath" <- lexP
Read.String str <- lexP
maybe pfail return $ parseHard str
-- TODO: Test
instance Read SoftPath where
readPrec = parens $ do
Read.Ident "SoftPath" <- lexP
Read.String str <- lexP
maybe pfail return $ parseSoft str
-- TODO: Test
instance IsString ParsedPath where
fromString =
fromMaybe e . parsePath
where
e = error "Could not parse derivation path"
-- TODO: Test
instance IsString DerivPath where
fromString =
getParsedPath . fromMaybe e . parsePath
where
e = error "Could not parse derivation path"
-- TODO: Test
instance IsString AllHardPath where
fromString =
fromMaybe e . parseHard
where
e = error "Could not parse hard derivation path"
-- TODO: Test
instance IsString SoftPath where
fromString =
fromMaybe e . parseSoft
where
e = error "Could not parse soft derivation path"
instance FromJSON ParsedPath where
parseJSON = withText "ParsedPathPath" $ \str -> case parsePath $ cs str of
Just p -> return p
_ -> mzero
instance FromJSON DerivPath where
parseJSON = withText "DerivPath" $ \str -> case parsePath $ cs str of
Just p -> return $ getParsedPath p
_ -> mzero
instance FromJSON AllHardPath where
parseJSON = withText "AllHardPath" $ \str -> case parseHard $ cs str of
Just p -> return p
_ -> mzero
instance FromJSON SoftPath where
parseJSON = withText "SoftPath" $ \str -> case parseSoft $ cs str of
Just p -> return p
_ -> mzero
instance ToJSON (DerivPathI t) where
toJSON = String . cs . pathToStr
instance ToJSON ParsedPath where
toJSON (ParsedPrv p) = String . cs . ("m" ++) . pathToStr $ p
toJSON (ParsedPub p) = String . cs . ("M" ++) . pathToStr $ p
toJSON (ParsedEmpty p) = String . cs . ("" ++) . pathToStr $ p
{- Parsing derivation paths of the form m/1/2'/3 or M/1/2'/3 -}
data ParsedPath = ParsedPrv { getParsedPath :: !DerivPath }
| ParsedPub { getParsedPath :: !DerivPath }
| ParsedEmpty { getParsedPath :: !DerivPath }
deriving (Read, Show, Eq)
-- | Parse a derivation path string for an extended key.
-- Forms: "m/0'/2", "M/2/3/4".
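-- For example:
--
-- > parsePath "m/0'/2" == Just (ParsedPrv (Deriv :| 0 :/ 2))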
parsePath :: String -> Maybe ParsedPath
parsePath str = do
res <- concatBip32Segments <$> mapM parseBip32PathIndex xs
case x of
"m" -> Just $ ParsedPrv res
"M" -> Just $ ParsedPub res
"" -> Just $ ParsedEmpty res
_ -> Nothing
where
(x : xs) = splitOn "/" str
concatBip32Segments :: [Bip32PathIndex] -> DerivPath
concatBip32Segments xs = foldl' appendBip32Segment Deriv xs
appendBip32Segment :: DerivPath -> Bip32PathIndex -> DerivPath
appendBip32Segment d (Bip32SoftIndex i) = d :/ i
appendBip32Segment d (Bip32HardIndex i) = d :| i
parseBip32PathIndex :: String -> Maybe Bip32PathIndex
parseBip32PathIndex segment = case reads segment of
[(i, "" )] -> guard (is31Bit i) >> ( return $ Bip32SoftIndex i )
[(i, "'")] -> guard (is31Bit i) >> ( return $ Bip32HardIndex i )
_ -> Nothing
data Bip32PathIndex = Bip32HardIndex KeyIndex | Bip32SoftIndex KeyIndex
deriving (Read,Show,Eq)
is31Bit :: (Integral a) => a -> Bool
is31Bit i = i >= 0 && i < 0x80000000
-- Helper function to parse a hard path
parseHard :: String -> Maybe AllHardPath
parseHard = toHard . getParsedPath <=< parsePath
-- Helper function to parse a soft path
parseSoft :: String -> Maybe SoftPath
parseSoft = toSoft . getParsedPath <=< parsePath
data XKey = XPrv { getXPrvKey :: !XPrvKey }
| XPub { getXPubKey :: !XPubKey }
deriving (Eq, Show)
-- | Apply a parsed path to a private key to derive the new key defined in the
-- path. If the path starts with m/, a private key will be returned and if the
-- path starts with M/, a public key will be returned.
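--
-- A usage sketch ('rootKey' is an illustrative 'XPrvKey'):
--
-- > case parsePath "M/1/2" of
-- >     Just p  -> applyPath p (XPrv rootKey) -- Right (XPub _)
-- >     Nothing -> Left "invalid path"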
applyPath :: ParsedPath -> XKey -> Either String XKey
applyPath path key = case (path, key) of
(ParsedPrv _, XPrv k) -> return $ XPrv $ derivPrvF k
(ParsedPrv _, XPub _) -> Left "applyPath: Invalid public key"
(ParsedPub _, XPrv k) -> return $ XPub $ deriveXPubKey $ derivPrvF k
(ParsedPub _, XPub k) -> derivPubFE >>= \f -> return $ XPub $ f k
-- For empty parsed paths, we take a hint from the provided key
(ParsedEmpty _, XPrv k) -> return $ XPrv $ derivPrvF k
(ParsedEmpty _, XPub k) -> derivPubFE >>= \f -> return $ XPub $ f k
where
derivPrvF = goPrv id $ getParsedPath path
derivPubFE = goPubE id $ getParsedPath path
-- Build the full private derivation function starting from the end
goPrv f p = case p of
next :| i -> goPrv (f . flip hardSubKey i) next
next :/ i -> goPrv (f . flip prvSubKey i) next
Deriv -> f
-- Build the full public derivation function starting from the end
goPubE f p = case p of
next :/ i -> goPubE (f . flip pubSubKey i) next
Deriv -> Right f
_ -> Left "applyPath: Invalid hard derivation"
{- Helpers for derivation paths and addresses -}
-- | Derive an address from a given parent path.
derivePathAddr :: XPubKey -> SoftPath -> KeyIndex -> (Address, PubKeyC)
derivePathAddr key path i = deriveAddr (derivePubPath path key) i
-- | Cyclic list of all addresses derived from a given parent path and starting
-- from the given offset index.
derivePathAddrs :: XPubKey -> SoftPath -> KeyIndex
-> [(Address, PubKeyC, KeyIndex)]
derivePathAddrs key path i = deriveAddrs (derivePubPath path key) i
-- | Derive a multisig address from a given parent path. The number of required
-- signatures (m in m of n) is also needed.
derivePathMSAddr :: [XPubKey] -> SoftPath -> Int -> KeyIndex
-> (Address, RedeemScript)
derivePathMSAddr keys path m i =
deriveMSAddr (map (derivePubPath path) keys) m i
-- | Cyclic list of all multisig addresses derived from a given parent path and
-- starting from the given offset index. The number of required signatures
-- (m in m of n) is also needed.
derivePathMSAddrs :: [XPubKey] -> SoftPath -> Int -> KeyIndex
-> [(Address, RedeemScript, KeyIndex)]
derivePathMSAddrs keys path m i =
deriveMSAddrs (map (derivePubPath path) keys) m i
{- Utilities for extended keys -}
-- De-serialize HDW-specific private key
getPadPrvKey :: Get PrvKeyC
getPadPrvKey = do
pad <- getWord8
unless (pad == 0x00) $ fail $
"Private key must be padded with 0x00"
prvKeyGetMonad makePrvKeyC -- Compressed version
-- Serialize HDW-specific private key
putPadPrvKey :: PrvKeyC -> Put
putPadPrvKey p = putWord8 0x00 >> prvKeyPutMonad p
bsPadPrvKey :: PrvKeyC -> ByteString
bsPadPrvKey = runPut' . putPadPrvKey
|
tphyahoo/haskoin
|
haskoin-core/Network/Haskoin/Crypto/ExtendedKeys.hs
|
unlicense
| 25,150 | 0 | 14 | 6,016 | 6,281 | 3,309 | 2,972 | -1 | -1 |
{-#LANGUAGE DeriveDataTypeable#-}
module Data.P440.Domain.BNS where
import Data.P440.Domain.SimpleTypes
import Data.P440.Domain.ComplexTypes
import Data.P440.Domain.ComplexTypesZS (НаДатуИлиЗаПериод, СвБанкИлиСвУБР)
import Data.Typeable (Typeable)
import Data.Text (Text)
-- 2.11 Statement of account availability (Справка о наличии счетов)
data Файл = Файл {
идЭС :: GUID
,версПрог :: Text
,телОтпр :: Text
,должнОтпр :: Text
,фамОтпр :: Text
,справНалич :: СправНалич
} deriving (Eq, Show, Typeable)
data СправНалич = СправНалич {
номСправ :: Text
,номЗапр :: Text
,датаПодпЗапр :: Date
,наимПодтв :: Maybe Text
,призНал :: Text
,датаПодп :: Date
,наДатуИлиЗаПериод :: НаДатуИлиЗаПериод
,свНО :: СвНО
,свБанкИлиУБР :: СвБанкИлиСвУБР
,свПл :: ПлЮЛИлиПлИП
,свСчет :: [СвСчет]
,свВклад :: [СвВклад]
,предБанка :: ПредБ
} deriving (Eq, Show, Typeable)
data ПлЮЛИлиПлИП = ПлЮЛ' ПлЮЛ
| ПлИП' ПлФЛ
deriving (Eq, Show, Typeable)
data СвСчет = СвСчет {
номСч :: НомСч
,видСч :: Text
,датаОткрСч :: Date
,датаЗакрСч :: Maybe Date
,кодВалСч :: Text
} deriving (Eq, Show, Typeable)
data СвВклад = СвВклад {
номВкл :: Text
,видВкл :: Text
,кодВалВклад :: Text
,датаОткрВклад :: Date
,датаИстВклад :: Maybe Date
,датаЗакрВклад :: Maybe Date
,суммаВкл :: Text
,проценты :: Text
} deriving (Eq, Show, Typeable)
|
Macil-dev/440P-old
|
src/Data/P440/Domain/BNS.hs
|
unlicense
| 2,023 | 78 | 8 | 457 | 759 | 422 | 337 | 50 | 0 |
module Main where
import Criterion.Main
import Control.Lens
import Control.Monad.Trans.Class ( lift )
import Control.Monad.Trans.Either ( EitherT(..)
, runEitherT
, hoistEither )
import Data.Default ( def )
import qualified Data.Vector as V
import Linear.V2
import TrueSkill ( Parameter(..)
, Player
, skills
, makeSkills
, fromMuSigma2
, sigmaOffense
, sigmaDefense
)
import TrueSkill.Autodiff hiding ( lift )
import Train
import Types
-- | Default player offense skill mean.
defaultMuOffense :: Floating d => d
defaultMuOffense = 3.0 / 11.0
-- | Default player offense skill standard deviation.
defaultSigmaOffense :: Floating d => d
defaultSigmaOffense = 0.1
-- | Default player defense skill mean.
defaultMuDefense :: Floating d => d
defaultMuDefense = 1.0 / 11.0
-- | Default player defense skill standard deviation.
defaultSigmaDefense :: Floating d => d
defaultSigmaDefense = 0.1
defaultParameter :: Floating d => Parameter d
defaultParameter = def
{ _sigmaOffense = defaultSigmaOffense / 5.0
, _sigmaDefense = defaultSigmaDefense / 5.0
}
defaultPlayer :: Floating d => Player d
defaultPlayer = skills .~ makeSkills
(fromMuSigma2 defaultMuOffense (defaultSigmaOffense**2))
(fromMuSigma2 defaultMuDefense (defaultSigmaDefense**2))
$ def
buildEval :: V.Vector Game -> V.Vector Game -> Double
buildEval trainData valData = value
where
value = objective 3 trainData valData
[ defaultParameter ^. sigmaOffense
, defaultParameter ^. sigmaDefense
, defaultMuOffense
, defaultSigmaOffense**2
, defaultMuDefense
, defaultSigmaDefense**2
]
gradEval :: V.Vector Game -> V.Vector Game -> (Double, V2 Double)
gradEval trainData valData = value `seq` grad `seq` (value, grad)
where
-- For testing with the `ad' package on hackage.
-- (value, grad) = grad' objective' [ defaultParameter ^. sigmaOffense
-- , defaultParameter ^. sigmaDefense
-- ]
-- objective' :: (Floating d, Ord d) => [d] -> d
-- objective' [a, b] = objective trainData valData [ a
-- , b
-- , defaultMuOffense
-- , defaultSigmaOffense^2
-- , defaultMuDefense
-- , defaultSigmaDefense^2
-- ]
AD value grad = objective 3 trainData valData initial
initial :: [AD]
initial = map (uncurry makeAD) $ zip [ defaultParameter ^. sigmaOffense
, defaultParameter ^. sigmaDefense
, defaultMuOffense
, defaultSigmaOffense^(2 :: Int)
, defaultMuDefense
, defaultSigmaDefense^(2 :: Int)
] [0..]
benchmark :: V.Vector Game -> V.Vector Game -> IO ()
benchmark trainData valData = defaultMain [
bgroup "TrueSkill" [ bench "grad" $ whnf (gradEval trainData) valData
, bench "eval" $ whnf (buildEval trainData) valData
]
]
main :: IO ()
main = do
r <- runEitherT $ do
trainData <- hoistEither =<<
(lift $
readGamesFromCsv "bundesliga/shuffled_train.csv")
valData <- hoistEither =<<
(lift $
readGamesFromCsv "bundesliga/shuffled_validation.csv")
lift $ benchmark trainData valData
-- lift $ print $ gradEval trainData valData
case r of
Left err -> putStrLn err
Right _ -> return ()
|
mkiefel/trueskill
|
bench_app.hs
|
bsd-2-clause
| 4,239 | 0 | 15 | 1,773 | 765 | 418 | 347 | 74 | 2 |
{-# LANGUAGE BangPatterns #-}
import Data.List
import Data.Maybe
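-- Range-minimum query via a segment tree: every node caches the minimum of
-- its index range [a, b], so each query visits only O(log n) nodes.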
data Mintree = Leaf !Integer !Integer
| Node !Integer !Integer !Integer !Mintree !Mintree deriving Show
tmin (Leaf m _) = m
tmin (Node m _ _ _ _) = m
buildtree xs = snd (buildtree' 0 (fromIntegral $ (length xs) - 1) xs)
buildtree' a b xs@(x:xs') =
if a == b
then (xs', Leaf x a)
else let c = a + (b - a) `div` 2
(xs', left) = buildtree' a c xs
(xs'', right) = buildtree' (c + 1) b xs'
in (xs'', Node (min (tmin left) (tmin right)) a b left right)
mm a Nothing = a
mm Nothing b = b
mm (Just a) (Just b) = Just $ min a b
findmin (Leaf m a') a b = if a' >= a && a' <= b then Just m else Nothing
findmin (Node m a' b' l r) a b =
if a <= a' && b' <= b
then Just m
else if b < a' || b' < a
then Nothing
else mm (findmin l a b) (findmin r a b)
tst t = do
l <- getLine
let (a:b:[]) = str2numlist l
putStrLn (show (fromJust $ findmin t a b))
main = do
l1 <- getLine
let (n:m:[]) = str2numlist l1
l2 <- getLine
let !t = buildtree $ str2numlist l2
mapM_ (const (tst t)) [1 .. m]
str2numlist :: String -> [Integer]
str2numlist = (map read) . explode
explode = unfoldr f
where f !str = let (!chunk, !rest) = span (/= ' ') str
in if null chunk
then Nothing
else if null rest
then Just (chunk, rest)
else Just (chunk, tail rest)
|
pbl64k/HackerRank-Contests
|
2014-02-07-FP/RangeMinimumQuery/rmq.accepted.hs
|
bsd-2-clause
| 1,587 | 0 | 13 | 590 | 739 | 371 | 368 | 58 | 4 |
-- | fluid_log is not present because it is a vararg function
-- and ffi to them is deprecated:
--
-- \"Note that for a C function defined to accept a variable number of arguments,
-- all arguments beyond the explicitly typed arguments suffer argument promotion.
-- However, because C permits the calling convention to be different for such
-- functions, a Haskell system will, in general, not be able to make use of
-- variable argument functions. Hence, their use is deprecated in portable code.\"
--
-- from <http://www.haskell.org/onlinereport/haskell2010/haskellch8.html>
-- (8.5.1 at the very end)
module Sound.Fluidsynth.Log
( LogLevel(..)
, Logger
, defaultLogger
, setLogger
) where
import Control.Monad.Trans(liftIO)
import Control.Exception(assert)
import Foreign.C.String(peekCString, withCString)
import Foreign.Ptr(nullPtr)
import Sound.Fluidsynth.Internal.Types
import Sound.Fluidsynth.Internal.FFI.Log
data LogLevel
= LogLevelPanic
| LogLevelError
| LogLevelWarning
| LogLevelInfo
| LogLevelDebug
llToC :: LogLevel -> C'fluid_log_level
llToC LogLevelPanic = c'FLUID_PANIC
llToC LogLevelError = c'FLUID_ERR
llToC LogLevelWarning = c'FLUID_WARN
llToC LogLevelInfo = c'FLUID_INFO
llToC LogLevelDebug = c'FLUID_DBG
llFromC :: C'fluid_log_level -> LogLevel
llFromC ll =
case () of
_ | ll == c'FLUID_PANIC -> LogLevelPanic
| ll == c'FLUID_ERR -> LogLevelError
| ll == c'FLUID_WARN -> LogLevelWarning
| ll == c'FLUID_INFO -> LogLevelInfo
| ll == c'FLUID_DBG -> LogLevelDebug
| otherwise -> assert False undefined
type Logger = LogLevel -> String -> FluidSynth ()
-- | Will just print to stderr
defaultLogger :: Logger
defaultLogger lvl str = FluidSynth $
liftIO $ withCString str $ \cstr ->
c'fluid_default_log_function (llToC lvl) cstr nullPtr
-- | Set custom 'Logger' for a specific 'LogLevel'
setLogger :: LogLevel
-> Logger -- ^ new 'Logger'
-> FluidSynth Logger -- ^ previous 'Logger'
setLogger ll logger = do
dataPtr <- fluidDataPtr
cback <- liftIO $ mk'fluid_log_function_t callback
prevcback <- liftIO $ c'fluid_set_log_function (llToC ll) cback dataPtr
return $ fromCallback $ mK'fluid_log_function_t prevcback
where callback cll cstr dataPtr = do
str <- peekCString cstr
runFluidMonad (logger (llFromC cll) str) dataPtr
fromCallback f ll' str = do
dataPtr <- fluidDataPtr
liftIO $ withCString str $ \cstr -> f (llToC ll') cstr dataPtr
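-- A usage sketch (runs inside the 'FluidSynth' monad; 'myLogger' is an
-- illustrative 'Logger' value, not defined in this module):
--
-- > previous <- setLogger LogLevelWarning myLogger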
|
projedi/fluidsynth-hs-complete
|
src/Sound/Fluidsynth/Log.hs
|
bsd-3-clause
| 2,534 | 0 | 13 | 515 | 522 | 275 | 247 | 51 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
module High.Functor.Bifunctor where
import High.Functor.PFunctor (PFunctor)
import High.Functor.QFunctor (QFunctor)
data Bifunctor :: ((* -> *) -> (* -> *) -> *) -> ((* -> *) -> (* -> *) -> *) -> ((* -> *) -> (* -> *) -> *) -> ((* -> *) -> (* -> *) -> (* -> *)) -> * where
Bifunctor ::
{ extend_pfunctor :: PFunctor cat1 dat pro
, extend_qfunctor :: QFunctor cat2 dat pro
} -> Bifunctor cat1 cat2 dat pro
|
Hexirp/untypeclass
|
src/High/Functor/Bifunctor.hs
|
bsd-3-clause
| 553 | 0 | 13 | 125 | 206 | 119 | 87 | 12 | 0 |
import System.Nemesis
import System.Nemesis.Utils
import Prelude hiding ((-))
main = run nemesis
nemesis = do
clean
[ "**/*.hi"
, "**/*.o"
, "manifest"
]
desc "dist"
task "dist" - do
sh "cabal clean"
sh "cabal configure"
sh "cabal sdist"
desc "watch hs"
task "watch-hs" - do
sh "find . -name '*.hs' | entr runghc Nemesis.hs emacs-restart"
desc "emacs-restart"
task "emacs-restart" - do
sh "emacsclient -e '(haskell-restart)'"
|
nfjinjing/nemesis
|
Nemesis.hs
|
bsd-3-clause
| 482 | 0 | 10 | 121 | 129 | 58 | 71 | 20 | 1 |
module Network.LambNyaa.Sink (
Sink (..), sink, sink_, printItem, seen, unseen
) where
import Data.Monoid
import Network.LambNyaa.Config
import Network.LambNyaa.Item
import Network.LambNyaa.Database
newtype Sink = Sink {unSink :: Config -> [Item] -> IO ()}
-- | Create a Sink from any IO action.
sink :: (Config -> [Item] -> IO ()) -> Sink
sink = Sink
-- | Create a Sink from any IO action; Config-less version.
sink_ :: ([Item] -> IO ()) -> Sink
sink_ f = sink $ const f
-- | Print an Item to stdout.
printItem :: Sink
printItem = sink_ $ mapM_ print
-- | Mark an item as seen.
seen :: Sink
seen = sink $ \cfg is -> withSQLite cfg $ \c -> do
mapM_ (\i -> markSeen (itmIdentifier i) True c) is
-- | Mark an item as unseen.
unseen :: Sink
unseen = sink $ \cfg is -> withSQLite cfg $ \c -> do
mapM_ (\i -> markSeen (itmIdentifier i) False c) is
instance Monoid Sink where
mempty = sink_ . const $ return ()
mappend a b = sink $ \cfg is -> do
unSink a cfg is
unSink b cfg is
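-- A small usage sketch: 'Sink's form a 'Monoid', so several sinks can be
-- combined and run as one ('cfg' and 'items' below are illustrative values):
--
-- > unSink (printItem <> seen) cfg items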
|
valderman/lambnyaa
|
Network/LambNyaa/Sink.hs
|
bsd-3-clause
| 999 | 0 | 16 | 219 | 368 | 201 | 167 | 24 | 1 |
module TaskList
( dispatch
) where
import Euclid
import FastDegree
import GeneratePrimeNumber
import qualified RSA as R
import Factorization
import CustomCodes
dispatch :: String -> [String] -> IO ()
dispatch "-extend-euclid" params = case params of
[a, b] -> print $ euc (read a) (read b)
_ -> printHelper
dispatch "-fast-degree" params = case params of
[n, a, e] -> print $ deg (read n) (read a) (read e)
_ -> printHelper
dispatch "-prime-factor" params = case params of
[n] -> do
prime <- genPrime (read n)
print prime
_ -> printHelper
dispatch "-rsa-params-factor" params = case params of
[n, k_name] -> do
params@(pub_k, pr_k) <- R.genParams (read n)
writeFile ("./.keys/" ++ k_name ++ "_rsa.pub") (show pub_k)
writeFile ("./.keys/" ++ k_name ++ "_rsa") (show pr_k)
print params
_ -> printHelper
dispatch "-encrypt" params = case params of
[k_name, text] -> do
pub_k <- readFile ("./.keys/" ++ k_name ++ "_rsa.pub")
print $ R.encrypt (read pub_k) text
_ -> printHelper
dispatch "-decrypt" params = case params of
[k_name, code] -> do
pr_k <- readFile ("./.keys/" ++ k_name ++ "_rsa")
print $ R.decrypt (read pr_k) (read code)
_ -> printHelper
dispatch "-second-task" params = case params of
[n, e, sw] -> do
pq <- fact (read n)
print pq
let p_key = R.calcPrivateKey pq (read e)
putStrLn $ customDecrypt p_key (read sw)
_ -> printHelper
dispatch "-help" _ = printHelper
dispatch _ _ = printHelper
printHelper :: IO ()
printHelper = putStr $ "cmd = [-extend-euclid | -fast-degree | -prime-factor | -rsa-params-factor | -encrypt | -decrypt | -second-task | -help]\n"
++ "in/out values = -extend-euclid {A} {B} -> {x} {y}\n"
++ " -fast-degree {N - modulo} {A - number} {E - degree} -> {r - result}\n"
++ " -prime-factor {N - bit count} -> {p - prime number}\n"
++ " -rsa-params-factor {N - bit count} {KEYS_NAME} -> {(pub_key, pr_key) - public and private keys with modulo}\n"
++ " -encrypt {KEYS_NAME} {TEXT} -> {code}\n"
++ " -decrypt {KEYS_NAME} {CODE} -> {text}\n"
++ " -second-task {N, E, SW} -> {(p, q) - factorization result} {text}\n"
|
GOGEN/rsa
|
app/TaskList.hs
|
bsd-3-clause
| 3,545 | 0 | 15 | 1,842 | 650 | 328 | 322 | 55 | 8 |
divide :: Double -> Double -> Double
divide 0 b = b
divide a b = b / a
|
YoshikuniJujo/funpaala
|
samples/07_polymorphic/divide0.hs
|
bsd-3-clause
| 71 | 0 | 6 | 19 | 38 | 19 | 19 | 3 | 1 |
{-# OPTIONS -cpp #-}
-- OPTIONS required for ghc-6.4.x compat, and must appear first
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -cpp #-}
{-# OPTIONS_NHC98 -cpp #-}
{-# OPTIONS_JHC -fcpp #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.InstallSymlink
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Managing installing binaries with symlinks.
-----------------------------------------------------------------------------
module Distribution.Client.InstallSymlink (
symlinkBinaries,
symlinkBinary,
) where
#if mingw32_HOST_OS || mingw32_TARGET_OS
import Distribution.Package (PackageIdentifier)
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Client.Setup (InstallFlags)
import Distribution.Simple.Setup (ConfigFlags)
symlinkBinaries :: ConfigFlags
-> InstallFlags
-> InstallPlan
-> IO [(PackageIdentifier, String, FilePath)]
symlinkBinaries _ _ _ = return []
symlinkBinary :: FilePath -> FilePath -> String -> String -> IO Bool
symlinkBinary _ _ _ _ = fail "Symlinking feature not available on Windows"
#else
import Distribution.Client.Types
( SourcePackage(..), ConfiguredPackage(..) )
import Distribution.Client.Setup
( InstallFlags(installSymlinkBinDir) )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Package
( PackageIdentifier, Package(packageId) )
import Distribution.Compiler
( CompilerId(..) )
import qualified Distribution.PackageDescription as PackageDescription
import Distribution.PackageDescription
( PackageDescription )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription )
import Distribution.Simple.Setup
( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe )
import qualified Distribution.Simple.InstallDirs as InstallDirs
import System.Posix.Files
( getSymbolicLinkStatus, isSymbolicLink, createSymbolicLink
, removeLink )
import System.Directory
( canonicalizePath )
import System.FilePath
( (</>), splitPath, joinPath, isAbsolute )
import Prelude hiding (catch, ioError)
import System.IO.Error
( isDoesNotExistError, ioError )
import Control.Exception
( catch, assert )
import Data.Maybe
( catMaybes )
-- | We would like by default to install binaries into some location that is on
-- the user's PATH. For per-user installations on Unix systems that basically
-- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@
-- directory will be on the user's PATH. However some people are a bit nervous
-- about letting a package manager install programs into @~/bin/@.
--
-- A compromise solution is that instead of installing binaries directly into
-- @~/bin/@, we could install them in a private location under @~/.cabal/bin@
-- and then create symlinks in @~/bin/@. We can be careful when setting up the
-- symlinks that we do not overwrite any binary that the user installed. We can
-- check if it was a symlink we made because it would point to the private dir
-- where we install our binaries. This means we can install normally without
-- worrying and in a later phase set up symlinks, and if that fails then we
-- report it to the user, but even in this case the package is still in an ok
-- installed state.
--
-- This is an optional feature that users can choose to use or not. It is
-- controlled from the config file. Of course it only works on posix systems
-- with symlinks so is not available to Windows users.
--
symlinkBinaries :: ConfigFlags
-> InstallFlags
-> InstallPlan
-> IO [(PackageIdentifier, String, FilePath)]
symlinkBinaries configFlags installFlags plan =
case flagToMaybe (installSymlinkBinDir installFlags) of
Nothing -> return []
Just symlinkBinDir
| null exes -> return []
| otherwise -> do
publicBinDir <- canonicalizePath symlinkBinDir
-- TODO: do we want to do this here? :
-- createDirectoryIfMissing True publicBinDir
fmap catMaybes $ sequence
[ do privateBinDir <- pkgBinDir pkg
ok <- symlinkBinary
publicBinDir privateBinDir
publicExeName privateExeName
if ok
then return Nothing
else return (Just (pkgid, publicExeName,
privateBinDir </> privateExeName))
| (pkg, exe) <- exes
, let publicExeName = PackageDescription.exeName exe
privateExeName = prefix ++ publicExeName ++ suffix
pkgid = packageId pkg
prefix = substTemplate pkgid prefixTemplate
suffix = substTemplate pkgid suffixTemplate ]
where
exes =
[ (pkg, exe)
| InstallPlan.Installed cpkg _ <- InstallPlan.toList plan
, let pkg = pkgDescription cpkg
, exe <- PackageDescription.executables pkg
, PackageDescription.buildable (PackageDescription.buildInfo exe) ]
pkgDescription :: ConfiguredPackage -> PackageDescription
pkgDescription (ConfiguredPackage (SourcePackage _ pkg _) flags _) =
case finalizePackageDescription flags
(const True)
platform compilerId [] pkg of
Left _ -> error "finalizePackageDescription ConfiguredPackage failed"
Right (desc, _) -> desc
-- This is sadly rather complicated. We're kind of re-doing part of the
-- configuration for the package. :-(
pkgBinDir :: PackageDescription -> IO FilePath
pkgBinDir pkg = do
defaultDirs <- InstallDirs.defaultInstallDirs
compilerFlavor
(fromFlag (configUserInstall configFlags))
(PackageDescription.hasLibs pkg)
let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault
defaultDirs (configInstallDirs configFlags)
absoluteDirs = InstallDirs.absoluteInstallDirs
(packageId pkg) compilerId InstallDirs.NoCopyDest
templateDirs
canonicalizePath (InstallDirs.bindir absoluteDirs)
substTemplate pkgid = InstallDirs.fromPathTemplate
. InstallDirs.substPathTemplate env
where env = InstallDirs.initialPathTemplateEnv pkgid compilerId
fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "")
prefixTemplate = fromFlagTemplate (configProgPrefix configFlags)
suffixTemplate = fromFlagTemplate (configProgSuffix configFlags)
platform = InstallPlan.planPlatform plan
compilerId@(CompilerId compilerFlavor _) = InstallPlan.planCompiler plan
symlinkBinary :: FilePath -- ^ The canonical path of the public bin dir
                          --   eg @/home/user/bin@
              -> FilePath -- ^ The canonical path of the private bin dir
                          --   eg @/home/user/.cabal/bin@
              -> String   -- ^ The name of the executable to go in the public
                          --   bin dir, eg @foo@
              -> String   -- ^ The name of the executable to go in the private
                          --   bin dir, eg @foo-1.0@
              -> IO Bool  -- ^ If creating the symlink was successful. @False@
                          --   if there was another file there already that we
                          --   did not own. Other errors like permission errors
                          --   just propagate as exceptions.
symlinkBinary publicBindir privateBindir publicName privateName = do
ok <- targetOkToOverwrite (publicBindir </> publicName)
(privateBindir </> privateName)
case ok of
NotOurFile -> return False
NotExists -> mkLink >> return True
OkToOverwrite -> rmLink >> mkLink >> return True
where
relativeBindir = makeRelative publicBindir privateBindir
mkLink = createSymbolicLink (relativeBindir </> privateName)
(publicBindir </> publicName)
rmLink = removeLink (publicBindir </> publicName)
-- | Check a filepath of a symlink that we would like to create to see if it
-- is ok. For it to be ok to overwrite it must either not already exist yet or
-- be a symlink to our target (in which case we can assume ownership).
--
targetOkToOverwrite :: FilePath -- ^ The filepath of the symlink to the private
-- binary that we would like to create
-> FilePath -- ^ The canonical path of the private binary.
-- Use 'canonicalizePath' to make this.
-> IO SymlinkStatus
targetOkToOverwrite symlink target = handleNotExist $ do
status <- getSymbolicLinkStatus symlink
if not (isSymbolicLink status)
then return NotOurFile
else do target' <- canonicalizePath symlink
-- This relies on canonicalizePath handling symlinks
if target == target'
then return OkToOverwrite
else return NotOurFile
where
handleNotExist action = catch action $ \ioexception ->
-- If the target doesn't exist then there's no problem overwriting it!
if isDoesNotExistError ioexception
then return NotExists
else ioError ioexception
data SymlinkStatus
  = NotExists     -- ^ The file doesn't exist so we can make a symlink.
  | OkToOverwrite -- ^ A symlink already exists, though it is ours. We'll
                  -- have to delete it first before we make a new symlink.
  | NotOurFile    -- ^ A file already exists and it is not one of our existing
                  -- symlinks (either because it is not a symlink or because
                  -- it points somewhere other than our managed space).
  deriving Show
-- | Take two canonical paths and produce a relative path to get from the first
-- to the second, even if it means adding @..@ path components.
--
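-- For example:
--
-- > makeRelative "/home/user/bin" "/home/user/.cabal/bin" == "../.cabal/bin"
--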
makeRelative :: FilePath -> FilePath -> FilePath
makeRelative a b = assert (isAbsolute a && isAbsolute b) $
let as = splitPath a
bs = splitPath b
commonLen = length $ takeWhile id $ zipWith (==) as bs
in joinPath $ [ ".." | _ <- drop commonLen as ]
++ [ b' | b' <- drop commonLen bs ]
#endif
|
IreneKnapp/Faction
|
faction/Distribution/Client/InstallSymlink.hs
|
bsd-3-clause
| 10,568 | 0 | 10 | 2,844 | 170 | 105 | 65 | 142 | 4 |
module Day24 where
import Data.Function
import Data.List
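-- Advent of Code, day 24: split the package weights into 3 (partOne) or 4
-- (partTwo) groups of equal weight; among the smallest valid first groups,
-- report the minimal product.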
partOne = product $ head $ sortOn product $ head $
groupBy ((==) `on` length) $
sortOn length $
filter (\xs -> genericSum xs == weightNeeded 3 && g xs 3) $ subsequences input
partTwo = product $ head $ sortOn product $ head $
groupBy ((==) `on` length) $
sortOn length $
filter (\xs -> genericSum xs == weightNeeded 4 && g2 xs) $ subsequences input
g xs i = any (\ys -> genericSum ys == weightNeeded i) $ subsequences (input \\ xs)
g2 xs = any (\ys -> genericSum ys == weightNeeded 4 && g ys 4) $ subsequences (input \\ xs)
genericSum = fromIntegral . sum
weightNeeded x = genericSum input / x
input :: [Integer]
input = map read $ words "1 3 5 11 13 17 19 23 29 31 41 43 47 53 59 61 67 71 73 79 83 89 97 101 103 107 109 113"
|
z0isch/advent-of-code
|
src/Day24.hs
|
bsd-3-clause
| 822 | 0 | 12 | 199 | 328 | 165 | 163 | 17 | 1 |
{-# OPTIONS_GHC -w #-}
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.PrettyPrint.Leijen
-- Copyright : Daan Leijen (c) 2000, http://www.cs.uu.nl/~daan
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : otakar.smrz cmu.edu
-- Stability : provisional
-- Portability : portable
--
-- Pretty print module based on Philip Wadler's \"prettier printer\"
--
-- @
-- \"A prettier printer\"
-- Draft paper, April 1997, revised March 1998.
-- <http://cm.bell-labs.com/cm/cs/who/wadler/papers/prettier/prettier.ps>
-- @
--
-- PPrint is an implementation of the pretty printing combinators
-- described by Philip Wadler (1997). In their bare essence, the
-- combinators of Wadler are not expressive enough to describe some
-- commonly occurring layouts. The PPrint library adds new primitives
-- to describe these layouts and works well in practice.
--
-- The library is based on a single way to concatenate documents,
-- which is associative and has both a left and right unit. This
-- simple design leads to an efficient and short implementation. The
-- simplicity is reflected in the predictable behaviour of the
-- combinators which make them easy to use in practice.
--
-- A thorough description of the primitive combinators and their
-- implementation can be found in Philip Wadler's paper
-- (1997). Additions and the main differences with his original paper
-- are:
--
-- * The nil document is called empty.
--
-- * The above combinator is called '<$>'. The operator '</>' is used
-- for soft line breaks.
--
-- * There are three new primitives: 'align', 'fill' and
-- 'fillBreak'. These are very useful in practice.
--
-- * Lots of other useful combinators, like 'fillSep' and 'list'.
--
-- * There are two renderers, 'renderPretty' for pretty printing and
-- 'renderCompact' for compact output. The pretty printing algorithm
-- also uses a ribbon-width now for even prettier output.
--
-- * There are two displayers, 'displayS' for strings and 'displayIO' for
-- file based output.
--
-- * There is a 'Pretty' class.
--
-- * The implementation uses optimised representations and strictness
-- annotations.
--
-- Full documentation available at <http://www.cs.uu.nl/~daan/download/pprint/pprint.html>.
-----------------------------------------------------------
module Text.PrettyPrint.Leijen (
-- * Documents
Doc, putDoc, hPutDoc,
-- * Basic combinators
empty, char, text, (<>), nest, line, linebreak, group, softline,
softbreak,
-- * Alignment
--
-- The combinators in this section can not be described by Wadler's
-- original combinators. They align their output relative to the
-- current output position - in contrast to @nest@ which always
-- aligns to the current nesting level. This prevents these
-- combinators from being \`optimal\'. In practice however they
-- prove to be very useful. The combinators in this section should
-- be used with care, since they are more expensive than the other
-- combinators. For example, @align@ shouldn't be used to pretty
-- print all top-level declarations of a language, but using @hang@
-- for let expressions is fine.
align, hang, indent, encloseSep, list, tupled, semiBraces,
-- * Operators
(<+>), (<$>), (</>), (<$$>), (<//>),
-- * List combinators
hsep, vsep, fillSep, sep, hcat, vcat, fillCat, cat, punctuate,
-- * Fillers
fill, fillBreak,
-- * Bracketing combinators
enclose, squotes, dquotes, parens, angles, braces, brackets,
-- * Character documents
lparen, rparen, langle, rangle, lbrace, rbrace, lbracket, rbracket,
squote, dquote, semi, colon, comma, space, dot, backslash, equals,
-- * Primitive type documents
string, int, integer, float, double, rational,
-- * Pretty class
Pretty(..),
-- * Rendering
SimpleDoc(..), renderPretty, renderCompact, displayS, displayIO
-- * Undocumented
, bool
, column, nesting, width
, isEmpty
) where
import System.IO (Handle,hPutStr,hPutChar,stdout)
#if __GLASGOW_HASKELL__ >= 710
import Prelude hiding ((<$>))
#endif
infixr 5 </>,<//>,<$>,<$$>
infixr 6 <>,<+>
-----------------------------------------------------------
-- list, tupled and semiBraces pretty print a list of
-- documents either horizontally or vertically aligned.
-----------------------------------------------------------
-- | The document @(list xs)@ comma separates the documents @xs@ and
-- encloses them in square brackets. The documents are rendered
-- horizontally if that fits the page. Otherwise they are aligned
-- vertically. All comma separators are put in front of the elements.
list :: [Doc] -> Doc
list = encloseSep lbracket rbracket comma
-- | The document @(tupled xs)@ comma separates the documents @xs@ and
-- encloses them in parenthesis. The documents are rendered
-- horizontally if that fits the page. Otherwise they are aligned
-- vertically. All comma separators are put in front of the elements.
tupled :: [Doc] -> Doc
tupled = encloseSep lparen rparen comma
-- | The document @(semiBraces xs)@ separates the documents @xs@ with
-- semi colons and encloses them in braces. The documents are rendered
-- horizontally if that fits the page. Otherwise they are aligned
-- vertically. All semi colons are put in front of the elements.
semiBraces :: [Doc] -> Doc
semiBraces = encloseSep lbrace rbrace semi
-- | The document @(encloseSep l r sep xs)@ concatenates the documents
-- @xs@ separated by @sep@ and encloses the resulting document by @l@
-- and @r@. The documents are rendered horizontally if that fits the
-- page. Otherwise they are aligned vertically. All separators are put
-- in front of the elements. For example, the combinator 'list' can be
-- defined with @encloseSep@:
--
-- > list xs = encloseSep lbracket rbracket comma xs
-- > test = text "list" <+> (list (map int [10,200,3000]))
--
-- Which is laid out with a page width of 20 as:
--
-- @
-- list [10,200,3000]
-- @
--
-- But when the page width is 15, it is laid out as:
--
-- @
-- list [10
-- ,200
-- ,3000]
-- @
encloseSep :: Doc -> Doc -> Doc -> [Doc] -> Doc
encloseSep left right sep ds
= case ds of
[] -> left <> right
[d] -> left <> d <> right
_ -> align (cat (zipWith (<>) (left : repeat sep) ds) <> right)
-----------------------------------------------------------
-- punctuate p [d1,d2,...,dn] => [d1 <> p,d2 <> p, ... ,dn]
-----------------------------------------------------------
-- | @(punctuate p xs)@ concatenates all documents in @xs@ with
-- document @p@ except for the last document.
--
-- > someText = map text ["words","in","a","tuple"]
-- > test = parens (align (cat (punctuate comma someText)))
--
-- This is laid out on a page width of 20 as:
--
-- @
-- (words,in,a,tuple)
-- @
--
-- But when the page width is 15, it is laid out as:
--
-- @
-- (words,
-- in,
-- a,
-- tuple)
-- @
--
-- (If you want to put the commas in front of their elements instead of
-- at the end, you should use 'tupled' or, in general, 'encloseSep'.)
punctuate :: Doc -> [Doc] -> [Doc]
punctuate p [] = []
punctuate p [d] = [d]
punctuate p (d:ds) = (d <> p) : punctuate p ds
-----------------------------------------------------------
-- high-level combinators
-----------------------------------------------------------
-- | The document @(sep xs)@ concatenates all documents @xs@ either
-- horizontally with @(\<+\>)@, if it fits the page, or vertically with
-- @(\<$\>)@.
--
-- > sep xs = group (vsep xs)
sep :: [Doc] -> Doc
sep = group . vsep
-- | The document @(fillSep xs)@ concatenates documents @xs@
-- horizontally with @(\<+\>)@ as long as it fits the page, then
-- inserts a @line@ and continues doing that for all documents in
-- @xs@.
--
-- > fillSep xs = foldr (\<\/\>) empty xs
fillSep :: [Doc] -> Doc
fillSep = fold (</>)
-- | The document @(hsep xs)@ concatenates all documents @xs@
-- horizontally with @(\<+\>)@.
hsep :: [Doc] -> Doc
hsep = fold (<+>)
-- | The document @(vsep xs)@ concatenates all documents @xs@
-- vertically with @(\<$\>)@. If a 'group' undoes the line breaks
-- inserted by @vsep@, all documents are separated with a space.
--
-- > someText = map text (words ("text to lay out"))
-- >
-- > test = text "some" <+> vsep someText
--
-- This is laid out as:
--
-- @
-- some text
-- to
-- lay
-- out
-- @
--
-- The 'align' combinator can be used to align the documents under
-- their first element
--
-- > test = text "some" <+> align (vsep someText)
--
-- Which is printed as:
--
-- @
-- some text
-- to
-- lay
-- out
-- @
vsep :: [Doc] -> Doc
vsep = fold (<$>)
-- | The document @(cat xs)@ concatenates all documents @xs@ either
-- horizontally with @(\<\>)@, if it fits the page, or vertically with
-- @(\<$$\>)@.
--
-- > cat xs = group (vcat xs)
cat :: [Doc] -> Doc
cat = group . vcat
-- | The document @(fillCat xs)@ concatenates documents @xs@
-- horizontally with @(\<\>)@ as long as it fits the page, then inserts
-- a @linebreak@ and continues doing that for all documents in @xs@.
--
-- > fillCat xs = foldr (\<\/\/\>) empty xs
fillCat :: [Doc] -> Doc
fillCat = fold (<//>)
-- | The document @(hcat xs)@ concatenates all documents @xs@
-- horizontally with @(\<\>)@.
hcat :: [Doc] -> Doc
hcat = fold (<>)
-- | The document @(vcat xs)@ concatenates all documents @xs@
-- vertically with @(\<$$\>)@. If a 'group' undoes the line breaks
-- inserted by @vcat@, all documents are directly concatenated.
vcat :: [Doc] -> Doc
vcat = fold (<$$>)
fold f [] = empty
fold f ds = foldr1 f ds
-- | The document @(x \<\> y)@ concatenates document @x@ and document
-- @y@. It is an associative operation having 'empty' as a left and
-- right unit. (infixr 6)
(<>) :: Doc -> Doc -> Doc
x <> y = x `beside` y
-- | The document @(x \<+\> y)@ concatenates document @x@ and @y@ with a
-- @space@ in between. (infixr 6)
(<+>) :: Doc -> Doc -> Doc
x <+> y = x <> space <> y
-- | The document @(x \<\/\> y)@ concatenates document @x@ and @y@ with a
-- 'softline' in between. This effectively puts @x@ and @y@ either
-- next to each other (with a @space@ in between) or underneath each
-- other. (infixr 5)
(</>) :: Doc -> Doc -> Doc
x </> y = x <> softline <> y
-- | The document @(x \<\/\/\> y)@ concatenates document @x@ and @y@ with
-- a 'softbreak' in between. This effectively puts @x@ and @y@ either
-- right next to each other or underneath each other. (infixr 5)
(<//>) :: Doc -> Doc -> Doc
x <//> y = x <> softbreak <> y
-- | The document @(x \<$\> y)@ concatenates document @x@ and @y@ with a
-- 'line' in between. (infixr 5)
(<$>) :: Doc -> Doc -> Doc
x <$> y = x <> line <> y
-- | The document @(x \<$$\> y)@ concatenates document @x@ and @y@ with
-- a @linebreak@ in between. (infixr 5)
(<$$>) :: Doc -> Doc -> Doc
x <$$> y = x <> linebreak <> y
-- | The document @softline@ behaves like 'space' if the resulting
-- output fits the page, otherwise it behaves like 'line'.
--
-- > softline = group line
softline :: Doc
softline = group line
-- | The document @softbreak@ behaves like 'empty' if the resulting
-- output fits the page, otherwise it behaves like 'line'.
--
-- > softbreak = group linebreak
softbreak :: Doc
softbreak = group linebreak
-- | Document @(squotes x)@ encloses document @x@ with single quotes
-- \"'\".
squotes :: Doc -> Doc
squotes = enclose squote squote
-- | Document @(dquotes x)@ encloses document @x@ with double quotes
-- '\"'.
dquotes :: Doc -> Doc
dquotes = enclose dquote dquote
-- | Document @(braces x)@ encloses document @x@ in braces, \"{\" and
-- \"}\".
braces :: Doc -> Doc
braces = enclose lbrace rbrace
-- | Document @(parens x)@ encloses document @x@ in parenthesis, \"(\"
-- and \")\".
parens :: Doc -> Doc
parens = enclose lparen rparen
-- | Document @(angles x)@ encloses document @x@ in angles, \"\<\" and
-- \"\>\".
angles :: Doc -> Doc
angles = enclose langle rangle
-- | Document @(brackets x)@ encloses document @x@ in square brackets,
-- \"[\" and \"]\".
brackets :: Doc -> Doc
brackets = enclose lbracket rbracket
-- | The document @(enclose l r x)@ encloses document @x@ between
-- documents @l@ and @r@ using @(\<\>)@.
--
-- > enclose l r x = l <> x <> r
enclose :: Doc -> Doc -> Doc -> Doc
enclose l r x = l <> x <> r
-- | The document @lparen@ contains a left parenthesis, \"(\".
lparen :: Doc
lparen = char '('
-- | The document @rparen@ contains a right parenthesis, \")\".
rparen :: Doc
rparen = char ')'
-- | The document @langle@ contains a left angle, \"\<\".
langle :: Doc
langle = char '<'
-- | The document @rangle@ contains a right angle, \">\".
rangle :: Doc
rangle = char '>'
-- | The document @lbrace@ contains a left brace, \"{\".
lbrace :: Doc
lbrace = char '{'
-- | The document @rbrace@ contains a right brace, \"}\".
rbrace :: Doc
rbrace = char '}'
-- | The document @lbracket@ contains a left square bracket, \"[\".
lbracket :: Doc
lbracket = char '['
-- | The document @rbracket@ contains a right square bracket, \"]\".
rbracket :: Doc
rbracket = char ']'
-- | The document @squote@ contains a single quote, \"'\".
squote :: Doc
squote = char '\''
-- | The document @dquote@ contains a double quote, '\"'.
dquote :: Doc
dquote = char '"'
-- | The document @semi@ contains a semicolon, \";\".
semi :: Doc
semi = char ';'
-- | The document @colon@ contains a colon, \":\".
colon :: Doc
colon = char ':'
-- | The document @comma@ contains a comma, \",\".
comma :: Doc
comma = char ','
-- | The document @space@ contains a single space, \" \".
--
-- > x <+> y = x <> space <> y
space :: Doc
space = char ' '
-- | The document @dot@ contains a single dot, \".\".
dot :: Doc
dot = char '.'
-- | The document @backslash@ contains a back slash, \"\\\".
backslash :: Doc
backslash = char '\\'
-- | The document @equals@ contains an equal sign, \"=\".
equals :: Doc
equals = char '='
-----------------------------------------------------------
-- Combinators for prelude types
-----------------------------------------------------------
-- string is like "text" but replaces '\n' by "line"
-- | The document @(string s)@ concatenates all characters in @s@
-- using @line@ for newline characters and @char@ for all other
-- characters. It is used instead of 'text' whenever the text contains
-- newline characters.
string :: String -> Doc
string "" = empty
string ('\n':s) = line <> string s
string s = case (span (/='\n') s) of
(xs,ys) -> text xs <> string ys
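-- For instance (a small equivalence that follows directly from the
-- clauses above):
--
-- > string "hello\nworld"    -- behaves like: text "hello" <> line <> text "world"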
bool :: Bool -> Doc
bool b = text (show b)
-- | The document @(int i)@ shows the literal integer @i@ using
-- 'text'.
int :: Int -> Doc
int i = text (show i)
-- | The document @(integer i)@ shows the literal integer @i@ using
-- 'text'.
integer :: Integer -> Doc
integer i = text (show i)
-- | The document @(float f)@ shows the literal float @f@ using
-- 'text'.
float :: Float -> Doc
float f = text (show f)
-- | The document @(double d)@ shows the literal double @d@ using
-- 'text'.
double :: Double -> Doc
double d = text (show d)
-- | The document @(rational r)@ shows the literal rational @r@ using
-- 'text'.
rational :: Rational -> Doc
rational r = text (show r)
-----------------------------------------------------------
-- overloading "pretty"
-----------------------------------------------------------
-- | The member @prettyList@ is only used to define the @instance Pretty
-- a => Pretty [a]@. In normal circumstances only the @pretty@ function
-- is used.
class Pretty a where
pretty :: a -> Doc
prettyList :: [a] -> Doc
prettyList = list . map pretty
instance Pretty a => Pretty [a] where
pretty = prettyList
instance Pretty Doc where
pretty = id
instance Pretty () where
pretty () = text "()"
instance Pretty Bool where
pretty b = bool b
instance Pretty Char where
pretty c = char c
prettyList s = string s
instance Pretty Int where
pretty i = int i
instance Pretty Integer where
pretty i = integer i
instance Pretty Float where
pretty f = float f
instance Pretty Double where
pretty d = double d
--instance Pretty Rational where
-- pretty r = rational r
instance (Pretty a,Pretty b) => Pretty (a,b) where
pretty (x,y) = tupled [pretty x, pretty y]
instance (Pretty a,Pretty b,Pretty c) => Pretty (a,b,c) where
pretty (x,y,z)= tupled [pretty x, pretty y, pretty z]
instance Pretty a => Pretty (Maybe a) where
pretty Nothing = empty
pretty (Just x) = pretty x
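-- A sketch of an instance for a user-defined type (the @Point@ type below
-- is invented for this example):
--
-- > data Point = Point Int Int
-- >
-- > instance Pretty Point where
-- >   pretty (Point px py) = text "Point" <+> parens (int px <> comma <+> int py)
-- >
-- > -- show (pretty (Point 1 2))  ==  "Point (1, 2)"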
-----------------------------------------------------------
-- semi primitive: fill and fillBreak
-----------------------------------------------------------
-- | The document @(fillBreak i x)@ first renders document @x@. It
-- then appends @space@s until the width is equal to @i@. If the
-- width of @x@ is already larger than @i@, the nesting level is
-- increased by @i@ and a @line@ is appended. When we redefine @ptype@
-- in the previous example to use @fillBreak@, we get a useful
-- variation of the previous output:
--
-- > ptype (name,tp)
-- > = fillBreak 6 (text name) <+> text "::" <+> text tp
--
-- The output will now be:
--
-- @
-- let empty :: Doc
-- nest :: Int -> Doc -> Doc
-- linebreak
-- :: Doc
-- @
fillBreak :: Int -> Doc -> Doc
fillBreak f x = width x (\w ->
if (w > f) then nest f linebreak
else text (spaces (f - w)))
-- | The document @(fill i x)@ renders document @x@. It then appends
-- @space@s until the width is equal to @i@. If the width of @x@ is
-- already larger, nothing is appended. This combinator is quite
-- useful in practice to output a list of bindings. The following
-- example demonstrates this.
--
-- > types = [("empty","Doc")
-- > ,("nest","Int -> Doc -> Doc")
-- > ,("linebreak","Doc")]
-- >
-- > ptype (name,tp)
-- > = fill 6 (text name) <+> text "::" <+> text tp
-- >
-- > test = text "let" <+> align (vcat (map ptype types))
--
-- Which is laid out as:
--
-- @
-- let empty :: Doc
-- nest :: Int -> Doc -> Doc
-- linebreak :: Doc
-- @
fill :: Int -> Doc -> Doc
fill f d = width d (\w ->
if (w >= f) then empty
else text (spaces (f - w)))
width :: Doc -> (Int -> Doc) -> Doc
width d f = column (\k1 -> d <> column (\k2 -> f (k2 - k1)))
-----------------------------------------------------------
-- semi primitive: Alignment and indentation
-----------------------------------------------------------
-- | The document @(indent i x)@ indents document @x@ with @i@ spaces.
--
-- > test = indent 4 (fillSep (map text
-- > (words "the indent combinator indents these words !")))
--
-- Which lays out with a page width of 20 as:
--
-- @
-- the indent
-- combinator
-- indents these
-- words !
-- @
indent :: Int -> Doc -> Doc
indent i d = hang i (text (spaces i) <> d)
-- | The hang combinator implements hanging indentation. The document
-- @(hang i x)@ renders document @x@ with a nesting level set to the
-- current column plus @i@. The following example uses hanging
-- indentation for some text:
--
-- > test = hang 4 (fillSep (map text
-- > (words "the hang combinator indents these words !")))
--
-- Which lays out on a page with a width of 20 characters as:
--
-- @
-- the hang combinator
-- indents these
-- words !
-- @
--
-- The @hang@ combinator is implemented as:
--
-- > hang i x = align (nest i x)
hang :: Int -> Doc -> Doc
hang i d = align (nest i d)
-- | The document @(align x)@ renders document @x@ with the nesting
-- level set to the current column. It is used for example to
-- implement 'hang'.
--
-- As an example, we will put a document right above another one,
-- regardless of the current nesting level:
--
-- > x $$ y = align (x <$> y)
--
-- > test = text "hi" <+> (text "nice" $$ text "world")
--
-- which will be laid out as:
--
-- @
-- hi nice
-- world
-- @
align :: Doc -> Doc
align d = column (\k ->
nesting (\i -> nest (k - i) d)) --nesting might be negative :-)
-----------------------------------------------------------
-- Primitives
-----------------------------------------------------------
-- | The abstract data type @Doc@ represents pretty documents.
--
-- @Doc@ is an instance of the 'Show' class. @(show doc)@ pretty
-- prints document @doc@ with a page width of 80 characters and a
-- ribbon width of 32 characters.
--
-- > show (text "hello" <$> text "world")
--
-- Which would return the string \"hello\\nworld\", i.e.
--
-- @
-- hello
-- world
-- @
data Doc = Empty
| Char Char -- invariant: char is not '\n'
| Text !Int String -- invariant: text doesn't contain '\n'
| Line !Bool -- True <=> when undone by group, do not insert a space
| Cat Doc Doc
| Nest !Int Doc
| Union Doc Doc -- invariant: first lines of first doc longer than the first lines of the second doc
| Column (Int -> Doc)
| Nesting (Int -> Doc)
isEmpty :: Doc -> Bool
isEmpty Empty = True
isEmpty _ = False
-- | The data type @SimpleDoc@ represents rendered documents and is
-- used by the display functions.
--
-- The @Int@ in @SText@ contains the length of the string. The @Int@
-- in @SLine@ contains the indentation for that line. The library
-- provides two default display functions 'displayS' and
-- 'displayIO'. You can provide your own display function by writing a
-- function from a @SimpleDoc@ to your own output format.
data SimpleDoc = SEmpty
| SChar Char SimpleDoc
| SText !Int String SimpleDoc
| SLine !Int SimpleDoc
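-- As a minimal sketch of such a custom display function (the name
-- @displayString@ is invented here; it is not part of this module):
--
-- > displayString :: SimpleDoc -> String
-- > displayString SEmpty        = ""
-- > displayString (SChar c x)   = c : displayString x
-- > displayString (SText _ s x) = s ++ displayString x
-- > displayString (SLine i x)   = '\n' : replicate i ' ' ++ displayString x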
-- | The empty document is, indeed, empty. Although @empty@ has no
-- content, it does have a \'height\' of 1 and behaves exactly like
-- @(text \"\")@ (and is therefore not a unit of @\<$\>@).
empty :: Doc
empty = Empty
-- | The document @(char c)@ contains the literal character @c@. The
-- character shouldn't be a newline (@'\n'@), the function 'line'
-- should be used for line breaks.
char :: Char -> Doc
char '\n' = line
char c = Char c
-- | The document @(text s)@ contains the literal string @s@. The
-- string shouldn't contain any newline (@'\n'@) characters. If the
-- string contains newline characters, the function 'string' should be
-- used.
text :: String -> Doc
text "" = Empty
text s = Text (length s) s
-- | The @line@ document advances to the next line and indents to the
-- current nesting level. Document @line@ behaves like @(text \" \")@
-- if the line break is undone by 'group'.
line :: Doc
line = Line False
-- | The @linebreak@ document advances to the next line and indents to
-- the current nesting level. Document @linebreak@ behaves like
-- 'empty' if the line break is undone by 'group'.
linebreak :: Doc
linebreak = Line True
beside x y = Cat x y
-- | The document @(nest i x)@ renders document @x@ with the current
-- indentation level increased by i (See also 'hang', 'align' and
-- 'indent').
--
-- > nest 2 (text "hello" <$> text "world") <$> text "!"
--
-- outputs as:
--
-- @
-- hello
-- world
-- !
-- @
nest :: Int -> Doc -> Doc
nest i x = Nest i x
column, nesting :: (Int -> Doc) -> Doc
column f = Column f
nesting f = Nesting f
-- | The @group@ combinator is used to specify alternative
-- layouts. The document @(group x)@ undoes all line breaks in
-- document @x@. The resulting line is added to the current line if
-- that fits the page. Otherwise, the document @x@ is rendered without
-- any changes.
group :: Doc -> Doc
group x = Union (flatten x) x
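-- For example (the @doc@ name below is only for illustration):
--
-- > doc = group (text "hello" <$> text "world")
-- >
-- > -- displayS (renderPretty 1.0 80 doc) ""  ==  "hello world"
-- > -- displayS (renderPretty 1.0 6  doc) ""  ==  "hello\nworld"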
flatten :: Doc -> Doc
flatten (Cat x y) = Cat (flatten x) (flatten y)
flatten (Nest i x) = Nest i (flatten x)
flatten (Line break) = if break then Empty else Text 1 " "
flatten (Union x y) = flatten x
flatten (Column f) = Column (flatten . f)
flatten (Nesting f) = Nesting (flatten . f)
flatten other = other --Empty,Char,Text
-----------------------------------------------------------
-- Renderers
-----------------------------------------------------------
-----------------------------------------------------------
-- renderPretty: the default pretty printing algorithm
-----------------------------------------------------------
-- list of indentation/document pairs; saves an indirection over [(Int,Doc)]
data Docs = Nil
| Cons !Int Doc Docs
-- | This is the default pretty printer which is used by 'show',
-- 'putDoc' and 'hPutDoc'. @(renderPretty ribbonfrac width x)@ renders
-- document @x@ with a page width of @width@ and a ribbon width of
-- @(ribbonfrac * width)@ characters. The ribbon width is the maximal
-- amount of non-indentation characters on a line. The parameter
-- @ribbonfrac@ should be between @0.0@ and @1.0@. If it is lower or
-- higher, the ribbon width will be 0 or @width@ respectively.
renderPretty :: Float -> Int -> Doc -> SimpleDoc
renderPretty rfrac w x
= best 0 0 (Cons 0 x Nil)
where
-- r :: the ribbon width in characters
r = max 0 (min w (round (fromIntegral w * rfrac)))
-- best :: n = indentation of current line
-- k = current column
      --              (i.e. (k >= n) && (k - n == count of inserted characters))
best n k Nil = SEmpty
best n k (Cons i d ds)
= case d of
Empty -> best n k ds
Char c -> let k' = k+1 in seq k' (SChar c (best n k' ds))
Text l s -> let k' = k+l in seq k' (SText l s (best n k' ds))
Line _ -> SLine i (best i i ds)
Cat x y -> best n k (Cons i x (Cons i y ds))
Nest j x -> let i' = i+j in seq i' (best n k (Cons i' x ds))
Union x y -> nicest n k (best n k (Cons i x ds))
(best n k (Cons i y ds))
Column f -> best n k (Cons i (f k) ds)
Nesting f -> best n k (Cons i (f i) ds)
--nicest :: r = ribbon width, w = page width,
-- n = indentation of current line, k = current column
      --          x and y, the (simple) documents to choose from.
-- precondition: first lines of x are longer than the first lines of y.
nicest n k x y | fits width x = x
| otherwise = y
where
width = min (w - k) (r - k + n)
fits w x | w < 0 = False
fits w SEmpty = True
fits w (SChar c x) = fits (w - 1) x
fits w (SText l s x) = fits (w - l) x
fits w (SLine i x) = True
-----------------------------------------------------------
-- renderCompact: renders documents without indentation
-- fast and fewer characters output, good for machines
-----------------------------------------------------------
-- | @(renderCompact x)@ renders document @x@ without adding any
-- indentation. Since no \'pretty\' printing is involved, this
-- renderer is very fast. The resulting output contains fewer
-- characters than a pretty printed version and can be used for output
-- that is read by other programs.
renderCompact :: Doc -> SimpleDoc
renderCompact x
= scan 0 [x]
where
scan k [] = SEmpty
scan k (d:ds) = case d of
Empty -> scan k ds
Char c -> let k' = k+1 in seq k' (SChar c (scan k' ds))
Text l s -> let k' = k+l in seq k' (SText l s (scan k' ds))
Line _ -> SLine 0 (scan 0 ds)
Cat x y -> scan k (x:y:ds)
Nest j x -> scan k (x:ds)
Union x y -> scan k (y:ds)
Column f -> scan k (f k:ds)
Nesting f -> scan k (f 0:ds)
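-- For example (the @doc@ name below is only for illustration), nesting
-- information is dropped, so the output stays flush left:
--
-- > doc = nest 2 (text "hello" <$> text "world")
-- >
-- > -- displayS (renderPretty 0.4 80 doc) ""  ==  "hello\n  world"
-- > -- displayS (renderCompact doc) ""        ==  "hello\nworld"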
-----------------------------------------------------------
-- Displayers: displayS and displayIO
-----------------------------------------------------------
-- | @(displayS simpleDoc)@ takes the output @simpleDoc@ from a
-- rendering function and transforms it to a 'ShowS' type (for use in
-- the 'Show' class).
--
-- > showWidth :: Int -> Doc -> String
-- > showWidth w x = displayS (renderPretty 0.4 w x) ""
displayS :: SimpleDoc -> ShowS
displayS SEmpty = id
displayS (SChar c x) = showChar c . displayS x
displayS (SText l s x) = showString s . displayS x
displayS (SLine i x) = showString ('\n':indentation i) . displayS x
-- | @(displayIO handle simpleDoc)@ writes @simpleDoc@ to the file
-- handle @handle@. This function is used for example by 'hPutDoc':
--
-- > hPutDoc handle doc = displayIO handle (renderPretty 0.4 100 doc)
displayIO :: Handle -> SimpleDoc -> IO ()
displayIO handle simpleDoc
= display simpleDoc
where
display SEmpty = return ()
display (SChar c x) = do{ hPutChar handle c; display x}
display (SText l s x) = do{ hPutStr handle s; display x}
display (SLine i x) = do{ hPutStr handle ('\n':indentation i); display x}
-----------------------------------------------------------
-- default pretty printers: show, putDoc and hPutDoc
-----------------------------------------------------------
instance Show Doc where
showsPrec d doc = displayS (renderPretty 0.4 80 doc)
-- | The action @(putDoc doc)@ pretty prints document @doc@ to the
-- standard output, with a page width of 80 characters and a ribbon
-- width of 32 characters.
--
-- > main :: IO ()
-- > main = do{ putDoc (text "hello" <+> text "world") }
--
-- Which would output
--
-- @
-- hello world
-- @
putDoc :: Doc -> IO ()
putDoc doc = hPutDoc stdout doc
-- | @(hPutDoc handle doc)@ pretty prints document @doc@ to the file
-- handle @handle@ with a page width of 80 characters and a ribbon
-- width of 32 characters.
--
-- > main = do{ handle <- openFile "MyFile" WriteMode
-- > ; hPutDoc handle (vcat (map text
-- > ["vertical","text"]))
-- > ; hClose handle
-- > }
hPutDoc :: Handle -> Doc -> IO ()
hPutDoc handle doc = displayIO handle (renderPretty 0.4 80 doc)
-----------------------------------------------------------
-- insert spaces
-- "indentation" used to insert tabs but tabs seem to cause
-- more trouble than they solve :-)
-----------------------------------------------------------
spaces n | n <= 0 = ""
| otherwise = replicate n ' '
indentation n = spaces n
--indentation n | n >= 8 = '\t' : indentation (n-8)
-- | otherwise = spaces n
-- LocalWords: PPrint combinators Wadler Wadler's encloseSep
|
osa1/language-lua
|
src/Text/PrettyPrint/Leijen.hs
|
bsd-3-clause
| 31,470 | 0 | 16 | 7,804 | 4,780 | 2,747 | 2,033 | 303 | 10 |
module Karamaan.Opaleye.Operators (operatorName) where
-- TODO vv I put this take 5 in here because the query strings were getting
-- too long and postgres was complaining that it was truncating them.
-- This is really just a temporary fix, because I'd like to keep the
-- possibility of long names but postprocess the PrimQuery to shorten
-- them before sending them to postgres.
operatorName :: String -> String -> String -> String
operatorName left opName right = concat [t left, "_", opName, "_", t right]
where t = take 5
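-- A worked example (the column names are invented): only the operand
-- names are truncated, the operator name is kept as is.
--
-- > operatorName "salary" "plus" "bonus"  ==  "salar_plus_bonus"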
|
karamaan/karamaan-opaleye
|
Karamaan/Opaleye/Operators.hs
|
bsd-3-clause
| 530 | 0 | 7 | 94 | 80 | 46 | 34 | 4 | 1 |
{- | This module provides the /Simplify RHS/ processor.
@
|- <simp(S#) / simp(W#) + W, Q, T#> :f
------------------------------------------
|- <S# / W# + W, Q, T#> :f
@
, where @simp(R#)@ removes @ri@ from right-hand sides @c_n(r_1,...,r_n)@ if no instance of @ri@ can be rewritten, i.e. if
there is no outgoing edge @i@.
-}
module Tct.Trs.Processor.DP.DPGraph.SimplifyRHS
( simplifyRHSDeclaration
, simplifyRHS
) where
import qualified Data.List as L (partition)
import Data.Maybe (fromMaybe)
import qualified Data.Rewriting.Rule as R (Rule (..))
import qualified Data.Rewriting.Term as R
import qualified Tct.Core.Common.Pretty as PP
import qualified Tct.Core.Common.Xml as Xml
import qualified Tct.Core.Data as T
import Tct.Common.ProofCombinators
import Tct.Trs.Data
import Tct.Trs.Data.DependencyGraph
import qualified Tct.Trs.Data.Problem as Prob
import qualified Tct.Trs.Data.Signature as Sig
import qualified Tct.Trs.Data.Symbol as Symb
import qualified Tct.Trs.Data.Rules as RS
data SimplifyRHS = SimplifyRHS deriving Show
data SimplifyRHSProof
= SimplifyRHSProof
{ wdg_ :: DG F V
, simplified_ :: [R.Rule F V] }
| SimplifyRHSFail
deriving Show
instance T.Processor SimplifyRHS where
type ProofObject SimplifyRHS = ApplicationProof SimplifyRHSProof
type In SimplifyRHS = Trs
type Out SimplifyRHS = Trs
execute SimplifyRHS prob =
maybe simpRHS (\s -> T.abortWith (Inapplicable s :: ApplicationProof SimplifyRHSProof)) (Prob.isDTProblem' prob)
where
simpRHS
| null simplified = T.abortWith (Applicable SimplifyRHSFail)
| otherwise = T.succeedWith1 (Applicable proof) T.fromId nprob
where
wdg = Prob.dependencyGraph prob
-- TODO: MS: this is not optimal;
-- I assumed that all my rhs have compound symbols after the DP transformation; this is not true after the
-- decomposeDG transformation. We can now have rhs of length one which are removed (in practice they are
      -- probably already removed before); i.e. which should be replaced with fresh compound symbols. Either make
-- sure that we have the appropriate format or introduce fresh compound symbols here
elims = [ (isStrict cn, (r, elimRule n r)) | (n,cn) <- lnodes wdg, let r = theRule cn ]
elimRule n (R.Rule l (R.Fun f rs))
| not (Symb.isCompoundFun f) = Nothing
-- | not (Prob.isCompoundf f) = error $ "SimplifyRHS.elim: not a compound symbol: " ++ show f
| otherwise = if length rs' == length rs then Nothing else Just $ R.Rule l (R.Fun f rs')
where
rs' = [ ri | (i,ri) <- zip [1..] rs, i `elem` succs]
succs = [ j | (_,_,j) <- lsuccessors wdg n]
elimRule _ _ = Nothing
(stricts,weaks) = L.partition fst elims
toTrs rs = RS.fromList [ r | (_,(r1, mr2)) <- rs, let r = r1 `fromMaybe` mr2]
simplified = [ r | (_,(_, Just r)) <- elims ]
nprob = Prob.sanitiseDPGraph $ prob
{ Prob.strictDPs = toTrs stricts
, Prob.weakDPs = toTrs weaks
, Prob.signature = foldr updateCompound (Prob.signature prob) simplified }
where
updateCompound (R.Rule _ (R.Fun f rs)) acc = Sig.setArity (length rs) f acc
updateCompound _ acc = acc
proof = SimplifyRHSProof
{ wdg_ = wdg
, simplified_ = simplified }
--- * instances ------------------------------------------------------------------------------------------------------
simplifyRHSDeclaration :: T.Declaration ('[] T.:-> TrsStrategy)
simplifyRHSDeclaration = T.declare "simplifyRHS" desc () (T.Apply SimplifyRHS) where
desc =
[ "Simplify right hand sides of dependency pairs by removing marked subterms "
, "whose root symbols are undefined."
, "Only applicable if the strict component is empty." ]
-- | Simplifies right-hand sides of dependency pairs.
-- Removes r_i from right-hand side @c_n(r_1,...,r_n)@ if no instance of
-- r_i can be rewritten.
--
-- Only applicable on DP-problems as obtained by 'dependencyPairs' or 'dependencyTuples'. Also
-- not applicable when @strictTrs prob \= RS.empty@.
simplifyRHS :: TrsStrategy
simplifyRHS = T.declFun simplifyRHSDeclaration
--- * proofdata ------------------------------------------------------------------------------------------------------
instance PP.Pretty SimplifyRHSProof where
pretty SimplifyRHSFail = PP.text "No rule was simplified."
pretty p@SimplifyRHSProof{} = PP.vcat
[ PP.text "Consider the dependency graph"
, PP.indent 2 $ PP.pretty (wdg_ p)
    , PP.text "Due to missing edges in the dependency graph, the right-hand sides of the following rules could be simplified:"
, PP.indent 2 $ PP.pretty (RS.fromList $ simplified_ p) ]
instance Xml.Xml SimplifyRHSProof where
toXml SimplifyRHSFail = Xml.elt "simpRHS" []
toXml p@SimplifyRHSProof{} = Xml.elt "simpRHS"
[ Xml.toXml (wdg_ p)
, Xml.elt "simplified" $ map Xml.toXml (simplified_ p) ]
|
ComputationWithBoundedResources/tct-trs
|
src/Tct/Trs/Processor/DP/DPGraph/SimplifyRHS.hs
|
bsd-3-clause
| 5,310 | 0 | 17 | 1,381 | 1,128 | 627 | 501 | -1 | -1 |
{- |
Module : Ptk.Journal
Description : `ptk journal` command tree for the puffy toolkit
Copyright : 2014, Peter Harpending
License : BSD3
Maintainer : Peter Harpending <[email protected]>
Stability : experimental
Portability : Linux
-}
module Ptk.Journal (jTree, journalTree, journalHelp) where
import Data.Traversable
import PuffyTools.Journal
import Ptk.Journal.AddEntry
import Ptk.Journal.Cat
import Ptk.Journal.List
import Ptk.Journal.ListEntries
import Ptk.Journal.New
import System.Console.Argument
import System.Console.Command
import System.Console.Program
subCommandList = [ journalAddEntryTree
, journalAETree
, journalCatTree
, journalListTree
, journalLsTree
, journalListEntriesTree
, journalLeTree
, journalNewTree
, journalHelpTree
]
journalTree :: Commands IO
journalTree = Node journalCommand subCommandList
where
journalCommand = Command "journal" "Do things with Journals" journalHelp
jTree = Node jCommand subCommandList
where jCommand = Command "j" "Same as journal." journalHelp
journalHelpTree = Node (Command "help" "Show help for the journal module" journalHelp) []
journalHelp = io $ showUsage journalTree
|
pharpend/puffytools
|
ptk/Ptk/Journal.hs
|
bsd-3-clause
| 1,424 | 0 | 7 | 423 | 198 | 116 | 82 | 27 | 1 |
module Ivory.BSP.STM32.Peripheral.UART
( module Ivory.BSP.STM32.Peripheral.UART.Peripheral
, module Ivory.BSP.STM32.Peripheral.UART.Regs
, module Ivory.BSP.STM32.Peripheral.UART.Types
) where
import Ivory.BSP.STM32.Peripheral.UART.Peripheral
import Ivory.BSP.STM32.Peripheral.UART.Regs
import Ivory.BSP.STM32.Peripheral.UART.Types
|
GaloisInc/ivory-tower-stm32
|
ivory-bsp-stm32/src/Ivory/BSP/STM32/Peripheral/UART.hs
|
bsd-3-clause
| 342 | 0 | 5 | 30 | 68 | 53 | 15 | 7 | 0 |
module Main where
-- This is a Haskell translation of the official GLFW quick example
-- found at <http://www.glfw.org/docs/3.0/quick.html#quick_example>
-- using the GLFW-b library, version 1.x
-- I tried hard to keep the same structure so that it is simple
-- enough to go back and forth between the C version and the
-- Haskell one, while preserving some usual haskell tricks
-- If you have any comment, bug report, or any other kind of
-- feedback, feel free to shoot me an email at
-- alpmestan at gmail dot com
import Control.Monad (unless, when)
import Graphics.Rendering.OpenGL
import qualified Graphics.UI.GLFW as G
import System.Exit
import System.IO
-- tiny utility functions, in the same spirit as 'maybe' or 'either'
-- makes the code a wee bit cleaner
bool :: Bool -> a -> a -> a
bool b falseRes trueRes = if b then trueRes else falseRes
unless' :: Monad m => m Bool -> m () -> m ()
unless' action falseAction = do
b <- action
unless b falseAction
maybe' :: Maybe a -> b -> (a -> b) -> b
maybe' m nothingRes f = case m of
Nothing -> nothingRes
Just x -> f x
-- type ErrorCallback = Error -> String -> IO ()
errorCallback :: G.ErrorCallback
errorCallback err description = hPutStrLn stderr description
keyCallback :: G.KeyCallback
keyCallback window key scancode action mods = when (key == G.Key'Escape && action == G.KeyState'Pressed) $
G.setWindowShouldClose window True
main :: IO ()
main = do
G.setErrorCallback (Just errorCallback)
successfulInit <- G.init
-- if init failed, we exit the program
bool successfulInit exitFailure $ do
mw <- G.createWindow 640 480 "Simple example, haskell style" Nothing Nothing
maybe' mw (G.terminate >> exitFailure) $ \window -> do
G.makeContextCurrent mw
G.setKeyCallback window (Just keyCallback)
mainLoop window
G.destroyWindow window
G.terminate
exitSuccess
mainLoop :: G.Window -> IO ()
mainLoop w = unless' (G.windowShouldClose w) $ do
(width, height) <- G.getFramebufferSize w
let ratio = fromIntegral width / fromIntegral height
viewport $= (Position 0 0, Size (fromIntegral width) (fromIntegral height))
clear [ColorBuffer]
matrixMode $= Projection
loadIdentity
ortho (negate ratio) ratio (negate 1.0) 1.0 1.0 (negate 1.0)
matrixMode $= Modelview 0
loadIdentity
-- this is bad, but keeps the logic of the original example I guess
Just t <- G.getTime
rotate ((realToFrac t) * 50) $ (Vector3 0 0 1 :: Vector3 GLdouble)
renderPrimitive Triangles $ do
color (Color3 1 0 0 :: Color3 GLdouble)
vertex (Vertex3 (negate 0.6) (negate 0.4) 0 :: Vertex3 GLdouble)
color (Color3 0 1 0 :: Color3 GLdouble)
vertex (Vertex3 0.6 (negate 0.4) 0 :: Vertex3 GLdouble)
color (Color3 0 0 1 :: Color3 GLdouble)
vertex (Vertex3 0 0.6 0 :: Vertex3 GLdouble)
G.swapBuffers w
G.pollEvents
mainLoop w
|
alpmestan/glfw-b-quick-example
|
Main.hs
|
bsd-3-clause
| 3,002 | 0 | 16 | 715 | 839 | 406 | 433 | 57 | 2 |
module T where -- this shits me
import Sample
test_sanity = True
|
peteg/TBC
|
Sample/Tests/00_loadable.hs
|
bsd-3-clause
| 64 | 0 | 4 | 11 | 13 | 9 | 4 | 3 | 1 |
module Graph where
import Control.Monad
import Control.Monad.State
import Index
import Verilog
import Data.Graph.Inductive
import Data.Graph.Inductive.Query.DFS
import Data.Graph.Inductive.Dot
import Data.Graph.Inductive.NodeMap
import Data.List
import Control.Arrow
import Debug.Trace
import System.Random
import qualified Data.Map as M
import qualified Data.Set as S
-- import qualified Data.IntMap as M
-- | The graph that models the circuit after Nand Synthesis Model
type G = Gr Val Bool
type Ctx = Context Val Bool
val :: G -> Node -> Val
val g n = v
where
Just v = lab g n
type GState a = StateT G IdxState a
--type VG = Gr (NT, Bool) Bool
-- | Converts a graph to a GraphViz format
showGraph g = showDot $ fglToDot $ gmap (\(is, n, v, os) -> (is, n, (n, v), os)) g
-- | Creates all the nodes of the Graph
--wireNodes :: Verilog -> Ctx
--wireNodes v = [([], n, (), []) | n <- names v]
-- | Embeds all the wires in the graphs as disconnected nodes
startGraph :: G
startGraph = mkGraph [(0, ValZero), (1, ValOne)] []
addNode :: (Val, Node) -> GState Int
addNode (v, n) = do
g <- get
unless (gelem n g) $
modify $ insNode (n,v)
return n
newWire :: GState Int
newWire = do
n <- lift newIdx
addNode (Wire "<extra>", n)
getWire :: Val -> GState Int
getWire w = do
n <- lift $ getIdx w
addNode (w, n)
addEdge :: Bool -> (Node, Node) -> GState ()
addEdge b (n1, n2) =
modify $ insEdge (n1, n2, b)
initGraph :: Verilog -> GState ()
initGraph v = do
mapM_ getWire (map Input $ _inputs v)
trues = repeat True
falses = repeat False
-- | Embeds a Function in the graph.
embedF :: Function -> GState ()
embedF f@(Fun op os is) =
--trace ("Embedding function" ++ show f) $
case op of
And -> embedAnd is o
Nand -> embedNand is o
Or -> embedOr is o
Nor -> embedNor is o
Xor -> embedXor is o
Xnor -> embedXnor is o
Buf -> embedBuf i os
Not -> embedNot i os
where [i] = is
[o] = os
-- | Negates an Adjacency list
negateA :: Adj Bool -> Adj Bool
negateA = map (first not)
-- | Negates all output edges of a ctx
negateCtx (is, n, nv, os) = (is, n, nv, negateA os)
-- | Negates all output edges of a node
{-
negateV :: Node -> GState ()
negateV n = do
g <- get
let (mc, g') = match n g
case mc of
Just ctx -> put $ negateCtx ctx & g'
Nothing -> error "could not find vertex in negateV"
-}
-- | Should insert a few edges in the graph.
embedBuf :: Val -> [Val] -> GState ()
embedBuf iw ows = do
i <- getWire iw
os <- mapM getWire ows
embedBuf' i os
embedBuf' :: Node -> [Node] -> GState ()
embedBuf' i os = do
modify $ insEdges [(i,o, True) | o <- os]
-- | Inserts a Not function in the graph.
-- | Does this by negating all the outputs of a current vertex.
embedNot :: Val -> [Val] -> GState ()
embedNot iw ows = do
i <- getWire iw
os <- mapM getWire ows
modify $ insEdges [(i,o, False) | o <- os]
-- | Inserts an And function into the graph
embedAnd :: [Val] -> Val -> GState ()
embedAnd iws ow = do
is <- mapM getWire iws
o <- getWire ow
embedAnd' is o
embedAnd' :: [Node] -> Node -> GState ()
embedAnd' is o =
modify $ insEdges [(i,o, True) | i <- is]
-- | Inserts a Nand function into the graph
embedNand :: [Val] -> Val -> GState ()
embedNand iws ow = do
is <- mapM getWire iws
o <- getWire ow
embedNand' is o
embedNand' :: [Node] -> Node -> GState ()
embedNand' is o = do
n <- newWire
modify $ insEdge (n, o, False)
embedAnd' is n
-- | Inserts an Or function into the graph
embedOr :: [Val] -> Val -> GState ()
embedOr iws ow = do
is <- mapM getWire iws
o <- getWire ow
embedOr' is o
embedOr' :: [Node] -> Node -> GState ()
embedOr' is o = do
n <- newWire
modify $ insEdge (n, o, False)
embedNor' is n
-- | Inserts a Nor function into the graph
embedNor :: [Val] -> Val -> GState ()
embedNor iws ow = do
is <- mapM getWire iws
o <- getWire ow
embedNor' is o
embedNor' :: [Node] -> Node -> GState ()
embedNor' is o =
modify $ insEdges [(i, o, False) | i <- is]
-- | Inserts a Xor function into the graph
embedXor :: [Val] -> Val -> GState ()
embedXor iws ow = do
is <- mapM getWire iws
o <- getWire ow
embedXor' is o
embedXor' :: [Node] -> Node -> GState ()
embedXor' [i1, i2] o = do
n1 <- newWire
n2 <- newWire
embedAnd' [n1, n2] o
embedNand' [i1, i2] n1
embedOr' [i1, i2] n2
embedXor' (i1:i2:is) o = do
n <- newWire
embedXor' (n:is) o
embedXor' [i1, i2] n
-- | Inserts a Xnor gate into the graph
embedXnor iws ow = do
is <- mapM getWire iws
o <- getWire ow
n <- newWire
modify $ insEdge (n, o, False)
embedXor' is n
-- | Replaces a node with only one input and one output with an edge.
fixSingleNode :: Int -> G -> G
fixSingleNode n g = case match n g of
(Just ctx, g') -> case ctx of
([(vi, ni)], _, _, [(vo, no)]) -> insEdge (ni, no, not $ vi /= vo) g'
_ -> g
(Nothing, _) -> error "Could not match context in fixSingleNode"
-- | Cleans up graph after adding extra single nodes
fixSingleNodes g = --trace ("fix singles ")
g'
where g' = foldr fixSingleNode g (nodes g)
makeGraphV :: [Verilog] -> G
makeGraphV vs = g
where
(g, _, _, _) = makeGraphV' vs
makeGraphV' :: [Verilog] -> (G, M.Map Val Int, M.Map Val Int, M.Map String Int)
makeGraphV' vs =
runIdx $ flip evalStateT startGraph $ do
mapM_ initGraph vs
mapM_ makeGraphV1 vs
fixSingleNodes <$> get
makeGraphV1 :: Verilog -> GState G
makeGraphV1 v = do
inputs <- mapM getWire (map Input $ _inputs v)
wires <- mapM getWire (map Wire $ _inputs v)
mapM_ (addEdge True) $ zip inputs wires
mapM embedF $ reverse $ _functions v
wire_outs <- mapM getWire (map Wire $ _outputs v)
outs <- mapM getWire (map Output $ _outputs v)
mapM_ (addEdge True) $ zip wire_outs outs
lift resetIdx
get
--fixSingleNodes $
  --trace "Finished embedding all functions" $
-- | Calculates the nodes without input edges
isInput (_, Input _) = True
isInput _ = False
getInputs :: G -> [Node]
getInputs g = [n |
(n, v) <- labNodes g
, isInput (n, v) ]
mybfs :: G -> [Node]
mybfs = topsort
-- | Calculates the nodes without output edges
isOutput (_, Output _) = True
isOutput _ = False
getOutputs :: G -> [Node]
getOutputs g = [n |
(n, v) <- labNodes g
, isOutput (n, v) ]
-- | Renumber the nodes according solely to their inputs,
-- | so nodes with the same inputs will have the same id
-- | regardless of the previous.
{-
mybfs :: Gr a b -> [Int]
mybfs g | isEmpty g = []
| otherwise = inputs g ++ (mybfs $ delNodes (inputs g) g)
-}
-- | simulates the circuit's behavior.
-- | Receives the graph of the circuit as input and a list of inputs, in order.
-- | produces the outputs, in order.
{-
simulate1 :: Context () Bool -> M.IntMap Bool -> M.IntMap Bool
simulate1 = undefined
{-
simulate1 ([], n, (), _) m = case M.lookup n m of
Just v -> m
Nothing -> error "Value for n should have been set."
simulate1 (is, n, (), _) m = M.insert n v m
where
v = and $ [m M.! i /= vi | (vi, i) <- is]
-}
simulate :: [(Int, Bool)] -> G -> [(Int, Bool)]
simulate = undefined
{-
simulate input_values g = [(o, m' M.! o) | o <- outputs g]
where
m = M.fromList input_values
m' = ufold simulate1 m g
-}
randomSimulateIO :: G -> IO [(Int, Bool)]
randomSimulateIO g = do
let is = inputs g
rs <- replicateM (length is) randomIO
return $ simulate (zip is rs) g
contexts :: G -> [Ctx]
contexts g = map (context g) (topsort g) --ufold (:) [] g
removeStuckAt0 :: [Int] -> G -> G
removeStuckAt0 = undefined -- TODO
-}
|
wuerges/vlsi_verification
|
src/Graph.hs
|
bsd-3-clause
| 7,727 | 0 | 14 | 1,984 | 2,401 | 1,235 | 1,166 | 176 | 8 |
{-# LANGUAGE OverloadedStrings, CPP #-}
module Main where
#ifndef TLS
import Control.Monad (when)
#endif
import Data.Version (showVersion)
import Network.Wai.Application.Classic hiding ((</>), (+++))
import System.Directory (getCurrentDirectory)
import System.Environment (getArgs, getEnvironment)
import System.Exit (exitFailure)
import System.FilePath (addTrailingPathSeparator, isAbsolute, normalise, (</>))
import System.IO
import Program.Mighty
import Server
import Paths_mighttpd2 as P
----------------------------------------------------------------
programName :: String
programName = "Mighttpd"
programVersion :: String
programVersion = showVersion P.version
----------------------------------------------------------------
main :: IO ()
main = do
(opt,route) <- getOptRoute
checkTLS opt
let reportFile = reportFileName opt
debug = opt_debug_mode opt
rpt <- initReporter debug reportFile >>= checkReporter reportFile
let run = server opt rpt route
if debug then run else background opt run
where
getOptRoute = getArgs >>= eachCase
svrnm = programName ++ "/" ++ programVersion
eachCase args
| n == 0 = do
root <- amIrootUser
let opt | root = (defaultOption svrnm) { opt_port = 80 }
| otherwise = defaultOption svrnm
env <- getEnvironment
let port = maybe (opt_port opt) read $ lookup "PORT" env
opt' = opt { opt_port = port }
dir <- getCurrentDirectory
let dst = fromString . addTrailingPathSeparator $ dir
route = [Block ["*"] [RouteFile "/" dst]]
return (opt', route)
| n == 2 = do
let config_file = args !! 0
routing_file <- getAbsoluteFile (args !! 1)
opt <- parseOption config_file svrnm
route <- parseRoute routing_file defaultDomain defaultPort
let opt' = opt {opt_routing_file = Just routing_file}
return (opt',route)
| otherwise = do
hPutStrLn stderr "Usage: mighty"
hPutStrLn stderr " mighty config_file routing_file"
exitFailure
where
n = length args
getAbsoluteFile file
| isAbsolute file = return file
| otherwise = do
dir <- getCurrentDirectory
return $ dir </> normalise file
checkReporter _ (Right rpt) = return rpt
checkReporter reportFile (Left e) = do
hPutStrLn stderr $ reportFile ++ " is not writable"
hPrint stderr e
exitFailure
#ifdef TLS
checkTLS _ = return ()
#else
checkTLS opt = when (opt_service opt > 1) $ do
hPutStrLn stderr "This mighty does not support TLS"
exitFailure
#endif
----------------------------------------------------------------
background :: Option -> IO () -> IO ()
background opt svr = do
putStrLn $ "Serving on port " ++ show port ++ " and detaching this terminal..."
putStrLn $ "(If errors occur, they will be written in \"" ++ reportFileName opt ++ "\".)"
hFlush stdout
daemonize svr
where
port = opt_port opt
reportFileName :: Option -> FilePath
reportFileName opt
| port == 80 = rfile
| otherwise = rfile ++ show port
where
rfile = opt_report_file opt
port = opt_port opt
|
mietek/mighttpd2
|
src/Mighty.hs
|
bsd-3-clause
| 3,290 | 0 | 16 | 866 | 868 | 432 | 436 | 78 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Webcrank.Internal.ETag where
import Control.Applicative
import Data.Attoparsec.ByteString.Char8 (parseOnly, string)
import Data.ByteString (ByteString)
import Webcrank.Internal.Parsers
import Webcrank.Internal.Types
-- | Compares two @ETag@s for equality, only considering them equal if
-- they are both strong and byte-for-byte identical.
strongComparison :: ETag -> ETag -> Bool
strongComparison e1 e2 = case (e1, e2) of
(StrongETag v1, StrongETag v2) -> v1 == v2
_ -> False
-- | Compares two @ETag@s for equality, considering them equal whether or
-- not either of them is weak.
weakComparison :: ETag -> ETag -> Bool
weakComparison e1 e2 = opaqueTag e1 == opaqueTag e2
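-- A short illustration (the string literals below rely on
-- OverloadedStrings, which this module already enables):
--
-- > strongComparison (StrongETag "xyz") (StrongETag "xyz")  ==  True
-- > strongComparison (StrongETag "xyz") (WeakETag "xyz")    ==  False
-- > weakComparison   (StrongETag "xyz") (WeakETag "xyz")    ==  True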
opaqueTag :: ETag -> ByteString
opaqueTag e = case e of StrongETag v -> v; WeakETag v -> v
parseETags :: ByteString -> [ETag]
parseETags = either (const[]) id . parseOnly (csl1 etagP) where
etagP = weakP <|> strongP
weakP = WeakETag <$> (string "W/" *> quotedStringP)
strongP = StrongETag <$> quotedStringP
|
webcrank/webcrank.hs
|
src/Webcrank/Internal/ETag.hs
|
bsd-3-clause
| 1,040 | 0 | 10 | 171 | 270 | 148 | 122 | 20 | 2 |
module System.TellMe where
import Control.Monad (void)
import Data.Default (Default(..))
import Graphics.UI.Gtk
data Position = Top | Bottom
deriving (Show, Eq)
data Config = Config { screenNumber :: Int
, monitorNumber :: Int
, barHeight :: Int
, barPosition :: Position
, widgetSpacing :: Int
, startWidgets :: [Widget]
, endWidgets :: [Widget]
}
instance Default Config where
def = Config { screenNumber = 0
, monitorNumber = 0
, barHeight = 25
, barPosition = Bottom
, widgetSpacing = 10
, startWidgets = []
, endWidgets = []
}
setSize :: Config -> Window -> IO ()
setSize cfg window = do
screen <- windowGetScreen window
Rectangle x y w h <- screenGetMonitorGeometry screen $ monitorNumber cfg
let yoff = case barPosition cfg of
Top -> 0
Bottom -> h - barHeight cfg
windowMove window x $ y + yoff
windowSetGeometryHints window
(Nothing :: Maybe Widget)
(Just (w, barHeight cfg))
(Just (w, barHeight cfg))
Nothing
Nothing
Nothing
tellme :: Config -> IO ()
tellme cfg = do
void initGUI
Just disp <- displayGetDefault
screen <- displayGetScreen disp $ screenNumber cfg
window <- windowNew
widgetSetName window "TellMe"
windowSetTypeHint window WindowTypeHintDock
windowSetScreen window screen
setSize cfg window
void $ on screen screenMonitorsChanged $ setSize cfg window
box <- hBoxNew False $ widgetSpacing cfg
containerAdd window box
let addWidgets method widget = do
widgetSetSizeRequest widget (-1) $ barHeight cfg
method box widget PackNatural 0
mapM_ (addWidgets boxPackStart) $ startWidgets cfg
mapM_ (addWidgets boxPackEnd) $ endWidgets cfg
widgetShowAll window
mainGUI
|
izuk/tellme
|
src/System/TellMe.hs
|
bsd-3-clause
| 2,067 | 0 | 15 | 732 | 580 | 288 | 292 | 56 | 2 |
-- | This module defines messages from player to debugger
module IMsg
(
IMsg(..),
AMF(..),
AMFValue(..),
amfUndecoratedName,
nextIMessage
)
where
import Data.Word (Word8, Word16, Word32)
import Data.ByteString (ByteString, pack)
import qualified Data.ByteString.Char8 as BSChar
import qualified Data.Iteratee as I
import Data.Iteratee (Iteratee, Endian(LSB), endianRead4, endianRead2)
import Control.Monad (replicateM, when)
-- * Interface
-- | Messages sent by player
data IMsg
-- | 00 or 00
= IMsgMenuState Word32 Word32
-- | 03 or 03
| IMsgCreateAnonymObject Word32
-- | 05 or 05
| IMsgTrace String
-- | 0A or 10
| IMsgSetField Word32 ByteString [Word8]
-- | 0B or 11
| IMsgDeleteField Word32 ByteString
-- | 0C or 12
| IMsgMovieAttr ByteString ByteString
-- | 0E or 14
| IMsgSwdFileEntry Word32 Word32 ByteString ByteString Word32
-- | 0F or 15
| IMsgAskBreakpoints
-- | 10 or 16
| IMsgBreakHit Word16 Word16 Word32 ByteString
-- | 11 or 17
| IMsgBreak
-- | 12 or 18
| IMsgSetLocalVars Word32
-- | 13 or 19
| IMsgBreakpoints [(Word16, Word16)]
-- | 14 or 20
| IMsgNumSwdFileEntry Word32 Word32
-- | 19 or 25
| IMsgProcessTag
-- | 1A or 26
| IMsgVersion Word32 Word8
-- | 1B or 27
| IMsgBreakHitEx Word16 Word16 [(Word16, Word16, Word32, String)]
-- | 1C or 28
| IMsgSetField2 Word32 ByteString [Word8]
-- | 1E or 30
| IMsgGetField AMF [AMF]
-- | 1F or 31
| IMsgFunctionFrame Word32 Word32 AMF [AMF]
-- | 20 or 32
| IMsgDebuggerOption ByteString ByteString
-- | 24 or 36
| IMsgException Word32 String [Word8]
-- | All other
| IMsgUnknown Word32 [Word8]
deriving Show
-- | Represents Action Message Format entry
data AMF = AMF {
amfParent :: Word32,
amfName :: String,
amfFlags :: Word32,
amfValue :: AMFValue
} deriving Show
-- | Some objects (e.g. private members) could be decorated
amfUndecoratedName :: AMF -> String
amfUndecoratedName = reverse . takeWhile (/= ':') . reverse . amfName
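-- For example (the member name below is made up):
--
-- > amfUndecoratedName (AMF 0 "com.example::secret" 0 AMFNull)  ==  "secret"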
-- | Represents AMF value
data AMFValue = AMFDouble Double
| AMFBool Bool
| AMFString String
| AMFObject Word32 Word32 Word16 Word16 String
| AMFNull
| AMFUndefined
| AMFTrails
deriving Show
-- | Read next message from player
nextIMessage :: Monad m => Iteratee ByteString m IMsg
nextIMessage = do
len <- endianRead4 e_
idi <- endianRead4 e_
case idi of
00 -> iterMenuState len
03 -> iterCreateAnonymObject len
05 -> iterTrace len
10 -> iterSetField len
11 -> iterDeleteField len
12 -> iterMovieAttr len
14 -> iterSwdFileEntry len
15 -> iterAskBreakpoints len
16 -> iterBreakHit len
17 -> iterBreak len
18 -> iterSetLocalVars len
19 -> iterBreakpoints len
20 -> iterNumSwdFileEntry len
25 -> iterProcessTag len
26 -> iterVersion len
27 -> iterBreakHitEx len
28 -> iterSetField2 len
30 -> iterGetField len
31 -> iterFunctionFrame len
32 -> iterDebuggerOption len
36 -> iterException len
_ -> iterUnknown idi len
-- * Internals
-- ** Iteratees to parse messages
iterGetField :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterGetField len = do
(amf, ln) <- takeAMF
children <- takeChildren (fromIntegral len - ln) []
return $ IMsgGetField amf children
iterDebuggerOption :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterDebuggerOption len = do
(op, ol) <- takeStr
(val, vl) <- takeStr
when (fromIntegral len /= ol + vl) (fail "iterDebuggerOption: wrong size")
return $ IMsgDebuggerOption op val
iterFunctionFrame :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterFunctionFrame len = do
depth <- endianRead4 e_
when (depth /= 0) (fail "iterFunctionFrame: depth != 0, not implemented")
addr <- endianRead4 e_
(amf, ln) <- takeAMF
children <- takeChildren (fromIntegral len - 4 - 4 - ln) []
return $ IMsgFunctionFrame depth addr amf children
takeChildren :: Monad m => Int -> [AMF] -> Iteratee ByteString m [AMF]
takeChildren 0 res = return $ reverse res
takeChildren l res = do
when (l < 0) (fail "iterFunctionFrame: wrong size")
(amf, vl) <- takeAMF
takeChildren (fromIntegral l - vl) (amf : res)
iterException :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterException len = do
arg1 <- endianRead4 e_
(ex, ln) <- takeStr
arg3 <- replicateM (fromIntegral len - 4 - ln) I.head
return $ IMsgException arg1 (bs2s ex) arg3
iterTrace :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterTrace len = do
(msg, ln) <- takeStr
when (len /= fromIntegral ln) (fail "iterTrace: wrong length")
return $ IMsgTrace $ bs2s msg
iterProcessTag :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterProcessTag len = do
when (len /= 0) (fail "iterProcessTag: wrong length")
return IMsgProcessTag
iterDeleteField :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterDeleteField len = do
addr <- endianRead4 e_
(name, ln) <- takeStr
when (len /= fromIntegral ln + 4) (fail "iterDeleteField: wrong length")
return $ IMsgDeleteField addr name
iterSetField :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterSetField len = do
addr <- endianRead4 e_
(name, ln) <- takeStr
amf <- replicateM (fromIntegral len - 4 - ln) I.head
return $ IMsgSetField addr name amf
iterSetField2 :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterSetField2 len = do
addr <- endianRead4 e_
(name, ln) <- takeStr
amf <- replicateM (fromIntegral len - 4 - ln) I.head
return $ IMsgSetField2 addr name amf
iterMenuState :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterMenuState len = do
when (len /= 8) (fail "iterMenuState: wrong length")
arg1 <- endianRead4 e_
arg2 <- endianRead4 e_
return $ IMsgMenuState arg1 arg2
iterCreateAnonymObject :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterCreateAnonymObject len = do
when (len /= 4) (fail "iterCreateAnonymObject: wrong length")
addr <- endianRead4 e_
return $ IMsgCreateAnonymObject addr
iterSetLocalVars :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterSetLocalVars len = do
when (len /= 4) (fail "iterSetLocalVars: wrong length")
addr <- endianRead4 e_
return $ IMsgSetLocalVars addr
iterBreak :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterBreak len = do
when (len /= 0) (fail "iterBreak: wrong length")
return IMsgBreak
-- XXX: Check length
iterBreakHitEx :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterBreakHitEx _ = do
fileId <- endianRead2 e_
line <- endianRead2 e_
depth <- endianRead4 e_
stack <- replicateM (fromIntegral depth) iterFrame
return $ IMsgBreakHitEx fileId line stack
where
iterFrame = do
fileId <- endianRead2 e_
line <- endianRead2 e_
addr <- endianRead4 e_
(entry, _) <- takeStr
return (fileId, line, addr, bs2s entry)
iterBreakHit :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterBreakHit len = do
fileId <- endianRead2 e_
line <- endianRead2 e_
addr <- endianRead4 e_
(function, ln) <- takeStr
when (len /= fromIntegral ln + 8) (fail "iterBreakHit: wrong length")
return $ IMsgBreakHit fileId line addr function
iterAskBreakpoints :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterAskBreakpoints len = do
when (len /= 0) (fail "iterAskBreakpoints: wrong length")
return IMsgAskBreakpoints
-- XXX: Check length
iterBreakpoints :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterBreakpoints _ = do
count <- endianRead4 e_
l <- replicateM (fromIntegral count) iter'
return $ IMsgBreakpoints l
where
iter' = do
fileId <- endianRead2 e_
line <- endianRead2 e_
return (fileId, line)
iterSwdFileEntry :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterSwdFileEntry len = do
fileId <- endianRead4 e_
unIndex <- endianRead4 e_
(name, ln1) <- takeStr
(source, ln2) <- takeStr
swfIndex <- endianRead4 e_
when (len /= fromIntegral (ln1 + ln2) + 12)
(fail "iterSwdFileEntry: wrong length")
return $ IMsgSwdFileEntry fileId unIndex name source swfIndex
iterUnknown :: Monad m => Word32 -> Word32 -> Iteratee ByteString m IMsg
iterUnknown idi len = do
dat <- replicateM (fromIntegral len) I.head
return $ IMsgUnknown idi dat
iterVersion :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterVersion len = do
when (len /= 5) (fail "iterVersion: wrong length")
major <- endianRead4 e_
minor <- I.head
return $ IMsgVersion major minor
iterMovieAttr :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterMovieAttr len = do
(name, ln1) <- takeStr
(value, ln2) <- takeStr
when (len /= fromIntegral (ln1 + ln2)) (fail "iterMovieAttr: wrong length")
return $ IMsgMovieAttr name value
iterNumSwdFileEntry :: Monad m => Word32 -> Iteratee ByteString m IMsg
iterNumSwdFileEntry len = do
when (len /= 8) (fail "iterNumSwdFileEntry: wrong length")
num <- endianRead4 e_
index <- endianRead4 e_
return $ IMsgNumSwdFileEntry num index
-- ** Utilities
e_ :: Endian
e_ = LSB
-- | Read zero terminated string
-- returns string and number of bytes read
takeStr :: Monad m => Iteratee ByteString m (ByteString, Int)
takeStr = takeStr' [] 0
where
takeStr' :: Monad m =>
[Word8] -> Int -> Iteratee ByteString m (ByteString, Int)
takeStr' cs len = do
c <- I.head
if c == 0
then return . flip (,) (len + 1) . pack . reverse $ cs
else takeStr' (c:cs) (len + 1)
-- | Read AMF
takeAMF :: Monad m => Iteratee ByteString m (AMF, Int)
takeAMF = do
parent <- endianRead4 e_
(name, nl) <- takeStr
vtype <- endianRead2 e_
flags <- endianRead4 e_
(value, vl) <- takeAMFValue vtype
return (AMF parent (bs2s name) flags value, 4 + nl + 2 + 4 + vl)
-- | Read AMF value
takeAMFValue :: Monad m => Word16 -> Iteratee ByteString m (AMFValue, Int)
takeAMFValue 0 = do
(str, ln) <- takeStr
return (AMFDouble . read . bs2s $ str, ln)
takeAMFValue 1 = do
v <- I.head
return (AMFBool (v /= 0), 1)
takeAMFValue 2 = do
(str, ln) <- takeStr
return (AMFString (bs2s str), ln)
takeAMFValue 3 = do
oid <- endianRead4 e_
tp <- endianRead4 e_
isF <- endianRead2 e_
r <- endianRead2 e_
(typeName, tl) <- takeStr
return (AMFObject oid tp isF r (bs2s typeName), 4 + 4 + 2 + 2 + tl)
takeAMFValue 5 = return (AMFNull, 0)
takeAMFValue 6 = return (AMFUndefined, 0)
takeAMFValue 19 = return (AMFTrails, 0)
takeAMFValue tp = fail $ "takeAMFValue: not implemented: " ++ show tp
-- | ByteString to String
bs2s :: ByteString -> String
bs2s = BSChar.unpack
|
Yuras/hfd
|
src/IMsg.hs
|
bsd-3-clause
| 10,504 | 0 | 16 | 2,276 | 3,538 | 1,753 | 1,785 | 266 | 22 |
-- Given N,M, this runs N copies of an M-deep pipeline1.
|
rrnewton/Haskell-CnC
|
examples/empty_task_topologies/par_pipelines.hs
|
bsd-3-clause
| 59 | 0 | 2 | 12 | 3 | 2 | 1 | 1 | 0 |
module Homework7.Tree where
import Data.Monoid
data Tree v a = Leaf v a | Branch v (Tree v a) (Tree v a)
-- | Read the measure cached at the root of a (sub)tree.
tag :: Monoid v => Tree v a -> v
tag (Leaf v _) = v
tag (Branch v _ _) = v
branch :: Monoid v => Tree v a -> Tree v a -> Tree v a
branch x y = Branch (tag x <> tag y) x y
search :: Monoid v => (v -> Bool) -> Tree v a -> Maybe a
search p t
| p $ tag t = Just $ go mempty p t
| otherwise = Nothing
where
go i p (Leaf _ a) = a
go i p (Branch _ l r)
| p (i <> tag l) = go i p l
| otherwise = go (i <> tag l) p r
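-- A usage sketch (invented for illustration; 'Sum' comes from Data.Monoid,
-- imported above): annotating every leaf with @Sum 1@ turns 'search' into a
-- positional lookup, because the accumulated prefix measure counts the
-- leaves to the left of the current position.
--
-- > t :: Tree (Sum Int) Char
-- > t = branch (branch (Leaf (Sum 1) 'a') (Leaf (Sum 1) 'b')) (Leaf (Sum 1) 'c')
-- >
-- > -- search (> Sum 1) t  ==  Just 'b'   -- the second leaf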
|
gabluc/CIS194-Solutions
|
src/Homework7/Tree.hs
|
bsd-3-clause
| 561 | 0 | 13 | 178 | 353 | 170 | 183 | 16 | 2 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Himpress.Compile
(compile)
where
import Himpress.Transitions
import Data.Lenses (fetch)
import Data.Monoid
import Data.List (mapAccumL)
import Data.Text (pack)
import Data.Map (fromList)
import Text.Blaze (Html,toHtml)
-- Protoslides, i.e. slides with a bunch of text and un-composed transitions.
type PSlide = ([Html],[Transition])
emptyPSlide = ([],[])
splitIntoPSlides::[Element]->[PSlide]
splitIntoPSlides [] = []
splitIntoPSlides (l:ls) = out $ foldl split' (l,emptyPSlide `addToTuple` l,[]) ls
where split' (prev,slide,acc) new
| fusible prev new = (new, addToTuple slide new, acc)
| otherwise = (new, emptyPSlide `addToTuple` new, reverseTuple slide : acc)
-- Reverse the last slide, then the entire presentation.
out (_,l,x) = reverse $ reverseTuple l :x
reverseTuple (a,b) = (reverse a,reverse b)
addToTuple (a,b) = either ((,b) . (:a)) ((a,) . (:b))
-- Text, Text are on the same slide
-- Text, Transition are never
-- Transition, Transition are, iff the second is composable
-- Transition, Text are always
fusible (Left _) (Left _) = True
fusible (Left _) (Right _) = False
fusible (Right _) (Left _) = True
fusible (Right _) (Right t) = composes t
composes = either snd snd
-- Now we may compose all of the transitions on each slide, and get a set of
-- native attributes + a presentation state for each slide
-- This is then converted to a set of native attributes, and sent off for formatting.
compose::(Int,Int)->[Transition]->(Native,PState)
compose size = foldl append (mempty,mempty)
where append st = either (combineNative st . fst) (combineChange st . fst)
combineNative (nat,st) new = (nat `mappend` new,st)
combineChange (nat,st) new = (nat, updateState size new st)
toNative::PState->Native
toNative p = Native {classes = mempty, attrs = fromList [
("data-scale", str scale p)
,("data-x", str x p)
,("data-y", str y p)
,("data-z", str z p)
]}
where str lens = pack . show . flip fetch lens
compile::(Int,Int)->[Element]->[Slide]
compile size = snd . mapAccumL buildSlide mempty . splitIntoPSlides
where buildSlide st (body,trans) = let (nat,pstate) = compose size trans
st' = st `mappend` pstate
in (st',(nat `mappend` toNative st', toHtml body))
|
matthewSorensen/himpress
|
Himpress/Compile.hs
|
bsd-3-clause
| 2,685 | 0 | 12 | 795 | 842 | 471 | 371 | 43 | 4 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
module Pact.MockDb where
import Pact.Types.Runtime
import Data.Aeson
import Data.String
import Data.Default
import Pact.Interpreter
rc :: a -> Method e a
rc = const . return
newtype MockRead =
MockRead (forall k v . (IsString k,FromJSON v) =>
Domain k v -> k -> Method () (Maybe v))
instance Default MockRead where def = MockRead (\_t _k -> rc Nothing)
newtype MockKeys =
MockKeys (forall k v . (IsString k,AsString k) => Domain k v -> Method () [k])
instance Default MockKeys where def = MockKeys (\_t -> rc [])
newtype MockTxIds =
MockTxIds (TableName -> TxId -> Method () [TxId])
instance Default MockTxIds where def = MockTxIds (\_t _i -> rc [])
newtype MockGetUserTableInfo =
MockGetUserTableInfo (TableName -> Method () ModuleName)
instance Default MockGetUserTableInfo where def = MockGetUserTableInfo (\_t -> rc "")
newtype MockCommitTx =
MockCommitTx (Method () [TxLog Value])
instance Default MockCommitTx where def = MockCommitTx (rc [])
newtype MockGetTxLog =
MockGetTxLog (forall k v . (IsString k,FromJSON v) =>
Domain k v -> TxId -> Method () [TxLog v])
instance Default MockGetTxLog where def = MockGetTxLog (\_t _i -> rc [])
data MockDb = MockDb {
mockRead :: MockRead,
mockKeys :: MockKeys,
mockTxIds :: MockTxIds,
mockGetUserTableInfo :: MockGetUserTableInfo,
mockCommitTx :: MockCommitTx,
mockGetTxLog :: MockGetTxLog
}
instance Default MockDb where def = MockDb def def def def def def
pactdb :: MockDb -> PactDb ()
pactdb (MockDb (MockRead r) (MockKeys ks) (MockTxIds tids) (MockGetUserTableInfo uti)
(MockCommitTx c) (MockGetTxLog gt)) = PactDb {
_readRow = r
,
_writeRow = \_wt _t _k _v -> rc ()
,
_keys = ks
,
_txids = tids
,
_createUserTable = \_t _m -> rc ()
,
_getUserTableInfo = uti
,
_beginTx = \_t -> rc Nothing
,
_commitTx = c
,
_rollbackTx = rc ()
,
_getTxLog = gt
}
mkMockEnv :: MockDb -> IO (PactDbEnv ())
mkMockEnv m = mkPactDbEnv (pactdb m) ()
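-- A hypothetical usage sketch (the key names are invented, and
-- OverloadedStrings is assumed at the use site): override one method and
-- keep the defaults for everything else.
--
-- > env <- mkMockEnv def { mockKeys = MockKeys (\_domain -> rc ["alice", "bob"]) }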
|
kadena-io/pact
|
src-ghc/Pact/MockDb.hs
|
bsd-3-clause
| 2,063 | 0 | 13 | 438 | 759 | 413 | 346 | 53 | 1 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE RankNTypes #-}
-- | Network-related logic that's mostly methods and dialogs between
-- nodes. Also see "Pos.Chain.Block.Network.Retrieval" for retrieval worker
-- loop logic.
module Pos.Network.Block.Logic
(
BlockNetLogicException (..)
, triggerRecovery
, handleBlocks
, handleUnsolicitedHeader
) where
import Universum
import Control.Concurrent.STM (isFullTBQueue, readTVar, writeTBQueue,
writeTVar)
import Control.Exception (IOException)
import Control.Exception.Safe (Exception (..))
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as M
import Formatting (bprint, build, sformat, shown, stext, (%))
import qualified Formatting.Buildable as B
import Serokell.Util.Text (listJson)
import qualified System.Metrics.Gauge as Metrics
import Pos.Chain.Block (ApplyBlocksException, Block, BlockHeader,
Blund, HasHeaderHash (..), HeaderHash, LastKnownHeaderTag,
blockHeader, gbHeader, headerHashG, prevBlockL)
import Pos.Chain.Genesis as Genesis (Config (..), configEpochSlots)
import Pos.Chain.Txp (TxpConfiguration)
import Pos.Core (SlotCount, isMoreDifficult)
import Pos.Core.Chrono (NE, NewestFirst (..), OldestFirst (..),
_NewestFirst, _OldestFirst)
import Pos.Core.Conc (forConcurrently)
import Pos.Core.Exception (cardanoExceptionFromException,
cardanoExceptionToException)
import Pos.Core.JsonLog (CanJsonLog (..))
import Pos.Core.Reporting (HasMisbehaviorMetrics (..),
MisbehaviorMetrics (..))
import Pos.Core.Slotting (MonadSlots (getCurrentSlot))
import Pos.Crypto (shortHashF)
import Pos.DB.Block (ClassifyHeaderRes (..), classifyNewHeader,
lcaWithMainChain, verifyAndApplyBlocks)
import qualified Pos.DB.Block as L
import qualified Pos.DB.Block as DB
import Pos.DB.GState.Lock (Priority (..), modifyStateLock)
import Pos.Infra.Communication.Protocol (NodeId)
import Pos.Infra.Diffusion.Types (Diffusion)
import qualified Pos.Infra.Diffusion.Types as Diffusion
import Pos.Infra.Recovery.Info (recoveryInProgress)
import Pos.Infra.Util.JsonLog.Events (MemPoolModifyReason (..),
jlAdoptedBlock)
import Pos.Network.Block.RetrievalQueue (BlockRetrievalQueue,
BlockRetrievalQueueTag, BlockRetrievalTask (..))
import Pos.Network.Block.WorkMode (BlockWorkMode)
import Pos.Util (buildListBounds, multilineBounds, _neLast)
import Pos.Util.AssertMode (inAssertMode)
import Pos.Util.Util (lensOf)
import Pos.Util.Wlog (logDebug, logInfo, logWarning)
----------------------------------------------------------------------------
-- Exceptions
----------------------------------------------------------------------------
-- FIXME this same thing is defined in full diffusion layer.
-- Must finish the proper factoring. There should be no 'Block.Network'
-- in cardano-sl-block; it should just use the Diffusion and Logic interfaces.
data BlockNetLogicException
= DialogUnexpected Text
-- ^ Node's response in any network/block related logic was
-- unexpected.
| BlockNetLogicInternal Text
    -- ^ We don't expect this to happen. Most probably it's an internal
    -- logic error.
deriving (Show)
instance B.Buildable BlockNetLogicException where
build e = bprint ("BlockNetLogicException: "%shown) e
instance Exception BlockNetLogicException where
toException = cardanoExceptionToException
fromException = cardanoExceptionFromException
displayException = toString . pretty
----------------------------------------------------------------------------
-- Recovery
----------------------------------------------------------------------------
-- | Start recovery based on established communication. “Starting recovery”
-- means simply sending all our neighbors a 'MsgGetHeaders' message (see
-- 'requestTip'), so sometimes 'triggerRecovery' is used simply to ask for
-- tips.
--
-- Note that when recovery is in progress (see 'recoveryInProgress'),
-- 'triggerRecovery' does nothing. It's okay because when recovery is in
-- progress and 'ncRecoveryHeader' is full, we'll be requesting blocks anyway
-- and until we're finished we shouldn't be asking for new blocks.
triggerRecovery
:: ( BlockWorkMode ctx m
)
=> Genesis.Config -> Diffusion m -> m ()
triggerRecovery genesisConfig diffusion = unlessM (recoveryInProgress $ configEpochSlots genesisConfig) $ do
logDebug "Recovery triggered, requesting tips from neighbors"
-- The 'catch' here is for an exception when trying to enqueue the request.
  -- In 'requestTipsAndProcess', IO exceptions are caught for each individual
  -- per-peer request and are not re-thrown.
void requestTipsAndProcess `catch`
\(e :: SomeException) -> do
logDebug ("Error happened in triggerRecovery: " <> show e)
throwM e
logDebug "Finished requesting tips for recovery"
where
requestTipsAndProcess = do
requestsMap <- Diffusion.requestTip diffusion
forConcurrently (M.toList requestsMap) $ \it@(nodeId, _) -> waitAndProcessOne it `catch`
-- Catch and squelch IOExceptions so that one failed request to one
      -- particular peer does not stop the others.
\(e :: IOException) ->
logDebug $ sformat ("Error requesting tip from "%shown%": "%shown) nodeId e
waitAndProcessOne (nodeId, mbh) = do
-- 'mbh' is an 'm' term that returns when the header has been
-- downloaded.
bh <- mbh
-- I know, it's not unsolicited. TODO rename.
handleUnsolicitedHeader genesisConfig bh nodeId
----------------------------------------------------------------------------
-- Headers processing
----------------------------------------------------------------------------
handleUnsolicitedHeader
:: BlockWorkMode ctx m
=> Genesis.Config
-> BlockHeader
-> NodeId
-> m ()
handleUnsolicitedHeader genesisConfig header nodeId = do
logDebug $ sformat
("handleUnsolicitedHeader: single header was propagated, processing:\n"
%build) header
classificationRes <- classifyNewHeader genesisConfig header
-- TODO: should we set 'To' hash to hash of header or leave it unlimited?
case classificationRes of
CHContinues -> do
logDebug $ sformat continuesFormat hHash
addHeaderToBlockRequestQueue nodeId header True
CHAlternative -> do
logDebug $ sformat alternativeFormat hHash
addHeaderToBlockRequestQueue nodeId header False
CHUseless reason -> logDebug $ sformat uselessFormat hHash reason
CHInvalid reason -> do
logWarning $ sformat invalidFormat hHash reason
pass -- TODO: ban node for sending invalid block.
where
hHash = headerHash header
continuesFormat =
"Header " %shortHashF %
" is a good continuation of our chain, will process"
alternativeFormat =
"Header " %shortHashF %
" potentially represents good alternative chain, will process"
uselessFormat =
"Header " %shortHashF%" is useless for the following reason: " %stext
invalidFormat =
"handleUnsolicitedHeader: header " %shortHashF%
" is invalid for the following reason: " %stext
----------------------------------------------------------------------------
-- Putting things into request queue
----------------------------------------------------------------------------
-- | Given a valid block header and node ID, this function puts them into the
-- download queue; they will be processed later.
addHeaderToBlockRequestQueue
:: forall ctx m.
(BlockWorkMode ctx m)
=> NodeId
-> BlockHeader
-> Bool -- ^ Was the block classified as chain continuation?
-> m ()
addHeaderToBlockRequestQueue nodeId header continues = do
let hHash = headerHash header
logDebug $ sformat ("addToBlockRequestQueue, : "%shortHashF) hHash
queue <- view (lensOf @BlockRetrievalQueueTag)
lastKnownH <- view (lensOf @LastKnownHeaderTag)
added <- atomically $ do
updateLastKnownHeader lastKnownH header
addTaskToBlockRequestQueue nodeId queue $
BlockRetrievalTask { brtHeader = header, brtContinues = continues }
if added
then logDebug $ sformat ("Added headers to block request queue: nodeId="%build%
", header="%build)
nodeId hHash
else logWarning $ sformat ("Failed to add headers from "%build%
" to block retrieval queue: queue is full")
nodeId
addTaskToBlockRequestQueue
:: NodeId
-> BlockRetrievalQueue
-> BlockRetrievalTask
-> STM Bool
addTaskToBlockRequestQueue nodeId queue task = do
ifM (isFullTBQueue queue)
(pure False)
(True <$ writeTBQueue queue (nodeId, task))
updateLastKnownHeader
:: TVar (Maybe BlockHeader)
-> BlockHeader
-> STM ()
updateLastKnownHeader lastKnownH header = do
oldV <- readTVar lastKnownH
let needUpdate = maybe True (header `isMoreDifficult`) oldV
when needUpdate $ writeTVar lastKnownH (Just header)
----------------------------------------------------------------------------
-- Handling blocks
----------------------------------------------------------------------------
-- | Carefully apply blocks that came from the network.
handleBlocks
:: forall ctx m .
( BlockWorkMode ctx m
, HasMisbehaviorMetrics ctx
)
=> Genesis.Config
-> TxpConfiguration
-> OldestFirst NE Block
-> Diffusion m
-> m ()
handleBlocks genesisConfig txpConfig blocks diffusion = do
logDebug "handleBlocks: processing"
inAssertMode $ logInfo $
sformat ("Processing sequence of blocks: " % buildListBounds % "...") $
getOldestFirst $ map headerHash blocks
maybe onNoLca handleBlocksWithLca =<<
lcaWithMainChain (map (view blockHeader) blocks)
inAssertMode $ logDebug $ "Finished processing sequence of blocks"
where
onNoLca = logWarning $
"Sequence of blocks can't be processed, because there is no LCA. " <>
"Probably rollback happened in parallel"
handleBlocksWithLca :: HeaderHash -> m ()
handleBlocksWithLca lcaHash = do
logDebug $ sformat ("Handling block w/ LCA, which is "%shortHashF) lcaHash
-- Head blund in result is the youngest one.
toRollback <- DB.loadBlundsFromTipWhile (configGenesisHash genesisConfig)
$ \blk -> headerHash blk /= lcaHash
maybe (applyWithoutRollback genesisConfig txpConfig diffusion blocks)
(applyWithRollback genesisConfig txpConfig diffusion blocks lcaHash)
(_NewestFirst nonEmpty toRollback)
applyWithoutRollback
:: forall ctx m.
( BlockWorkMode ctx m
, HasMisbehaviorMetrics ctx
)
=> Genesis.Config
-> TxpConfiguration
-> Diffusion m
-> OldestFirst NE Block
-> m ()
applyWithoutRollback genesisConfig txpConfig diffusion blocks = do
logInfo . sformat ("Trying to apply blocks w/o rollback. " % multilineBounds 6)
. getOldestFirst . map (view blockHeader) $ blocks
modifyStateLock HighPriority ApplyBlock applyWithoutRollbackDo >>= \case
Left (pretty -> err) ->
onFailedVerifyBlocks (getOldestFirst blocks) err
Right newTip -> do
when (newTip /= newestTip) $
logWarning $ sformat
("Only blocks up to "%shortHashF%" were applied, "%
"newer were considered invalid")
newTip
let toRelay =
fromMaybe (error "Listeners#applyWithoutRollback is broken") $
find (\b -> headerHash b == newTip) blocks
prefix = blocks
& _OldestFirst %~ NE.takeWhile ((/= newTip) . headerHash)
& map (view blockHeader)
applied = NE.fromList $
getOldestFirst prefix <> one (toRelay ^. blockHeader)
relayBlock epochSlots diffusion toRelay
logInfo $ blocksAppliedMsg applied
for_ blocks $ jsonLog . jlAdoptedBlock
where
epochSlots = configEpochSlots genesisConfig
newestTip = blocks ^. _OldestFirst . _neLast . headerHashG
applyWithoutRollbackDo
:: HeaderHash -> m (HeaderHash, Either ApplyBlocksException HeaderHash)
applyWithoutRollbackDo curTip = do
logInfo "Verifying and applying blocks..."
curSlot <- getCurrentSlot epochSlots
res <- fmap fst <$> verifyAndApplyBlocks genesisConfig txpConfig curSlot False blocks
logInfo "Verifying and applying blocks done"
let newTip = either (const curTip) identity res
pure (newTip, res)
applyWithRollback
:: ( BlockWorkMode ctx m
, HasMisbehaviorMetrics ctx
)
=> Genesis.Config
-> TxpConfiguration
-> Diffusion m
-> OldestFirst NE Block
-> HeaderHash
-> NewestFirst NE Blund
-> m ()
applyWithRollback genesisConfig txpConfig diffusion toApply lca toRollback = do
logInfo . sformat ("Trying to apply blocks w/o rollback. " % multilineBounds 6)
. getOldestFirst . map (view blockHeader) $ toApply
logInfo $ sformat ("Blocks to rollback "%listJson) toRollbackHashes
res <- modifyStateLock HighPriority ApplyBlockWithRollback $ \curTip -> do
res <- L.applyWithRollback genesisConfig txpConfig toRollback toApplyAfterLca
pure (either (const curTip) identity res, res)
case res of
Left err ->
logWarning $ "Couldn't apply blocks with rollback: " <> pretty err
Right newTip -> do
logDebug $ sformat
("Finished applying blocks w/ rollback, relaying new tip: "%shortHashF)
newTip
reportRollback
logInfo $ blocksRolledBackMsg (getNewestFirst toRollback)
logInfo $ blocksAppliedMsg (getOldestFirst toApply)
for_ (getOldestFirst toApply) $ jsonLog . jlAdoptedBlock
relayBlock (configEpochSlots genesisConfig) diffusion
$ toApply ^. _OldestFirst . _neLast
where
toRollbackHashes = fmap headerHash toRollback
reportRollback = do
let rollbackDepth = length toRollback
-- Commit rollback value to EKG
whenJustM (view misbehaviorMetrics) $ liftIO .
flip Metrics.set (fromIntegral rollbackDepth) . _mmRollbacks
panicBrokenLca = error "applyWithRollback: nothing after LCA :<"
toApplyAfterLca =
OldestFirst $
fromMaybe panicBrokenLca $ nonEmpty $
NE.dropWhile ((lca /=) . (^. prevBlockL)) $
getOldestFirst $ toApply
relayBlock
:: forall ctx m.
(BlockWorkMode ctx m)
=> SlotCount -> Diffusion m -> Block -> m ()
relayBlock _ _ (Left _) = logDebug "Not relaying Genesis block"
relayBlock epochSlots diffusion (Right mainBlk) = do
recoveryInProgress epochSlots >>= \case
True -> logDebug "Not relaying block in recovery mode"
False -> do
logDebug $ sformat ("Calling announceBlock for "%shortHashF%".")
(mainBlk ^. gbHeader . headerHashG)
void $ Diffusion.announceBlockHeader diffusion $ mainBlk ^. gbHeader
----------------------------------------------------------------------------
-- Common logging / logic sink points
----------------------------------------------------------------------------
-- TODO: ban node for it!
onFailedVerifyBlocks
:: forall ctx m .
BlockWorkMode ctx m
=> NonEmpty Block -> Text -> m ()
onFailedVerifyBlocks blocks err = do
logWarning $ sformat ("Failed to verify blocks: "%stext%"\n blocks = "%listJson)
err (fmap headerHash blocks)
throwM $ DialogUnexpected err
blocksAppliedMsg
:: forall a.
HasHeaderHash a
=> NonEmpty a -> Text
blocksAppliedMsg (block :| []) =
sformat ("Block has been adopted "%shortHashF) (headerHash block)
blocksAppliedMsg blocks =
sformat ("Blocks have been adopted: "%listJson) (fmap (headerHash @a) blocks)
blocksRolledBackMsg
:: forall a.
HasHeaderHash a
=> NonEmpty a -> Text
blocksRolledBackMsg =
sformat ("Blocks have been rolled back: "%listJson) . fmap (headerHash @a)
|
input-output-hk/pos-haskell-prototype
|
lib/src/Pos/Network/Block/Logic.hs
|
mit
| 16,755 | 0 | 21 | 4,139 | 3,240 | 1,706 | 1,534 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DuplicateRecordFields #-}
module Language.LSP.Types.CodeLens where
import Data.Aeson
import Data.Aeson.TH
import Language.LSP.Types.Command
import Language.LSP.Types.Location
import Language.LSP.Types.Progress
import Language.LSP.Types.TextDocument
import Language.LSP.Types.Utils
-- -------------------------------------
data CodeLensClientCapabilities =
CodeLensClientCapabilities
{ -- | Whether code lens supports dynamic registration.
_dynamicRegistration :: Maybe Bool
} deriving (Show, Read, Eq)
deriveJSON lspOptions ''CodeLensClientCapabilities
-- -------------------------------------
makeExtendingDatatype "CodeLensOptions" [''WorkDoneProgressOptions]
[ ("_resolveProvider", [t| Maybe Bool |] )]
deriveJSON lspOptions ''CodeLensOptions
makeExtendingDatatype "CodeLensRegistrationOptions"
[ ''TextDocumentRegistrationOptions
, ''CodeLensOptions
] []
deriveJSON lspOptions ''CodeLensRegistrationOptions
-- -------------------------------------
makeExtendingDatatype "CodeLensParams"
[ ''WorkDoneProgressParams,
''PartialResultParams
]
[("_textDocument", [t|TextDocumentIdentifier|])]
deriveJSON lspOptions ''CodeLensParams
-- -------------------------------------
-- | A code lens represents a command that should be shown along with source
-- text, like the number of references, a way to run tests, etc.
--
-- A code lens is _unresolved_ when no command is associated with it. For
-- performance reasons the creation of a code lens and resolving should be done
-- in two stages.
data CodeLens =
CodeLens
{ -- | The range in which this code lens is valid. Should only span a single line.
_range :: Range
, -- | The command this code lens represents.
_command :: Maybe Command
, -- | A data entry field that is preserved on a code lens item between
-- a code lens and a code lens resolve request.
_xdata :: Maybe Value
} deriving (Read,Show,Eq)
deriveJSON lspOptions ''CodeLens
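-- A construction sketch, assuming the 'Range' and 'Position' constructors
-- available via "Language.LSP.Types.Location": an unresolved lens, i.e. one
-- with no command attached yet, spanning part of a single line.
--
-- > exampleLens :: CodeLens
-- > exampleLens = CodeLens
-- >   { _range   = Range (Position 3 0) (Position 3 10)
-- >   , _command = Nothing
-- >   , _xdata   = Nothing
-- >   }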
|
alanz/haskell-lsp
|
lsp-types/src/Language/LSP/Types/CodeLens.hs
|
mit
| 2,015 | 0 | 9 | 313 | 285 | 168 | 117 | 35 | 0 |
------------------------------------------------------------------------
-- |
-- Module : ALife.Realtra.GeneratePopulation
-- Copyright : (c) Amy de Buitléir 2012-2014
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- ???
--
------------------------------------------------------------------------
{-# LANGUAGE TypeFamilies #-}
import ALife.Creatur (agentId)
import ALife.Realtra.Wain (Astronomer, randomAstronomer, summarise,
initialPopulationSize, initialPopulationClassifierSizeRange,
initialPopulationDeciderSizeRange, universe)
import ALife.Creatur.Wain (adjustEnergy)
import ALife.Creatur.Wain.Pretty (pretty)
import ALife.Creatur.Wain.Statistics (Statistic, stats)
import qualified ALife.Realtra.Config as Config
import ALife.Creatur.Universe (Universe, Agent, writeToLog, store)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Random (evalRandIO)
import Control.Monad.Random.Class (getRandomR)
import Control.Monad.State.Lazy (StateT, evalStateT)
names :: [String]
names = map (("Founder" ++) . show)
[1..(initialPopulationSize Config.config)]
introduceRandomAgent
:: (Universe u, Agent u ~ Astronomer)
=> String -> StateT u IO [Statistic]
introduceRandomAgent name = do
classifierSize
<- liftIO . evalRandIO $
getRandomR (initialPopulationClassifierSizeRange Config.config)
deciderSize
<- liftIO . evalRandIO $
getRandomR (initialPopulationDeciderSizeRange Config.config)
-- Make the first generation a little hungry so they start learning
-- immediately.
agent
<- fmap (adjustEnergy 0.8) . liftIO $
evalRandIO ( randomAstronomer name Config.config classifierSize
deciderSize )
writeToLog $ "GeneratePopulation: Created " ++ agentId agent
writeToLog $ "GeneratePopulation: Stats " ++ pretty (stats agent)
store agent
return (stats agent)
introduceRandomAgents
:: (Universe u, Agent u ~ Astronomer)
=> [String] -> StateT u IO ()
introduceRandomAgents ns = do
xs <- mapM introduceRandomAgent ns
summarise xs
main :: IO ()
main = do
print names
evalStateT (introduceRandomAgents names) (universe Config.config)
|
mhwombat/creatur-realtra.OLD
|
src/ALife/Realtra/GeneratePopulation.hs
|
bsd-3-clause
| 2,239 | 0 | 12 | 368 | 516 | 283 | 233 | 45 | 1 |
module Network.AMQP.Protocol where
import Control.Monad
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import qualified Data.ByteString.Lazy.Char8 as BL
import Network.AMQP.Types
import Network.AMQP.Generated
-- True if content (a content header and possibly a content body) will follow the method
hasContent :: FramePayload -> Bool
hasContent (MethodPayload (Basic_get_ok _ _ _ _ _)) = True
hasContent (MethodPayload (Basic_deliver _ _ _ _ _)) = True
hasContent (MethodPayload (Basic_return _ _ _ _)) = True
hasContent _ = False
data Frame = Frame ChannelID FramePayload --channel, payload
deriving Show
instance Binary Frame where
get = do
fType <- getWord8
channel <- get :: Get ChannelID
payloadSize <- get :: Get PayloadSize
payload <- getPayload fType payloadSize :: Get FramePayload
0xCE <- getWord8 --frame end
return $ Frame channel payload
put (Frame chan payload) = do
putWord8 $ frameType payload
put chan
let buf = runPut $ putPayload payload
put ((fromIntegral $ BL.length buf)::PayloadSize)
putLazyByteString buf
putWord8 0xCE
-- | Gets the size of the frame.
-- The ByteString must be at least 7 bytes long, otherwise this function will fail.
peekFrameSize :: BL.ByteString -> PayloadSize
peekFrameSize = runGet f
where
f = do
void $ getWord8 -- 1 byte
void $ (get :: Get ChannelID) -- 2 bytes
get :: Get PayloadSize -- 4 bytes
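-- A usage sketch for the above (the helper name is made up): a full frame on
-- the wire is the 7-byte header (type, channel, size) peeked at here, followed
-- by the payload and a single 0xCE frame-end octet, so a reader can compute
-- how many bytes it still needs before a 'Frame' can be decoded.
--
-- > frameBytesNeeded :: BL.ByteString -> Int
-- > frameBytesNeeded hdr = 7 + fromIntegral (peekFrameSize hdr) + 1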
data FramePayload =
MethodPayload MethodPayload
| ContentHeaderPayload ShortInt ShortInt LongLongInt ContentHeaderProperties --classID, weight, bodySize, propertyFields
| ContentBodyPayload BL.ByteString
| HeartbeatPayload
deriving Show
frameType :: FramePayload -> Word8
frameType (MethodPayload _) = 1
frameType (ContentHeaderPayload _ _ _ _) = 2
frameType (ContentBodyPayload _) = 3
frameType HeartbeatPayload = 8
getPayload :: Word8 -> PayloadSize -> Get FramePayload
getPayload 1 _ = do --METHOD FRAME
payLoad <- get :: Get MethodPayload
return (MethodPayload payLoad)
getPayload 2 _ = do --content header frame
classID <- get :: Get ShortInt
weight <- get :: Get ShortInt
bodySize <- get :: Get LongLongInt
props <- getContentHeaderProperties classID
return (ContentHeaderPayload classID weight bodySize props)
getPayload 3 payloadSize = do --content body frame
payload <- getLazyByteString $ fromIntegral payloadSize
return (ContentBodyPayload payload)
getPayload 8 payloadSize = do
-- ignoring the actual payload, but still need to read the bytes from the network buffer
_ <- getLazyByteString $ fromIntegral payloadSize
return HeartbeatPayload
getPayload n _ = error ("Unknown frame payload: " ++ show n)
putPayload :: FramePayload -> Put
putPayload (MethodPayload payload) = put payload
putPayload (ContentHeaderPayload classID weight bodySize p) = do
put classID
put weight
put bodySize
putContentHeaderProperties p
putPayload (ContentBodyPayload payload) = putLazyByteString payload
putPayload HeartbeatPayload = putLazyByteString BL.empty
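-- A quick sanity check of the encoding above, hedged in that it assumes
-- 'ChannelID' is numeric so the literal 0 is accepted: a heartbeat frame
-- carries no payload, so it serialises to exactly 8 bytes, i.e. the 7-byte
-- header plus the trailing 0xCE octet.
--
-- > BL.length (encode (Frame 0 HeartbeatPayload))  -- 8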
|
bitemyapp/amqp
|
Network/AMQP/Protocol.hs
|
bsd-3-clause
| 3,169 | 0 | 13 | 674 | 817 | 406 | 411 | 73 | 1 |
module Vectorise.Type.TyConDecl (
vectTyConDecls
) where
import GhcPrelude
import Vectorise.Type.Type
import Vectorise.Monad
import Vectorise.Env( GlobalEnv( global_fam_inst_env ) )
import BuildTyCl( TcMethInfo, buildClass, buildDataCon, newTyConRepName )
import OccName
import Class
import Type
import TyCon
import DataCon
import DynFlags
import BasicTypes( DefMethSpec(..) )
import SrcLoc( SrcSpan, noSrcSpan )
import Var
import Name
import Outputable
import Util
import Control.Monad
-- |Vectorise some (possibly recursively defined) type constructors.
--
vectTyConDecls :: [TyCon] -> VM [TyCon]
vectTyConDecls tcs = fixV $ \tcs' ->
do { names' <- mapM (mkLocalisedName mkVectTyConOcc . tyConName) tcs
; mapM_ (uncurry (uncurry defTyConName)) (tcs `zip` names' `zipLazy` tcs')
; zipWithM vectTyConDecl tcs names'
}
-- |Vectorise a single type constructor.
--
vectTyConDecl :: TyCon -> Name -> VM TyCon
vectTyConDecl tycon name'
-- Type constructor representing a type class
| Just cls <- tyConClass_maybe tycon
= do { unless (null $ classATs cls) $
do dflags <- getDynFlags
cantVectorise dflags "Associated types are not yet supported" (ppr cls)
-- vectorise superclass constraint (types)
; theta' <- mapM vectType (classSCTheta cls)
-- vectorise method selectors
; let opItems = classOpItems cls
Just datacon = tyConSingleDataCon_maybe tycon
argTys = dataConRepArgTys datacon -- all selector types
opTys = drop (length argTys - length opItems) argTys -- only method types
; methods' <- sequence [ vectMethod id meth ty | ((id, meth), ty) <- zip opItems opTys]
-- construct the vectorised class (this also creates the class type constructors and its
-- data constructor)
--
-- NB: 'buildClass' attaches new quantifiers and dictionaries to the method types
; cls' <- liftDs $
buildClass
name' -- new name: "V:Class"
(tyConBinders tycon) -- keep original kind
                      (map (const Nominal) (tyConRoles tycon))     -- all roles are N for safety
(snd . classTvsFds $ cls) -- keep the original functional dependencies
(Just (
theta', -- superclasses
[], -- no associated types (for the moment)
methods', -- method info
(classMinimalDef cls))) -- Inherit minimal complete definition from cls
-- the original dictionary constructor must map to the vectorised one
; let tycon' = classTyCon cls'
Just datacon = tyConSingleDataCon_maybe tycon
Just datacon' = tyConSingleDataCon_maybe tycon'
; defDataCon datacon datacon'
-- the original superclass and methods selectors must map to the vectorised ones
; let selIds = classAllSelIds cls
selIds' = classAllSelIds cls'
; zipWithM_ defGlobalVar selIds selIds'
-- return the type constructor of the vectorised class
; return tycon'
}
  -- Regular algebraic type constructor; for now, Haskell 98-style only
| isAlgTyCon tycon
  = do { unless (all isVanillaDataCon (tyConDataCons tycon)) $
           do dflags <- getDynFlags
              cantVectorise dflags "Currently only Haskell 98 datatypes are supported" (ppr tycon)
-- vectorise the data constructor of the class tycon
; rhs' <- vectAlgTyConRhs tycon (algTyConRhs tycon)
-- keep the original GADT flags
; let gadt_flag = isGadtSyntaxTyCon tycon
-- build the vectorised type constructor
; tc_rep_name <- mkDerivedName mkTyConRepOcc name'
; return $ mkAlgTyCon
name' -- new name
(tyConBinders tycon)
(tyConResKind tycon) -- keep original kind
(map (const Nominal) (tyConRoles tycon)) -- all roles are N for safety
Nothing
[] -- no stupid theta
rhs' -- new constructor defs
(VanillaAlgTyCon tc_rep_name)
gadt_flag -- whether in GADT syntax
}
-- some other crazy thing that we don't handle
| otherwise
= do dflags <- getDynFlags
cantVectorise dflags "Can't vectorise exotic type constructor" (ppr tycon)
-- |Vectorise a class method. (Don't enter it into the vectorisation map yet.)
--
vectMethod :: Id -> DefMethInfo -> Type -> VM TcMethInfo
vectMethod id defMeth ty
= do { -- Vectorise the method type.
; ty' <- vectType ty
-- Create a name for the vectorised method.
; id' <- mkVectId id ty'
; return (Var.varName id', ty', defMethSpecOfDefMeth defMeth)
}
-- | Convert a `DefMethInfo` to a `DefMethSpec`, which discards the name
-- carried by the `DefMethInfo`.
defMethSpecOfDefMeth :: DefMethInfo -> Maybe (DefMethSpec (SrcSpan, Type))
defMethSpecOfDefMeth Nothing = Nothing
defMethSpecOfDefMeth (Just (_, VanillaDM)) = Just VanillaDM
defMethSpecOfDefMeth (Just (_, GenericDM ty)) = Just (GenericDM (noSrcSpan, ty))
-- |Vectorise the RHS of an algebraic type.
--
vectAlgTyConRhs :: TyCon -> AlgTyConRhs -> VM AlgTyConRhs
vectAlgTyConRhs tc (AbstractTyCon {})
= do dflags <- getDynFlags
cantVectorise dflags "Can't vectorise imported abstract type" (ppr tc)
vectAlgTyConRhs _tc (DataTyCon { data_cons = data_cons
, data_cons_size = data_cons_size
, is_enum = is_enum
})
= do { data_cons' <- mapM vectDataCon data_cons
; zipWithM_ defDataCon data_cons data_cons'
; return $ DataTyCon { data_cons = data_cons'
, data_cons_size = data_cons_size
, is_enum = is_enum
}
}
vectAlgTyConRhs tc (TupleTyCon { data_con = con })
= vectAlgTyConRhs tc (mkDataTyConRhs [con])
-- I'm not certain this is what you want to do for tuples,
-- but it's the behaviour we had before I refactored the
-- representation of AlgTyConRhs to add tuples
vectAlgTyConRhs tc (SumTyCon { data_cons = cons
, data_cons_size = data_cons_size })
= -- FIXME (osa): I'm pretty sure this is broken.. TupleTyCon case is probably
-- also broken when the tuple is unboxed.
vectAlgTyConRhs tc (DataTyCon { data_cons = cons
, data_cons_size = data_cons_size
, is_enum = all (((==) 0) . dataConRepArity) cons })
vectAlgTyConRhs tc (NewTyCon {})
= do dflags <- getDynFlags
cantVectorise dflags noNewtypeErr (ppr tc)
where
noNewtypeErr = "Vectorisation of newtypes not supported yet; please use a 'data' declaration"
-- |Vectorise a data constructor by vectorising its argument and return types.
--
vectDataCon :: DataCon -> VM DataCon
vectDataCon dc
| not . null $ ex_tvs
= do dflags <- getDynFlags
cantVectorise dflags "Can't vectorise constructor with existential type variables yet" (ppr dc)
| not . null $ eq_spec
= do dflags <- getDynFlags
cantVectorise dflags "Can't vectorise constructor with equality context yet" (ppr dc)
| not . null $ dataConFieldLabels dc
= do dflags <- getDynFlags
cantVectorise dflags "Can't vectorise constructor with labelled fields yet" (ppr dc)
| not . null $ theta
= do dflags <- getDynFlags
cantVectorise dflags "Can't vectorise constructor with constraint context yet" (ppr dc)
| otherwise
= do { name' <- mkLocalisedName mkVectDataConOcc name
; tycon' <- vectTyCon tycon
; arg_tys <- mapM vectType rep_arg_tys
; let ret_ty = mkFamilyTyConApp tycon' (mkTyVarTys univ_tvs)
; fam_envs <- readGEnv global_fam_inst_env
; rep_nm <- liftDs $ newTyConRepName name'
; let tag_map = mkTyConTagMap tycon'
; liftDs $ buildDataCon fam_envs
name'
(dataConIsInfix dc) -- infix if the original is
rep_nm
(dataConSrcBangs dc) -- strictness as original constructor
(Just $ dataConImplBangs dc)
[] -- no labelled fields for now
univ_tvs -- universally quantified vars
[] -- no existential tvs for now
user_bndrs
[] -- no equalities for now
[] -- no context for now
arg_tys -- argument types
ret_ty -- return type
tycon' -- representation tycon
tag_map
}
where
name = dataConName dc
rep_arg_tys = dataConRepArgTys dc
tycon = dataConTyCon dc
(univ_tvs, ex_tvs, eq_spec, theta, _arg_tys, _res_ty) = dataConFullSig dc
user_bndrs = dataConUserTyVarBinders dc
|
shlevy/ghc
|
compiler/vectorise/Vectorise/Type/TyConDecl.hs
|
bsd-3-clause
| 9,526 | 1 | 15 | 3,237 | 1,762 | 920 | 842 | 153 | 1 |
import Util
import Timing
import Randomish
import System.Environment
import Control.Exception
import qualified TreeLookupVectorised as TD
import qualified Data.Array.Parallel.PArray as P
import qualified Data.Vector.Unboxed as V
main
= do args <- getArgs
case args of
[alg, count] -> run alg (read count)
_ -> usage
run "vectorised" count
= do let arr = P.fromListPA [0 .. count - 1]
arr `seq` return ()
(arrResult, tElapsed)
<- time
$ let arr' = TD.treeLookupPA arr arr
in P.nfPA arr' `seq` return arr'
print $ P.lengthPA arrResult
putStr $ prettyTime tElapsed
run _ _
= usage
usage = putStr $ unlines
[ "usage: indices <algorithm> <count>\n"
, " algorithm one of " ++ show ["vectorised"]
, ""]
|
mainland/dph
|
dph-lifted-vseg/examples/treeLookup/Main.hs
|
bsd-3-clause
| 908 | 0 | 14 | 325 | 253 | 135 | 118 | 28 | 2 |
{-| Cluster rebalancer.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.Program.Hbal
( main
, options
, arguments
, iterateDepth
) where
import Control.Exception (bracket)
import Control.Monad
import Data.List
import Data.Maybe (isJust, isNothing, fromJust)
import Data.IORef
import System.Exit
import System.IO
import System.Posix.Process
import System.Posix.Signals
import Text.Printf (printf)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.BasicTypes
import Ganeti.Common
import Ganeti.HTools.CLI
import Ganeti.HTools.ExtLoader
import Ganeti.HTools.Types
import Ganeti.HTools.Loader
import Ganeti.OpCodes (wrapOpCode, setOpComment, setOpPriority,
OpCode, MetaOpCode)
import Ganeti.Jobs as Jobs
import Ganeti.Types
import Ganeti.Utils
import qualified Ganeti.Luxi as L
import Ganeti.Version (version)
-- | Options list and functions.
options :: IO [OptType]
options = do
luxi <- oLuxiSocket
return
[ oPrintNodes
, oPrintInsts
, oPrintCommands
, oDataFile
, oEvacMode
, oRapiMaster
, luxi
, oIAllocSrc
, oExecJobs
, oGroup
, oMaxSolLength
, oVerbose
, oQuiet
, oOfflineNode
, oMinScore
, oMaxCpu
, oMinDisk
, oMinGain
, oMinGainLim
, oDiskMoves
, oSelInst
, oInstMoves
, oDynuFile
, oExTags
, oExInst
, oSaveCluster
, oPriority
]
-- | The list of arguments supported by the program.
arguments :: [ArgCompletion]
arguments = []
-- | A simple type alias for a clearer signature.
type Annotator = OpCode -> MetaOpCode
-- | Wraps an 'OpCode' in a 'MetaOpCode' while also adding a comment
-- about what generated the opcode.
annotateOpCode :: Annotator
annotateOpCode =
setOpComment ("rebalancing via hbal " ++ version) . wrapOpCode
{- | Start computing the solution at the given depth and recurse until
we find a valid solution or we exceed the maximum depth.
-}
iterateDepth :: Bool -- ^ Whether to print moves
-> Cluster.Table -- ^ The starting table
-> Int -- ^ Remaining length
-> Bool -- ^ Allow disk moves
-> Bool -- ^ Allow instance moves
-> Int -- ^ Max node name len
-> Int -- ^ Max instance name len
-> [MoveJob] -- ^ Current command list
-> Score -- ^ Score at which to stop
-> Score -- ^ Min gain limit
-> Score -- ^ Min score gain
-> Bool -- ^ Enable evacuation mode
-> IO (Cluster.Table, [MoveJob]) -- ^ The resulting table
-- and commands
iterateDepth printmove ini_tbl max_rounds disk_moves inst_moves nmlen imlen
cmd_strs min_score mg_limit min_gain evac_mode =
let Cluster.Table ini_nl ini_il _ _ = ini_tbl
allowed_next = Cluster.doNextBalance ini_tbl max_rounds min_score
m_fin_tbl = if allowed_next
then Cluster.tryBalance ini_tbl disk_moves inst_moves
evac_mode mg_limit min_gain
else Nothing
in case m_fin_tbl of
Just fin_tbl ->
do
let (Cluster.Table _ _ _ fin_plc) = fin_tbl
cur_plc@(idx, _, _, move, _) <-
exitIfEmpty "Empty placement list returned for solution?!" fin_plc
let fin_plc_len = length fin_plc
(sol_line, cmds) = Cluster.printSolutionLine ini_nl ini_il
nmlen imlen cur_plc fin_plc_len
afn = Cluster.involvedNodes ini_il cur_plc
upd_cmd_strs = (afn, idx, move, cmds):cmd_strs
when printmove $ do
putStrLn sol_line
hFlush stdout
iterateDepth printmove fin_tbl max_rounds disk_moves inst_moves
nmlen imlen upd_cmd_strs min_score
mg_limit min_gain evac_mode
Nothing -> return (ini_tbl, cmd_strs)
-- | Displays the cluster stats.
printStats :: Node.List -> Node.List -> IO ()
printStats ini_nl fin_nl = do
let ini_cs = Cluster.totalResources ini_nl
fin_cs = Cluster.totalResources fin_nl
printf "Original: mem=%d disk=%d\n"
(Cluster.csFmem ini_cs) (Cluster.csFdsk ini_cs) :: IO ()
printf "Final: mem=%d disk=%d\n"
(Cluster.csFmem fin_cs) (Cluster.csFdsk fin_cs)
-- | Saves the rebalance commands to a text file.
saveBalanceCommands :: Options -> String -> IO ()
saveBalanceCommands opts cmd_data = do
let out_path = fromJust $ optShowCmds opts
putStrLn ""
if out_path == "-"
then printf "Commands to run to reach the above solution:\n%s"
(unlines . map (" " ++) .
filter (/= " check") .
lines $ cmd_data)
else do
writeFile out_path (shTemplate ++ cmd_data)
printf "The commands have been written to file '%s'\n" out_path
-- | Wrapper over execJobSet checking for early termination via an IORef.
execCancelWrapper :: Annotator -> String -> Node.List
-> Instance.List -> IORef Int -> [JobSet] -> IO (Result ())
execCancelWrapper _ _ _ _ _ [] = return $ Ok ()
execCancelWrapper anno master nl il cref alljss = do
cancel <- readIORef cref
if cancel > 0
then do
putStrLn $ "Exiting early due to user request, " ++
show (length alljss) ++ " jobset(s) remaining."
return $ Ok ()
else execJobSet anno master nl il cref alljss
-- | Execute an entire jobset.
execJobSet :: Annotator -> String -> Node.List
-> Instance.List -> IORef Int -> [JobSet] -> IO (Result ())
execJobSet _ _ _ _ _ [] = return $ Ok ()
execJobSet anno master nl il cref (js:jss) = do
-- map from jobset (htools list of positions) to [[opcodes]]
let jobs = map (\(_, idx, move, _) ->
map anno $ Cluster.iMoveToJob nl il idx move) js
descr = map (\(_, idx, _, _) -> Container.nameOf il idx) js
logfn = putStrLn . ("Got job IDs" ++) . commaJoin . map (show . fromJobId)
putStrLn $ "Executing jobset for instances " ++ commaJoin descr
jrs <- bracket (L.getClient master) L.closeClient $
Jobs.execJobsWait jobs logfn
case jrs of
Bad x -> return $ Bad x
Ok x -> if null failures
then execCancelWrapper anno master nl il cref jss
else return . Bad . unlines $ [
"Not all jobs completed successfully: " ++ show failures,
"Aborting."]
where
failures = filter ((/= JOB_STATUS_SUCCESS) . snd) x
-- | Executes the jobs, if possible and desired.
maybeExecJobs :: Options
-> [a]
-> Node.List
-> Instance.List
-> [JobSet]
-> IO (Result ())
maybeExecJobs opts ord_plc fin_nl il cmd_jobs =
if optExecJobs opts && not (null ord_plc)
then (case optLuxi opts of
Nothing ->
return $ Bad "Execution of commands possible only on LUXI"
Just master ->
let annotator = maybe id setOpPriority (optPriority opts) .
annotateOpCode
in execWithCancel annotator master fin_nl il cmd_jobs)
else return $ Ok ()
-- | Signal handler for graceful termination.
handleSigInt :: IORef Int -> IO ()
handleSigInt cref = do
writeIORef cref 1
putStrLn ("Cancel request registered, will exit at" ++
" the end of the current job set...")
-- | Signal handler for immediate termination.
handleSigTerm :: IORef Int -> IO ()
handleSigTerm cref = do
-- update the cref to 2, just for consistency
writeIORef cref 2
putStrLn "Double cancel request, exiting now..."
exitImmediately $ ExitFailure 2
-- | Prepares to run a set of jobsets with handling of signals and early
-- termination.
execWithCancel :: Annotator -> String -> Node.List -> Instance.List -> [JobSet]
-> IO (Result ())
execWithCancel anno master fin_nl il cmd_jobs = do
cref <- newIORef 0
mapM_ (\(hnd, sig) -> installHandler sig (Catch (hnd cref)) Nothing)
[(handleSigTerm, softwareTermination), (handleSigInt, keyboardSignal)]
execCancelWrapper anno master fin_nl il cref cmd_jobs
-- | Select the target node group.
selectGroup :: Options -> Group.List -> Node.List -> Instance.List
-> IO (String, (Node.List, Instance.List))
selectGroup opts gl nlf ilf = do
let ngroups = Cluster.splitCluster nlf ilf
when (length ngroups > 1 && isNothing (optGroup opts)) $ do
hPutStrLn stderr "Found multiple node groups:"
mapM_ (hPutStrLn stderr . (" " ++) . Group.name .
flip Container.find gl . fst) ngroups
exitErr "Aborting."
case optGroup opts of
Nothing -> do
(gidx, cdata) <- exitIfEmpty "No groups found by splitCluster?!" ngroups
let grp = Container.find gidx gl
return (Group.name grp, cdata)
Just g -> case Container.findByName gl g of
Nothing -> do
hPutStrLn stderr $ "Node group " ++ g ++
" not found. Node group list is:"
mapM_ (hPutStrLn stderr . (" " ++) . Group.name ) (Container.elems gl)
exitErr "Aborting."
Just grp ->
case lookup (Group.idx grp) ngroups of
Nothing ->
-- This will only happen if there are no nodes assigned
-- to this group
return (Group.name grp, (Container.empty, Container.empty))
Just cdata -> return (Group.name grp, cdata)
-- | Do a few checks on the cluster data.
checkCluster :: Int -> Node.List -> Instance.List -> IO ()
checkCluster verbose nl il = do
-- nothing to do on an empty cluster
when (Container.null il) $ do
printf "Cluster is empty, exiting.\n"::IO ()
exitSuccess
-- hbal doesn't currently handle split clusters
let split_insts = Cluster.findSplitInstances nl il
unless (null split_insts || verbose <= 1) $ do
hPutStrLn stderr "Found instances belonging to multiple node groups:"
mapM_ (\i -> hPutStrLn stderr $ " " ++ Instance.name i) split_insts
hPutStrLn stderr "These instances will not be moved."
printf "Loaded %d nodes, %d instances\n"
(Container.size nl)
(Container.size il)::IO ()
let csf = commonSuffix nl il
when (not (null csf) && verbose > 1) $
printf "Note: Stripping common suffix of '%s' from names\n" csf
-- | Do a few checks on the selected group data.
checkGroup :: Int -> String -> Node.List -> Instance.List -> IO ()
checkGroup verbose gname nl il = do
printf "Group size %d nodes, %d instances\n"
(Container.size nl)
(Container.size il)::IO ()
putStrLn $ "Selected node group: " ++ gname
let (bad_nodes, bad_instances) = Cluster.computeBadItems nl il
unless (verbose < 1) $ printf
"Initial check done: %d bad nodes, %d bad instances.\n"
(length bad_nodes) (length bad_instances)
unless (null bad_nodes) $
putStrLn "Cluster is not N+1 happy, continuing but no guarantee \
\that the cluster will end N+1 happy."
-- | Check that we actually need to rebalance.
checkNeedRebalance :: Options -> Score -> IO ()
checkNeedRebalance opts ini_cv = do
let min_cv = optMinScore opts
when (ini_cv < min_cv) $ do
printf "Cluster is already well balanced (initial score %.6g,\n\
\minimum score %.6g).\nNothing to do, exiting\n"
ini_cv min_cv:: IO ()
exitSuccess
-- | Main function.
main :: Options -> [String] -> IO ()
main opts args = do
unless (null args) $ exitErr "This program doesn't take any arguments."
let verbose = optVerbose opts
shownodes = optShowNodes opts
showinsts = optShowInsts opts
ini_cdata@(ClusterData gl fixed_nl ilf ctags ipol) <- loadExternalData opts
when (verbose > 1) $ do
putStrLn $ "Loaded cluster tags: " ++ intercalate "," ctags
putStrLn $ "Loaded cluster ipolicy: " ++ show ipol
nlf <- setNodeStatus opts fixed_nl
checkCluster verbose nlf ilf
maybeSaveData (optSaveCluster opts) "original" "before balancing" ini_cdata
(gname, (nl, il)) <- selectGroup opts gl nlf ilf
checkGroup verbose gname nl il
maybePrintInsts showinsts "Initial" (Cluster.printInsts nl il)
maybePrintNodes shownodes "Initial cluster" (Cluster.printNodes nl)
let ini_cv = Cluster.compCV nl
ini_tbl = Cluster.Table nl il ini_cv []
min_cv = optMinScore opts
checkNeedRebalance opts ini_cv
if verbose > 2
then printf "Initial coefficients: overall %.8f\n%s"
ini_cv (Cluster.printStats " " nl)::IO ()
else printf "Initial score: %.8f\n" ini_cv
putStrLn "Trying to minimize the CV..."
let imlen = maximum . map (length . Instance.alias) $ Container.elems il
nmlen = maximum . map (length . Node.alias) $ Container.elems nl
(fin_tbl, cmd_strs) <- iterateDepth True ini_tbl (optMaxLength opts)
(optDiskMoves opts)
(optInstMoves opts)
nmlen imlen [] min_cv
(optMinGainLim opts) (optMinGain opts)
(optEvacMode opts)
let (Cluster.Table fin_nl fin_il fin_cv fin_plc) = fin_tbl
ord_plc = reverse fin_plc
sol_msg = case () of
_ | null fin_plc -> printf "No solution found\n"
| verbose > 2 ->
printf "Final coefficients: overall %.8f\n%s"
fin_cv (Cluster.printStats " " fin_nl)
| otherwise ->
printf "Cluster score improved from %.8f to %.8f\n"
ini_cv fin_cv ::String
putStr sol_msg
unless (verbose < 1) $
printf "Solution length=%d\n" (length ord_plc)
let cmd_jobs = Cluster.splitJobs cmd_strs
when (isJust $ optShowCmds opts) .
saveBalanceCommands opts $ Cluster.formatCmds cmd_jobs
maybeSaveData (optSaveCluster opts) "balanced" "after balancing"
ini_cdata { cdNodes = fin_nl, cdInstances = fin_il }
maybePrintInsts showinsts "Final" (Cluster.printInsts fin_nl fin_il)
maybePrintNodes shownodes "Final cluster" (Cluster.printNodes fin_nl)
when (verbose > 3) $ printStats nl fin_nl
exitIfBad "hbal" =<< maybeExecJobs opts ord_plc fin_nl il cmd_jobs
|
dblia/nosql-ganeti
|
src/Ganeti/HTools/Program/Hbal.hs
|
gpl-2.0
| 15,326 | 0 | 19 | 4,316 | 3,695 | 1,858 | 1,837 | 308 | 4 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ms-MY">
<title>Getting started Guide</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
rnehra01/zap-extensions
|
src/org/zaproxy/zap/extension/gettingStarted/resources/help_ms_MY/helpset_ms_MY.hs
|
apache-2.0
| 967 | 79 | 66 | 158 | 411 | 208 | 203 | -1 | -1 |
{- System.Directory without its conflicting isSymbolicLink
-
- Copyright 2016 Joey Hess <[email protected]>
-
- License: BSD-2-clause
-}
-- Disable warnings because only some versions of System.Directory export
-- isSymbolicLink.
{-# OPTIONS_GHC -fno-warn-tabs -w #-}
module Utility.SystemDirectory (
module System.Directory
) where
import System.Directory hiding (isSymbolicLink, getFileSize)
|
ArchiveTeam/glowing-computing-machine
|
src/Utility/SystemDirectory.hs
|
bsd-2-clause
| 399 | 2 | 5 | 56 | 34 | 23 | 11 | 4 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module StaticFiles where
import Yesod.Static
staticFiles "static"
|
danse/haskellers
|
StaticFiles.hs
|
bsd-2-clause
| 136 | 0 | 5 | 17 | 17 | 10 | 7 | 5 | 0 |
data TestType = Foo | Bar
hello :: String
hello = 'H':"ello World!"
-- ziguser = zig
zig :: [TestType]
zig = (Foo:zag)
zag :: [TestType]
zag = Bar:zig
main = print hello
|
themattchan/tandoori
|
input/input-2.hs
|
bsd-3-clause
| 197 | 0 | 6 | 60 | 71 | 41 | 30 | 8 | 1 |
module DependencyTest
( tests
)
where
import Test.HUnit
import Data.Graph.Inductive.Graph ( Graph(..) )
import Database.Schema.Migrations.Dependencies
import Common
tests :: [Test]
tests = depGraphTests ++ dependencyTests
type DepGraphTestCase = ([TestDependable], Either String (DependencyGraph TestDependable))
depGraphTestCases :: [DepGraphTestCase]
depGraphTestCases = [ ( []
, Right $ DG [] [] empty
)
, ( [first, second]
, Right $ DG [(first,1),(second,2)]
[("first",1),("second",2)] (mkGraph [(1, "first"), (2, "second")]
[(2, 1, "first -> second")])
)
, ( [cycleFirst, cycleSecond]
, Left "Invalid dependency graph; cycle detected")
]
where
first = TD "first" []
second = TD "second" ["first"]
cycleFirst = TD "first" ["second"]
cycleSecond = TD "second" ["first"]
depGraphTests :: [Test]
depGraphTests = map mkDepGraphTest depGraphTestCases
mkDepGraphTest :: DepGraphTestCase -> Test
mkDepGraphTest (input, expected) = expected ~=? mkDepGraph input
data Direction = Forward | Reverse deriving (Show)
type DependencyTestCase = ([TestDependable], String, Direction, [String])
dependencyTestCases :: [DependencyTestCase]
dependencyTestCases = [ ([TD "first" []], "first", Forward, [])
, ([TD "first" []], "first", Reverse, [])
, ([TD "first" ["second"], TD "second" []], "first", Forward, ["second"])
, ([TD "first" ["second"], TD "second" []], "second", Reverse, ["first"])
, ([TD "first" ["second"], TD "second" ["third"], TD "third" []], "first", Forward, ["third", "second"])
, ([TD "first" ["second"], TD "second" ["third"], TD "third" [], TD "fourth" ["third"]]
, "first", Forward, ["third", "second"])
, ([TD "first" [], TD "second" ["first"]]
, "first", Reverse, ["second"])
, ([TD "first" [], TD "second" ["first"], TD "third" ["second"]]
, "first", Reverse, ["third", "second"])
, ([TD "first" [], TD "second" ["first"], TD "third" ["second"], TD "fourth" ["second"]]
, "first", Reverse, ["fourth", "third", "second"])
, ([ TD "first" ["second"], TD "second" ["third"], TD "third" ["fourth"]
, TD "second" ["fifth"], TD "fifth" ["third"], TD "fourth" []]
, "fourth", Reverse, ["first", "second", "fifth", "third"])
, ([ TD "first" ["second"], TD "second" ["third", "fifth"], TD "third" ["fourth"]
, TD "fifth" ["third"], TD "fourth" []]
, "first", Forward, ["fourth", "third", "fifth", "second"])
]
fromRight :: Either a b -> b
fromRight (Left _) = error "Got a Left value"
fromRight (Right v) = v
mkDependencyTest :: DependencyTestCase -> Test
mkDependencyTest testCase@(deps, a, dir, expected) =
let f = case dir of
Forward -> dependencies
Reverse -> reverseDependencies
in (show testCase) ~: expected ~=? f (fromRight $ mkDepGraph deps) a
dependencyTests :: [Test]
dependencyTests = map mkDependencyTest dependencyTestCases
|
nathankot/dbmigrations
|
test/DependencyTest.hs
|
bsd-3-clause
| 3,528 | 0 | 12 | 1,159 | 1,130 | 655 | 475 | 59 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module T8455 where
ty = [t| 5 |]
|
sdiehl/ghc
|
testsuite/tests/quotes/T8455.hs
|
bsd-3-clause
| 105 | 0 | 4 | 18 | 15 | 12 | 3 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Utilities for desugaring
This module exports some utility functions of no great interest.
-}
{-# LANGUAGE CPP #-}
-- | Utility functions for constructing Core syntax, principally for desugaring
module DsUtils (
EquationInfo(..),
firstPat, shiftEqns,
MatchResult(..), CanItFail(..), CaseAlt(..),
cantFailMatchResult, alwaysFailMatchResult,
extractMatchResult, combineMatchResults,
adjustMatchResult, adjustMatchResultDs,
mkCoLetMatchResult, mkViewMatchResult, mkGuardedMatchResult,
matchCanFail, mkEvalMatchResult,
mkCoPrimCaseMatchResult, mkCoAlgCaseMatchResult, mkCoSynCaseMatchResult,
wrapBind, wrapBinds,
mkErrorAppDs, mkCoreAppDs, mkCoreAppsDs, mkCastDs,
seqVar,
-- LHs tuples
mkLHsVarPatTup, mkLHsPatTup, mkVanillaTuplePat,
mkBigLHsVarTupId, mkBigLHsTupId, mkBigLHsVarPatTupId, mkBigLHsPatTupId,
mkSelectorBinds,
selectSimpleMatchVarL, selectMatchVars, selectMatchVar,
mkOptTickBox, mkBinaryTickBox, decideBangHood
) where
#include "HsVersions.h"
import {-# SOURCE #-} Match ( matchSimply )
import HsSyn
import TcHsSyn
import TcType( tcSplitTyConApp )
import CoreSyn
import DsMonad
import {-# SOURCE #-} DsExpr ( dsLExpr )
import CoreUtils
import MkCore
import MkId
import Id
import Literal
import TyCon
-- import ConLike
import DataCon
import PatSyn
import Type
import Coercion
import TysPrim
import TysWiredIn
import BasicTypes
import ConLike
import UniqSet
import UniqSupply
import Module
import PrelNames
import Outputable
import SrcLoc
import Util
import DynFlags
import FastString
import qualified GHC.LanguageExtensions as LangExt
import TcEvidence
import Control.Monad ( zipWithM )
{-
************************************************************************
* *
\subsection{ Selecting match variables}
* *
************************************************************************
We're about to match against some patterns. We want to make some
@Ids@ to use as match variables. If a pattern has an @Id@ readily at
hand, which should indeed be bound to the pattern as a whole, then use it;
otherwise, make one up.
-}
selectSimpleMatchVarL :: LPat Id -> DsM Id
selectSimpleMatchVarL pat = selectMatchVar (unLoc pat)
-- (selectMatchVars ps) chooses variables
-- to use for matching ps against. If the pattern is a variable,
-- we try to use that, to save inventing lots of fresh variables.
--
-- OLD, but interesting note:
-- But even if it is a variable, its type might not match. Consider
-- data T a where
-- T1 :: Int -> T Int
-- T2 :: a -> T a
--
-- f :: T a -> a -> Int
-- f (T1 i) (x::Int) = x
-- f (T2 i) (y::a) = 0
-- Then we must not choose (x::Int) as the matching variable!
-- And nowadays we won't, because the (x::Int) will be wrapped in a CoPat
selectMatchVars :: [Pat Id] -> DsM [Id]
selectMatchVars ps = mapM selectMatchVar ps
selectMatchVar :: Pat Id -> DsM Id
selectMatchVar (BangPat pat) = selectMatchVar (unLoc pat)
selectMatchVar (LazyPat pat) = selectMatchVar (unLoc pat)
selectMatchVar (ParPat pat) = selectMatchVar (unLoc pat)
selectMatchVar (VarPat var) = return (localiseId (unLoc var))
-- Note [Localise pattern binders]
selectMatchVar (AsPat var _) = return (unLoc var)
selectMatchVar other_pat = newSysLocalDs (hsPatType other_pat)
-- OK, better make up one...
{-
Note [Localise pattern binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider module M where
[Just a] = e
After renaming it looks like
module M where
[Just M.a] = e
We don't generalise, since it's a pattern binding, monomorphic, etc,
so after desugaring we may get something like
M.a = case e of (v:_) ->
case v of Just M.a -> M.a
Notice the "M.a" in the pattern; after all, it was in the original
pattern. However, after optimisation those pattern binders can become
let-binders, and then end up floated to top level. They have a
different *unique* by then (the simplifier is good about maintaining
proper scoping), but it's BAD to have two top-level bindings with the
External Name M.a, because that turns into two linker symbols for M.a.
It's quite rare for this to actually *happen* -- the only case I know
of is tc003 compiled with the 'hpc' way -- but that only makes it
all the more annoying.
To avoid this, we craftily call 'localiseId' in the desugarer, which
simply turns the External Name for the Id into an Internal one, but
doesn't change the unique. So the desugarer produces this:
M.a{r8} = case e of (v:_) ->
case v of Just a{r8} -> M.a{r8}
The unique is still 'r8', but the binding site in the pattern
is now an Internal Name. Now the simplifier's usual mechanisms
will propagate that Name to all the occurrence sites, as well as
un-shadowing it, so we'll get
M.a{r8} = case e of (v:_) ->
case v of Just a{s77} -> a{s77}
In fact, even CoreSubst.simpleOptExpr will do this, and simpleOptExpr
runs on the output of the desugarer, so all is well by the end of
the desugaring pass.
************************************************************************
* *
* type synonym EquationInfo and access functions for its pieces *
* *
************************************************************************
\subsection[EquationInfo-synonym]{@EquationInfo@: a useful synonym}
The ``equation info'' used by @match@ is relatively complicated and
worthy of a type synonym and a few handy functions.
-}
firstPat :: EquationInfo -> Pat Id
firstPat eqn = ASSERT( notNull (eqn_pats eqn) ) head (eqn_pats eqn)
shiftEqns :: [EquationInfo] -> [EquationInfo]
-- Drop the first pattern in each equation
shiftEqns eqns = [ eqn { eqn_pats = tail (eqn_pats eqn) } | eqn <- eqns ]
-- Functions on MatchResults
matchCanFail :: MatchResult -> Bool
matchCanFail (MatchResult CanFail _) = True
matchCanFail (MatchResult CantFail _) = False
alwaysFailMatchResult :: MatchResult
alwaysFailMatchResult = MatchResult CanFail (\fail -> return fail)
cantFailMatchResult :: CoreExpr -> MatchResult
cantFailMatchResult expr = MatchResult CantFail (\_ -> return expr)
extractMatchResult :: MatchResult -> CoreExpr -> DsM CoreExpr
extractMatchResult (MatchResult CantFail match_fn) _
= match_fn (error "It can't fail!")
extractMatchResult (MatchResult CanFail match_fn) fail_expr = do
(fail_bind, if_it_fails) <- mkFailurePair fail_expr
body <- match_fn if_it_fails
return (mkCoreLet fail_bind body)
combineMatchResults :: MatchResult -> MatchResult -> MatchResult
combineMatchResults (MatchResult CanFail body_fn1)
(MatchResult can_it_fail2 body_fn2)
= MatchResult can_it_fail2 body_fn
where
body_fn fail = do body2 <- body_fn2 fail
(fail_bind, duplicatable_expr) <- mkFailurePair body2
body1 <- body_fn1 duplicatable_expr
return (Let fail_bind body1)
combineMatchResults match_result1@(MatchResult CantFail _) _
= match_result1
adjustMatchResult :: DsWrapper -> MatchResult -> MatchResult
adjustMatchResult encl_fn (MatchResult can_it_fail body_fn)
= MatchResult can_it_fail (\fail -> encl_fn <$> body_fn fail)
adjustMatchResultDs :: (CoreExpr -> DsM CoreExpr) -> MatchResult -> MatchResult
adjustMatchResultDs encl_fn (MatchResult can_it_fail body_fn)
= MatchResult can_it_fail (\fail -> encl_fn =<< body_fn fail)
wrapBinds :: [(Var,Var)] -> CoreExpr -> CoreExpr
wrapBinds [] e = e
wrapBinds ((new,old):prs) e = wrapBind new old (wrapBinds prs e)
wrapBind :: Var -> Var -> CoreExpr -> CoreExpr
wrapBind new old body -- NB: this function must deal with term
| new==old = body -- variables, type variables or coercion variables
| otherwise = Let (NonRec new (varToCoreExpr old)) body
seqVar :: Var -> CoreExpr -> CoreExpr
seqVar var body = Case (Var var) var (exprType body)
[(DEFAULT, [], body)]
mkCoLetMatchResult :: CoreBind -> MatchResult -> MatchResult
mkCoLetMatchResult bind = adjustMatchResult (mkCoreLet bind)
-- (mkViewMatchResult var' viewExpr mr) makes the expression
-- let var' = viewExpr in mr
mkViewMatchResult :: Id -> CoreExpr -> MatchResult -> MatchResult
mkViewMatchResult var' viewExpr =
adjustMatchResult (mkCoreLet (NonRec var' viewExpr))
mkEvalMatchResult :: Id -> Type -> MatchResult -> MatchResult
mkEvalMatchResult var ty
= adjustMatchResult (\e -> Case (Var var) var ty [(DEFAULT, [], e)])
mkGuardedMatchResult :: CoreExpr -> MatchResult -> MatchResult
mkGuardedMatchResult pred_expr (MatchResult _ body_fn)
= MatchResult CanFail (\fail -> do body <- body_fn fail
return (mkIfThenElse pred_expr body fail))
mkCoPrimCaseMatchResult :: Id -- Scrutinee
-> Type -- Type of the case
-> [(Literal, MatchResult)] -- Alternatives
-> MatchResult -- Literals are all unlifted
mkCoPrimCaseMatchResult var ty match_alts
= MatchResult CanFail mk_case
where
mk_case fail = do
alts <- mapM (mk_alt fail) sorted_alts
return (Case (Var var) var ty ((DEFAULT, [], fail) : alts))
sorted_alts = sortWith fst match_alts -- Right order for a Case
mk_alt fail (lit, MatchResult _ body_fn)
= ASSERT( not (litIsLifted lit) )
do body <- body_fn fail
return (LitAlt lit, [], body)
data CaseAlt a = MkCaseAlt{ alt_pat :: a,
alt_bndrs :: [Var],
alt_wrapper :: HsWrapper,
alt_result :: MatchResult }
mkCoAlgCaseMatchResult
:: DynFlags
-> Id -- Scrutinee
-> Type -- Type of exp
-> [CaseAlt DataCon] -- Alternatives (bndrs *include* tyvars, dicts)
-> MatchResult
mkCoAlgCaseMatchResult dflags var ty match_alts
| isNewtype -- Newtype case; use a let
= ASSERT( null (tail match_alts) && null (tail arg_ids1) )
mkCoLetMatchResult (NonRec arg_id1 newtype_rhs) match_result1
| isPArrFakeAlts match_alts
= MatchResult CanFail $ mkPArrCase dflags var ty (sort_alts match_alts)
| otherwise
= mkDataConCase var ty match_alts
where
isNewtype = isNewTyCon (dataConTyCon (alt_pat alt1))
-- [Interesting: because of GADTs, we can't rely on the type of
-- the scrutinised Id to be sufficiently refined to have a TyCon in it]
alt1@MkCaseAlt{ alt_bndrs = arg_ids1, alt_result = match_result1 }
= ASSERT( notNull match_alts ) head match_alts
-- Stuff for newtype
arg_id1 = ASSERT( notNull arg_ids1 ) head arg_ids1
var_ty = idType var
(tc, ty_args) = tcSplitTyConApp var_ty -- Don't look through newtypes
-- (not that splitTyConApp does, these days)
newtype_rhs = unwrapNewTypeBody tc ty_args (Var var)
--- Stuff for parallel arrays
--
-- Concerning `isPArrFakeAlts':
--
-- * it is *not* sufficient to just check the type of the type
-- constructor, as we have to be careful not to confuse the real
-- representation of parallel arrays with the fake constructors;
-- moreover, a list of alternatives must not mix fake and real
-- constructors (this is checked earlier on)
--
-- FIXME: We actually go through the whole list and make sure that
-- either all or none of the constructors are fake parallel
-- array constructors. This is to spot equations that mix fake
-- constructors with the real representation defined in
-- `PrelPArr'. It would be nicer to spot this situation
-- earlier and raise a proper error message, but it can really
-- only happen in `PrelPArr' anyway.
--
isPArrFakeAlts :: [CaseAlt DataCon] -> Bool
isPArrFakeAlts [alt] = isPArrFakeCon (alt_pat alt)
isPArrFakeAlts (alt:alts) =
case (isPArrFakeCon (alt_pat alt), isPArrFakeAlts alts) of
(True , True ) -> True
(False, False) -> False
_ -> panic "DsUtils: you may not mix `[:...:]' with `PArr' patterns"
isPArrFakeAlts [] = panic "DsUtils: unexpectedly found an empty list of PArr fake alternatives"
mkCoSynCaseMatchResult :: Id -> Type -> CaseAlt PatSyn -> MatchResult
mkCoSynCaseMatchResult var ty alt = MatchResult CanFail $ mkPatSynCase var ty alt
sort_alts :: [CaseAlt DataCon] -> [CaseAlt DataCon]
sort_alts = sortWith (dataConTag . alt_pat)
mkPatSynCase :: Id -> Type -> CaseAlt PatSyn -> CoreExpr -> DsM CoreExpr
mkPatSynCase var ty alt fail = do
matcher <- dsLExpr $ mkLHsWrap wrapper $
nlHsTyApp matcher [getRuntimeRep "mkPatSynCase" ty, ty]
let MatchResult _ mkCont = match_result
cont <- mkCoreLams bndrs <$> mkCont fail
return $ mkCoreAppsDs (text "patsyn" <+> ppr var) matcher [Var var, ensure_unstrict cont, Lam voidArgId fail]
where
MkCaseAlt{ alt_pat = psyn,
alt_bndrs = bndrs,
alt_wrapper = wrapper,
alt_result = match_result} = alt
(matcher, needs_void_lam) = patSynMatcher psyn
-- See Note [Matchers and builders for pattern synonyms] in PatSyns
-- on these extra Void# arguments
ensure_unstrict cont | needs_void_lam = Lam voidArgId cont
| otherwise = cont
mkDataConCase :: Id -> Type -> [CaseAlt DataCon] -> MatchResult
mkDataConCase _ _ [] = panic "mkDataConCase: no alternatives"
mkDataConCase var ty alts@(alt1:_) = MatchResult fail_flag mk_case
where
con1 = alt_pat alt1
tycon = dataConTyCon con1
data_cons = tyConDataCons tycon
match_results = map alt_result alts
sorted_alts :: [CaseAlt DataCon]
sorted_alts = sort_alts alts
var_ty = idType var
(_, ty_args) = tcSplitTyConApp var_ty -- Don't look through newtypes
-- (not that splitTyConApp does, these days)
mk_case :: CoreExpr -> DsM CoreExpr
mk_case fail = do
alts <- mapM (mk_alt fail) sorted_alts
return $ mkWildCase (Var var) (idType var) ty (mk_default fail ++ alts)
mk_alt :: CoreExpr -> CaseAlt DataCon -> DsM CoreAlt
mk_alt fail MkCaseAlt{ alt_pat = con,
alt_bndrs = args,
alt_result = MatchResult _ body_fn }
= do { body <- body_fn fail
; case dataConBoxer con of {
Nothing -> return (DataAlt con, args, body) ;
Just (DCB boxer) ->
do { us <- newUniqueSupply
; let (rep_ids, binds) = initUs_ us (boxer ty_args args)
; return (DataAlt con, rep_ids, mkLets binds body) } } }
mk_default :: CoreExpr -> [CoreAlt]
mk_default fail | exhaustive_case = []
| otherwise = [(DEFAULT, [], fail)]
fail_flag :: CanItFail
fail_flag | exhaustive_case
= foldr orFail CantFail [can_it_fail | MatchResult can_it_fail _ <- match_results]
| otherwise
= CanFail
mentioned_constructors = mkUniqSet $ map alt_pat alts
un_mentioned_constructors
= mkUniqSet data_cons `minusUniqSet` mentioned_constructors
exhaustive_case = isEmptyUniqSet un_mentioned_constructors
--- Stuff for parallel arrays
--
-- * the following is to desugar cases over fake constructors for
-- parallel arrays, which are introduced by `tidy1' in the `PArrPat'
-- case
--
mkPArrCase :: DynFlags -> Id -> Type -> [CaseAlt DataCon] -> CoreExpr -> DsM CoreExpr
mkPArrCase dflags var ty sorted_alts fail = do
lengthP <- dsDPHBuiltin lengthPVar
alt <- unboxAlt
return (mkWildCase (len lengthP) intTy ty [alt])
where
elemTy = case splitTyConApp (idType var) of
(_, [elemTy]) -> elemTy
_ -> panic panicMsg
panicMsg = "DsUtils.mkCoAlgCaseMatchResult: not a parallel array?"
len lengthP = mkApps (Var lengthP) [Type elemTy, Var var]
--
unboxAlt = do
l <- newSysLocalDs intPrimTy
indexP <- dsDPHBuiltin indexPVar
alts <- mapM (mkAlt indexP) sorted_alts
return (DataAlt intDataCon, [l], mkWildCase (Var l) intPrimTy ty (dft : alts))
where
dft = (DEFAULT, [], fail)
--
-- each alternative matches one array length (corresponding to one
-- fake array constructor), so the match is on a literal; each
-- alternative's body is extended by a local binding for each
-- constructor argument, which are bound to array elements starting
-- with the first
--
mkAlt indexP alt@MkCaseAlt{alt_result = MatchResult _ bodyFun} = do
body <- bodyFun fail
return (LitAlt lit, [], mkCoreLets binds body)
where
lit = MachInt $ toInteger (dataConSourceArity (alt_pat alt))
binds = [NonRec arg (indexExpr i) | (i, arg) <- zip [1..] (alt_bndrs alt)]
--
indexExpr i = mkApps (Var indexP) [Type elemTy, Var var, mkIntExpr dflags i]
{-
************************************************************************
* *
\subsection{Desugarer's versions of some Core functions}
* *
************************************************************************
-}
mkErrorAppDs :: Id -- The error function
-> Type -- Type to which it should be applied
-> SDoc -- The error message string to pass
-> DsM CoreExpr
mkErrorAppDs err_id ty msg = do
src_loc <- getSrcSpanDs
dflags <- getDynFlags
let
full_msg = showSDoc dflags (hcat [ppr src_loc, vbar, msg])
core_msg = Lit (mkMachString full_msg)
-- mkMachString returns a result of type String#
return (mkApps (Var err_id) [Type (getRuntimeRep "mkErrorAppDs" ty), Type ty, core_msg])
{-
'mkCoreAppDs' and 'mkCoreAppsDs' handle the special-case desugaring of 'seq'.
Note [Desugaring seq (1)] cf Trac #1031
~~~~~~~~~~~~~~~~~~~~~~~~~
f x y = x `seq` (y `seq` (# x,y #))
The [CoreSyn let/app invariant] means that, other things being equal, because
the argument to the outer 'seq' has an unlifted type, we'll use call-by-value thus:
f x y = case (y `seq` (# x,y #)) of v -> x `seq` v
But that is bad for two reasons:
(a) we now evaluate y before x, and
(b) we can't bind v to an unboxed pair
Seq is very, very special! So we recognise it right here, and desugar to
case x of _ -> case y of _ -> (# x,y #)
Note [Desugaring seq (2)] cf Trac #2273
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
let chp = case b of { True -> fst x; False -> 0 }
in chp `seq` ...chp...
Here the seq is designed to plug the space leak of retaining (snd x)
for too long.
If we rely on the ordinary inlining of seq, we'll get
let chp = case b of { True -> fst x; False -> 0 }
case chp of _ { I# -> ...chp... }
But since chp is cheap, and the case is an alluring context, we'll
inline chp into the case scrutinee. Now there is only one use of chp,
so we'll inline a second copy. Alas, we've now ruined the purpose of
the seq, by re-introducing the space leak:
case (case b of {True -> fst x; False -> 0}) of
I# _ -> ...case b of {True -> fst x; False -> 0}...
We can try to avoid doing this by ensuring that the binder-swap in the
case happens, so we get this at an early stage:
case chp of chp2 { I# -> ...chp2... }
But this is fragile. The real culprit is the source program. Perhaps we
should have said explicitly
let !chp2 = chp in ...chp2...
But that's painful. So the code here does a little hack to make seq
more robust: a saturated application of 'seq' is turned *directly* into
the case expression, thus:
x `seq` e2 ==> case x of x -> e2 -- Note shadowing!
  e1 `seq` e2 ==> case e1 of _ -> e2
So we desugar our example to:
let chp = case b of { True -> fst x; False -> 0 }
case chp of chp { I# -> ...chp... }
And now all is well.
The reason it's a hack is that if you define mySeq = seq, the hack
won't work on mySeq.
Note [Desugaring seq (3)] cf Trac #2409
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The isLocalId ensures that we don't turn
True `seq` e
into
case True of True { ... }
which stupidly tries to bind the datacon 'True'.
-}
mkCoreAppDs :: SDoc -> CoreExpr -> CoreExpr -> CoreExpr
mkCoreAppDs _ (Var f `App` Type ty1 `App` Type ty2 `App` arg1) arg2
| f `hasKey` seqIdKey -- Note [Desugaring seq (1), (2)]
= Case arg1 case_bndr ty2 [(DEFAULT,[],arg2)]
where
case_bndr = case arg1 of
Var v1 | isLocalId v1 -> v1 -- Note [Desugaring seq (2) and (3)]
_ -> mkWildValBinder ty1
mkCoreAppDs s fun arg = mkCoreApp s fun arg -- The rest is done in MkCore
mkCoreAppsDs :: SDoc -> CoreExpr -> [CoreExpr] -> CoreExpr
mkCoreAppsDs s fun args = foldl (mkCoreAppDs s) fun args
mkCastDs :: CoreExpr -> Coercion -> CoreExpr
-- We define a desugarer-specific version of CoreUtils.mkCast,
-- because in the immediate output of the desugarer, we can have
-- apparently-mis-matched coercions: E.g.
-- let a = b
-- in (x :: a) |> (co :: b ~ Int)
-- Lint knows about type-bindings for let and does not complain
-- So here we do not make the assertion checks that we make in
-- CoreUtils.mkCast; and we do less peephole optimisation too
mkCastDs e co | isReflCo co = e
| otherwise = Cast e co
{-
************************************************************************
* *
Tuples and selector bindings
* *
************************************************************************
This is used in various places to do with lazy patterns.
For each binder $b$ in the pattern, we create a binding:
\begin{verbatim}
b = case v of pat' -> b'
\end{verbatim}
where @pat'@ is @pat@ with each binder @b@ cloned into @b'@.
ToDo: making these bindings should really depend on whether there's
much work to be done per binding. If the pattern is complex, it
should be de-mangled once, into a tuple (and then selected from).
Otherwise the demangling can be in-line in the bindings (as here).
Boring! Boring! One error message per binder. The above ToDo is
even more helpful. Something very similar happens for pattern-bound
expressions.
Note [mkSelectorBinds]
~~~~~~~~~~~~~~~~~~~~~~
mkSelectorBinds is used to desugar a pattern binding {p = e},
in a binding group:
let { ...; p = e; ... } in body
where p binds x,y (this list of binders can be empty).
There are two cases.
General case (A).
In the general case we generate these bindings (A)
{ t = case e of p -> (x,y)
; x = case t of (x,y) -> x
; y = case t of (x,y) -> y }
and we return 't' as the variable to force if the pattern
is strict. So with -XStrict or an outermost-bang-pattern, the binding
let p = e in body
would turn into
let { t = case e of p -> (x,y)
; x = case t of (x,y) -> x
; y = case t of (x,y) -> y }
  in t `seq` body
Special case (B).
For a pattern that is essentially just a tuple:
* A product type, so cannot fail
* Only one level, so that
- generating multiple matches is fine
- seq'ing it evaluates the same as matching it
Then instead we generate
{ v = e
; x = case v of p -> x
; y = case v of p -> y }
with 'v' as the variable to force
Examples:
* !(_, (_, a)) = e
==>
t = case e of (_, (_, a)) -> Unit a
a = case t of Unit a -> a
Note that
- Forcing 't' will force the pattern to match fully;
e.g. will diverge if (snd e) is bottom
- But 'a' itself is not forced; it is wrapped in a one-tuple
(see Note [One-tuples] in TysWiredIn)
* !(Just x) = e
==>
t = case e of Just x -> Unit x
x = case t of Unit x -> x
Again, forcing 't' will fail if 'e' yields Nothing.
Note that even though this is rather general, the special cases
work out well:
* One binder, not -XStrict:
let Just (Just v) = e in body
==>
let t = case e of Just (Just v) -> Unit v
v = case t of Unit v -> v
in body
==>
let v = case (case e of Just (Just v) -> Unit v) of
Unit v -> v
in body
==>
let v = case e of Just (Just v) -> v
in body
* Non-recursive, -XStrict
let p = e in body
==>
let { t = case e of p -> (x,y)
; x = case t of (x,y) -> x
    ; y = case t of (x,y) -> y }
in t `seq` body
==> {inline seq, float x,y bindings inwards}
let t = case e of p -> (x,y) in
case t of t' ->
let { x = case t' of (x,y) -> x
    ; y = case t' of (x,y) -> y } in
body
==> {inline t, do case of case}
case e of p ->
  let t' = (x,y) in
let { x = case t' of (x,y) -> x
    ; y = case t' of (x,y) -> y } in
body
==> {case-cancellation, drop dead code}
case e of p -> body
* Special case (B) is there to avoid fruitlessly taking the tuple
apart and rebuilding it. For example, consider
{ K x y = e }
where K is a product constructor. Then general case (A) does:
{ t = case e of K x y -> (x,y)
; x = case t of (x,y) -> x
; y = case t of (x,y) -> y }
In the lazy case we can't optimise out this fruitless taking apart
and rebuilding. Instead (B) builds
{ v = e
; x = case v of K x y -> x
; y = case v of K x y -> y }
which is better.
-}
mkSelectorBinds :: [[Tickish Id]] -- ^ ticks to add, possibly
-> LPat Id -- ^ The pattern
-> CoreExpr -- ^ Expression to which the pattern is bound
-> DsM (Id,[(Id,CoreExpr)])
-- ^ Id the rhs is bound to, for desugaring strict
-- binds (see Note [Desugar Strict binds] in DsBinds)
-- and all the desugared binds
mkSelectorBinds ticks pat val_expr
| is_simple_lpat pat -- Special case (B)
= do { let pat_ty = hsLPatType pat
; val_var <- newSysLocalDs pat_ty
; let mk_bind scrut_var tick bndr_var
-- (mk_bind sv bv) generates bv = case sv of { pat -> bv }
-- Remember, 'pat' binds 'bv'
= do { rhs_expr <- matchSimply (Var scrut_var) PatBindRhs pat
(Var bndr_var)
(Var bndr_var) -- Neat hack
-- Neat hack: since 'pat' can't fail, the
-- "fail-expr" passed to matchSimply is not
-- used. But it /is/ used for its type, and for
-- that bndr_var is just the ticket.
; return (bndr_var, mkOptTickBox tick rhs_expr) }
; binds <- zipWithM (mk_bind val_var) ticks' binders
; return ( val_var, (val_var, val_expr) : binds) }
| otherwise
= do { tuple_var <- newSysLocalDs tuple_ty
; error_expr <- mkErrorAppDs iRREFUT_PAT_ERROR_ID tuple_ty (ppr pat)
; tuple_expr <- matchSimply val_expr PatBindRhs pat
local_tuple error_expr
; let mk_tup_bind tick binder
= (binder, mkOptTickBox tick $
mkTupleSelector1 local_binders binder
tuple_var (Var tuple_var))
tup_binds = zipWith mk_tup_bind ticks' binders
; return (tuple_var, (tuple_var, tuple_expr) : tup_binds) }
where
binders = collectPatBinders pat
ticks' = ticks ++ repeat []
local_binders = map localiseId binders -- See Note [Localise pattern binders]
local_tuple = mkBigCoreVarTup1 binders
tuple_ty = exprType local_tuple
is_simple_lpat :: LPat a -> Bool
is_simple_lpat p = is_simple_pat (unLoc p)
is_simple_pat :: Pat a -> Bool
is_simple_pat (VarPat _) = True
is_simple_pat (ParPat p) = is_simple_lpat p
is_simple_pat (TuplePat ps Boxed _) = all is_triv_lpat ps
is_simple_pat (ConPatOut { pat_con = con
, pat_args = ps}) = is_simple_con_pat con ps
is_simple_pat _ = False
is_simple_con_pat :: Located ConLike -> HsConPatDetails a -> Bool
is_simple_con_pat con args
= case con of
L _ (RealDataCon con) -> isProductTyCon (dataConTyCon con)
&& all is_triv_lpat (hsConPatArgs args)
L _ (PatSynCon {}) -> False
is_triv_lpat :: LPat a -> Bool
is_triv_lpat p = is_triv_pat (unLoc p)
is_triv_pat :: Pat a -> Bool
is_triv_pat (VarPat _) = True
is_triv_pat (WildPat _) = True
is_triv_pat (ParPat p) = is_triv_lpat p
is_triv_pat _ = False
{- *********************************************************************
* *
Creating big tuples and their types for full Haskell expressions.
They work over *Ids*, and create tuples replete with their types,
which is why they are not in HsUtils.
* *
********************************************************************* -}
mkLHsPatTup :: [LPat Id] -> LPat Id
mkLHsPatTup [] = noLoc $ mkVanillaTuplePat [] Boxed
mkLHsPatTup [lpat] = lpat
mkLHsPatTup lpats = L (getLoc (head lpats)) $
mkVanillaTuplePat lpats Boxed
mkLHsVarPatTup :: [Id] -> LPat Id
mkLHsVarPatTup bs = mkLHsPatTup (map nlVarPat bs)
mkVanillaTuplePat :: [OutPat Id] -> Boxity -> Pat Id
-- A vanilla tuple pattern simply gets its type from its sub-patterns
mkVanillaTuplePat pats box = TuplePat pats box (map hsLPatType pats)
-- The Big equivalents for the source tuple expressions
mkBigLHsVarTupId :: [Id] -> LHsExpr Id
mkBigLHsVarTupId ids = mkBigLHsTupId (map nlHsVar ids)
mkBigLHsTupId :: [LHsExpr Id] -> LHsExpr Id
mkBigLHsTupId = mkChunkified mkLHsTupleExpr
-- The Big equivalents for the source tuple patterns
mkBigLHsVarPatTupId :: [Id] -> LPat Id
mkBigLHsVarPatTupId bs = mkBigLHsPatTupId (map nlVarPat bs)
mkBigLHsPatTupId :: [LPat Id] -> LPat Id
mkBigLHsPatTupId = mkChunkified mkLHsPatTup
{-
************************************************************************
* *
Code for pattern-matching and other failures
* *
************************************************************************
Generally, we handle pattern matching failure like this: let-bind a
fail-variable, and use that variable if the thing fails:
\begin{verbatim}
let fail.33 = error "Help"
in
case x of
p1 -> ...
p2 -> fail.33
p3 -> fail.33
p4 -> ...
\end{verbatim}
Then
\begin{itemize}
\item
If the case can't fail, then there'll be no mention of @fail.33@, and the
simplifier will later discard it.
\item
If it can fail in only one way, then the simplifier will inline it.
\item
Only if it is used more than once will the let-binding remain.
\end{itemize}
There's a problem when the result of the case expression is of
unboxed type. Then the type of @fail.33@ is unboxed too, and
there is every chance that someone will change the let into a case:
\begin{verbatim}
case error "Help" of
fail.33 -> case ....
\end{verbatim}
which is of course utterly wrong. Rather than drop the condition that
only boxed types can be let-bound, we just turn the fail into a function
for the primitive case:
\begin{verbatim}
let fail.33 :: Void -> Int#
fail.33 = \_ -> error "Help"
in
case x of
p1 -> ...
p2 -> fail.33 void
p3 -> fail.33 void
p4 -> ...
\end{verbatim}
Now @fail.33@ is a function, so it can be let-bound.
-}
mkFailurePair :: CoreExpr -- Result type of the whole case expression
-> DsM (CoreBind, -- Binds the newly-created fail variable
-- to \ _ -> expression
CoreExpr) -- Fail variable applied to realWorld#
-- See Note [Failure thunks and CPR]
mkFailurePair expr
= do { fail_fun_var <- newFailLocalDs (voidPrimTy `mkFunTy` ty)
; fail_fun_arg <- newSysLocalDs voidPrimTy
; let real_arg = setOneShotLambda fail_fun_arg
; return (NonRec fail_fun_var (Lam real_arg expr),
App (Var fail_fun_var) (Var voidPrimId)) }
where
ty = exprType expr
{-
Note [Failure thunks and CPR]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we make a failure point we ensure that it
does not look like a thunk. Example:
let fail = \rw -> error "urk"
in case x of
[] -> fail realWorld#
(y:ys) -> case ys of
[] -> fail realWorld#
(z:zs) -> (y,z)
Reason: we know that a failure point is always a "join point" and is
entered at most once. Adding a dummy 'realWorld' token argument makes
it clear that sharing is not an issue. And that in turn makes it more
CPR-friendly. This matters a lot: if you don't get it right, you lose
the tail call property. For example, see Trac #3403.
************************************************************************
* *
Ticks
* *
********************************************************************* -}
mkOptTickBox :: [Tickish Id] -> CoreExpr -> CoreExpr
mkOptTickBox = flip (foldr Tick)
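-- Scrutinise a boolean expression and attach an HPC tick to each branch:
-- 'ixT' ticks the True result and 'ixF' ticks the False result.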
mkBinaryTickBox :: Int -> Int -> CoreExpr -> DsM CoreExpr
mkBinaryTickBox ixT ixF e = do
uq <- newUnique
this_mod <- getModule
let bndr1 = mkSysLocal (fsLit "t1") uq boolTy
let
falseBox = Tick (HpcTick this_mod ixF) (Var falseDataConId)
trueBox = Tick (HpcTick this_mod ixT) (Var trueDataConId)
--
return $ Case e bndr1 boolTy
[ (DataAlt falseDataCon, [], falseBox)
, (DataAlt trueDataCon, [], trueBox)
]
-- *******************************************************************
-- | Remove any bang from a pattern and say if it is a strict bind,
-- also make irrefutable patterns ordinary patterns if -XStrict.
--
-- Examples:
-- ~pat => False, pat -- when -XStrict
-- -- even if pat = ~pat'
-- ~pat => False, ~pat -- without -XStrict
-- ~(~pat) => False, ~pat -- when -XStrict
-- pat => True, pat -- when -XStrict
-- !pat => True, pat -- always
decideBangHood :: DynFlags
-> LPat id -- ^ Original pattern
-> LPat id -- Pattern with bang if necessary
decideBangHood dflags lpat
= go lpat
where
xstrict = xopt LangExt.Strict dflags
go lp@(L l p)
= case p of
ParPat p -> L l (ParPat (go p))
LazyPat lp' | xstrict -> lp'
BangPat _ -> lp
_ | xstrict -> L l (BangPat lp)
| otherwise -> lp
|
tjakway/ghcjvm
|
compiler/deSugar/DsUtils.hs
|
bsd-3-clause
| 35,951 | 0 | 19 | 10,432 | 5,338 | 2,780 | 2,558 | -1 | -1 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
module T14038 where
import Data.Kind (Type)
data family Sing (a :: k)
data instance Sing (z :: [a]) where
SNil :: Sing '[]
SCons :: Sing x -> Sing xs -> Sing (x:xs)
data TyFun :: Type -> Type -> Type
type a ~> b = TyFun a b -> Type
infixr 0 ~>
type family Apply (f :: k1 ~> k2) (x :: k1) :: k2
type a @@ b = Apply a b
infixl 9 @@
data FunArrow = (:->) -- ^ '(->)'
| (:~>) -- ^ '(~>)'
class FunType (arr :: FunArrow) where
type Fun (k1 :: Type) arr (k2 :: Type) :: Type
class FunType arr => AppType (arr :: FunArrow) where
type App k1 arr k2 (f :: Fun k1 arr k2) (x :: k1) :: k2
type FunApp arr = (FunType arr, AppType arr)
instance FunType (:->) where
type Fun k1 (:->) k2 = k1 -> k2
instance AppType (:->) where
type App k1 (:->) k2 (f :: k1 -> k2) x = f x
instance FunType (:~>) where
type Fun k1 (:~>) k2 = k1 ~> k2
instance AppType (:~>) where
type App k1 (:~>) k2 (f :: k1 ~> k2) x = f @@ x
infixr 0 -?>
type (-?>) (k1 :: Type) (k2 :: Type) (arr :: FunArrow) = Fun k1 arr k2
elimList :: forall (a :: Type) (p :: [a] -> Type) (l :: [a]).
Sing l
-> p '[]
-> (forall (x :: a) (xs :: [a]). Sing x -> Sing xs -> p xs -> p (x:xs))
-> p l
elimList = elimListPoly @(:->)
elimListTyFun :: forall (a :: Type) (p :: [a] ~> Type) (l :: [a]).
Sing l
-> p @@ '[]
-> (forall (x :: a) (xs :: [a]). Sing x -> Sing xs -> p @@ xs -> p @@ (x:xs))
-> p @@ l
elimListTyFun = elimListPoly @(:~>) @_ @p
elimListPoly :: forall (arr :: FunArrow) (a :: Type) (p :: ([a] -?> Type) arr) (l :: [a]).
FunApp arr
=> Sing l
-> App [a] arr Type p '[]
-> (forall (x :: a) (xs :: [a]). Sing x -> Sing xs -> App [a] arr Type p xs -> App [a] arr Type p (x:xs))
-> App [a] arr Type p l
elimListPoly SNil pNil _ = pNil
elimListPoly (SCons x (xs :: Sing xs)) pNil pCons = pCons x xs (elimListPoly @arr @a @p @xs xs pNil pCons)
|
sdiehl/ghc
|
testsuite/tests/dependent/should_compile/T14038.hs
|
bsd-3-clause
| 2,379 | 22 | 20 | 691 | 1,000 | 575 | 425 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeApplications #-}
--------------- The original bug report --------------
--
-- See T12734a for a smaller version
module T12734 where
import Prelude
import Control.Applicative
import Control.Monad.Fix
import Control.Monad.Trans.Identity
import Control.Monad.Trans.Class
import Control.Monad.IO.Class
data A
data B
data Net
data Type
data Layer4 t l
data TermStore
-- Helpers: Stack
data Stack layers (t :: * -> *) where
SLayer :: t l -> Stack ls t -> Stack (l ': ls) t
SNull :: Stack '[] t
instance ( Constructor m (t l)
, Constructor m (Stack ls t)) => Constructor m (Stack (l ': ls) t)
instance Monad m => Constructor m (Stack '[] t)
-- Helpers: Expr
newtype Expr t layers = Expr (TermStack t layers)
type TermStack t layers = Stack layers (Layer4 (Expr t layers))
-- Helpers: Funny typeclass
class Monad m => Constructor m t
instance ( Monad m, expr ~ Expr t layers, Constructor m (TermStack t layers)
) => Constructor m (Layer4 expr Type)
-- HERE IS A FUNNY BEHAVIOR: the commented line raises context reduction stack overflow
test_gr :: ( Constructor m (TermStack t layers), Inferable A layers m, Inferable B t m
, bind ~ Expr t layers
-- ) => m (Expr t layers)
) => m bind
test_gr = undefined
-- Explicit information about a type which could be inferred
class Monad m => Inferable (cls :: *) (t :: k) m | cls m -> t
newtype KnownTypex (cls :: *) (t :: k) (m :: * -> *) (a :: *) = KnownTypex (IdentityT m a) deriving (Show, Functor, Monad, MonadIO, MonadFix, MonadTrans, Applicative, Alternative)
instance {-# OVERLAPPABLE #-} (t ~ t', Monad m) => Inferable cls t (KnownTypex cls t' m)
instance {-# OVERLAPPABLE #-} (Inferable cls t n, MonadTrans m, Monad (m n)) => Inferable cls t (m n)
runInferenceTx :: forall cls t m a. KnownTypex cls t m a -> m a
runInferenceTx = undefined
-- running it
test_ghc_err :: (MonadIO m, MonadFix m)
=> m (Expr Net '[Type])
test_ghc_err = runInferenceTx @B @Net
$ runInferenceTx @A @'[Type]
$ (test_gr)
|
ezyang/ghc
|
testsuite/tests/typecheck/should_compile/T12734.hs
|
bsd-3-clause
| 2,628 | 0 | 10 | 632 | 733 | 409 | 324 | -1 | -1 |
{-# LANGUAGE GADTs, RankNTypes #-}
-- Test pattern bindings, existentials, and higher rank
module T12427a where
data T where
T1 :: a -> ((forall b. [b]->[b]) -> Int) -> T
T2 :: ((forall b. [b]->[b]) -> Int) -> T
-- Inference
-- Worked in 7.10 (probably wrongly)
-- Failed in 8.0.1
-- Fails in 8.2 because v is polymorphic
-- and the T1 pattern match binds existentials,
-- and hence bumps levels
h11 y = case y of T1 _ v -> v
-- Worked in 7.10 (probably wrongly)
-- Failed in 8.0.1
-- Succeeds in 8.2 because the pattern match has
-- no existentials, so it doesn't matter than
-- v is polymorphic
h12 y = case y of T2 v -> v
-- Inference
-- Same results as for h11 and h12 resp
T1 _ x1 = undefined
T2 x2 = undefined
-- Works in all versions
h2 :: T -> (forall b. [b] -> [b]) -> Int
h2 y = case y of T1 _ v -> v
-- Checking
-- Fails in 7.10 (head exploded)
-- Fails in 8.0.1 (ditto)
-- Succeeds in 8.2
x3 :: (forall a. a->a) -> Int
T1 _ x3 = undefined
|
ezyang/ghc
|
testsuite/tests/typecheck/should_compile/T12427a.hs
|
bsd-3-clause
| 976 | 0 | 13 | 225 | 250 | 144 | 106 | 13 | 1 |
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2012
--
-- Monadic streams
--
-- -----------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
module Stream (
Stream(..), yield, liftIO,
collect, fromList,
Stream.map, Stream.mapM, Stream.mapAccumL
) where
import Control.Monad
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative
#endif
-- |
-- @Stream m a b@ is a computation in some Monad @m@ that delivers a sequence
-- of elements of type @a@ followed by a result of type @b@.
--
-- More concretely, a value of type @Stream m a b@ can be run using @runStream@
-- in the Monad @m@, and it delivers either
--
-- * the final result: @Left b@, or
-- * @Right (a,str)@, where @a@ is the next element in the stream, and @str@
-- is a computation to get the rest of the stream.
--
-- Stream is itself a Monad, and provides an operation 'yield' that
-- produces a new element of the stream. This makes it convenient to turn
-- existing monadic computations into streams.
--
-- The idea is that Stream is useful for making a monadic computation
-- that produces values from time to time. This can be used for
-- knitting together two complex monadic operations, so that the
-- producer does not have to produce all its values before the
-- consumer starts consuming them. We make the producer into a
-- Stream, and the consumer pulls on the stream each time it wants a
-- new value.
--
newtype Stream m a b = Stream { runStream :: m (Either b (a, Stream m a b)) }
instance Monad f => Functor (Stream f a) where
fmap = liftM
instance Monad m => Applicative (Stream m a) where
pure = return
(<*>) = ap
instance Monad m => Monad (Stream m a) where
return a = Stream (return (Left a))
Stream m >>= k = Stream $ do
r <- m
case r of
Left b -> runStream (k b)
Right (a,str) -> return (Right (a, str >>= k))
yield :: Monad m => a -> Stream m a ()
yield a = Stream (return (Right (a, return ())))
liftIO :: IO a -> Stream IO b a
liftIO io = Stream $ io >>= return . Left
-- | Turn a Stream into an ordinary list, by demanding all the elements.
collect :: Monad m => Stream m a () -> m [a]
collect str = go str []
where
go str acc = do
r <- runStream str
case r of
Left () -> return (reverse acc)
Right (a, str') -> go str' (a:acc)
-- | Turn a list into a 'Stream', by yielding each element in turn.
fromList :: Monad m => [a] -> Stream m a ()
fromList = mapM_ yield
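-- A minimal usage sketch (not part of the original module), illustrating the
-- producer/consumer idea from the header comment: build a finite 'Stream'
-- with 'fromList' and demand it with 'collect'. The names 'countTo' and
-- 'runCountTo' are made up here.
countTo :: Monad m => Int -> Stream m Int ()
countTo n = fromList [1..n]

runCountTo :: Monad m => m [Int]
runCountTo = collect (countTo 5)   -- returns [1,2,3,4,5]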
-- | Apply a function to each element of a 'Stream', lazily
map :: Monad m => (a -> b) -> Stream m a x -> Stream m b x
map f str = Stream $ do
r <- runStream str
case r of
Left x -> return (Left x)
Right (a, str') -> return (Right (f a, Stream.map f str'))
-- | Apply a monadic operation to each element of a 'Stream', lazily
mapM :: Monad m => (a -> m b) -> Stream m a x -> Stream m b x
mapM f str = Stream $ do
r <- runStream str
case r of
Left x -> return (Left x)
Right (a, str') -> do
b <- f a
return (Right (b, Stream.mapM f str'))
-- | analog of the list-based 'mapAccumL' on Streams. This is a simple
-- way to map over a Stream while carrying some state around.
mapAccumL :: Monad m => (c -> a -> m (c,b)) -> c -> Stream m a ()
-> Stream m b c
mapAccumL f c str = Stream $ do
r <- runStream str
case r of
Left () -> return (Left c)
Right (a, str') -> do
(c',b) <- f c a
return (Right (b, mapAccumL f c' str'))
|
urbanslug/ghc
|
compiler/utils/Stream.hs
|
bsd-3-clause
| 3,608 | 0 | 18 | 905 | 1,022 | 526 | 496 | 56 | 2 |
-- Test grouping with both a using and a by clause
{-# OPTIONS_GHC -XMonadComprehensions -XTransformListComp #-}
module Main where
import Data.List(groupBy)
import GHC.Exts(the)
groupRuns :: Eq b => (a -> b) -> [a] -> [[a]]
groupRuns f = groupBy (\x y -> f x == f y)
main = putStrLn (show output)
where
output = [ (the x, product y)
| x <- ([1, 1, 1, 2, 2, 1, 3])
, y <- [4..6]
, then group by x using groupRuns ]
|
siddhanathan/ghc
|
testsuite/tests/deSugar/should_run/mc06.hs
|
bsd-3-clause
| 464 | 0 | 11 | 132 | 183 | 103 | 80 | 11 | 1 |
{-# LANGUAGE PackageImports #-}
module Rocketfuel.Input (
keyIsPressed,
readMouse,
Click (..),
MouseStatus (..),
updateCommand
) where
import "GLFW-b" Graphics.UI.GLFW as GLFW
import Control.Monad
import Rocketfuel.Types
type Coords = (Integer, Integer)
data MouseStatus = Clicked | Released
data Click = Click { _xy :: (Double, Double),
_status :: MouseStatus }
-- Main updater
updateCommand :: Click -> Maybe Command -> Maybe Command
updateCommand click comm = readClick click . cleanCommand $ comm
where
cleanCommand :: Maybe Command -> Maybe Command
cleanCommand (Just (DragAndDrop (Just _) (Just _))) = Nothing
cleanCommand other = other
readClick :: Click -> Maybe Command -> Maybe Command
readClick (Click (x, y) status) com =
if legit coords then updateCommand' status
else undragIfNeeded status
where
coords = cellFromCoord x y
updateCommand' :: MouseStatus -> Maybe Command
updateCommand' Clicked = dragIfNeeded coords com
updateCommand' Released = dropIfNeeded coords com
undragIfNeeded Released = Nothing
undragIfNeeded Clicked = com
-- Signal input management
--
readMouse :: Window -> (Click -> IO b) -> IO b
readMouse window sink = do
pollEvents
mousePos <- getCursorPos window
mouseIsPressed <- checkMouseButtonStatus window MouseButtonState'Pressed
if mouseIsPressed then sink $ Click mousePos Clicked
else sink $ Click mousePos Released
checkMouseButtonStatus :: Window -> MouseButtonState -> IO Bool
checkMouseButtonStatus win st = liftM (st ==) (getMouseButton win MouseButton'1)
keyIsPressed :: Window -> Key -> IO Bool
keyIsPressed win key = isPress `fmap` GLFW.getKey win key
isPress :: KeyState -> Bool
isPress KeyState'Pressed = True
isPress KeyState'Repeating = True
isPress _ = False
-- Logic for drag'n'drop
-- Called when the mouse is down.
-- If there is no given tile being dragged,
-- given a mouse input expressed in x and y,
-- check if it fits in the grid; if so, start
-- dragging this tile by modifying GameContext.
dragIfNeeded :: Coords -> Maybe Command -> Maybe Command
dragIfNeeded coords Nothing = Just $ DragAndDrop (Just coords) Nothing
dragIfNeeded _ c = c
dropIfNeeded :: Coords -> Maybe Command -> Maybe Command
dropIfNeeded coords (Just (DragAndDrop (Just p) Nothing))
= Just $ DragAndDrop (Just p) (Just coords)
dropIfNeeded _ c = c
-- Given an x, y index, check whether it can be a position on the grid.
legit :: (Num a, Ord a) => (a, a) -> Bool
legit (x, y) = x < 8 && x >= 0 && y < 8 && y >= 0
-- Given a global mouse input expressed in gloss viewport,
-- convert it to a 0,0 coord system on the grid.
cellFromCoord :: Double -> Double -> Coords
cellFromCoord x y = (floor (x / 32.0), abs $ floor (y / 32.0))
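-- Illustrative only (not part of the original module): one sample conversion,
-- assuming the 32-pixel cell size used above; 'exampleCell' is a made-up name.
exampleCell :: Coords
exampleCell = cellFromCoord 96.0 (-64.0)   -- == (3, 2)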
|
Raveline/Rocketfuel
|
src/Rocketfuel/Input.hs
|
mit
| 2,938 | 0 | 13 | 720 | 801 | 421 | 380 | 55 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module Impl.TVar1 where
import Control.Concurrent.STM
import Control.Monad.IO.Class
import Control.Monad.Trans.Either
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as Map
import Data.Proxy
import Data.Time
import Network.Wai.Handler.Warp (run)
import Servant.API
import Servant.Server
import API
data Environment
= Environment
{ serverFortuneMap :: TVar (HashMap Email (TVar Fortune))
}
runApp :: IO ()
runApp = do
env <- createEnvironment
run 8080 $ serve (Proxy :: Proxy API)
( createPlayer env
:<|> tradeFortunes env)
createEnvironment :: IO Environment
createEnvironment = do
fm <- newTVarIO Map.empty
return $ Environment fm
createPlayer :: Environment -> NewPlayer -> EitherT ServantErr IO ()
createPlayer env player = do
now <- liftIO getCurrentTime
liftIO $ atomically $ do
f <- newTVar (Fortune (npFortune player) now)
m <- readTVar (serverFortuneMap env)
let m' = Map.insert (npEmail player) f m
writeTVar (serverFortuneMap env) m'
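-- Atomically swap the fortunes stored for 'from' and 'to', returning the pair
-- (given, received); respond with 404 if either email is unknown.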
tradeFortunes :: Environment -> Email -> Email -> EitherT ServantErr IO FortunePair
tradeFortunes env from to = do
res <- liftIO $ atomically $ do
m <- readTVar (serverFortuneMap env)
let mgivenv = Map.lookup from m
mreceivedv = Map.lookup to m
case (mgivenv, mreceivedv) of
(Just givenv, Just receivedv) -> do
given <- readTVar givenv
received <- readTVar receivedv
writeTVar givenv received
writeTVar receivedv given
return $ Just $ FortunePair given received
_ -> return Nothing
case res of
Just v -> return v
Nothing -> left err404
|
AndrewRademacher/whimsy
|
src/Impl/TVar1.hs
|
mit
| 2,001 | 0 | 17 | 632 | 556 | 276 | 280 | 53 | 3 |
module Main where
import Control.Monad (forever)
import Data.Char (toLower)
import Data.List (intersperse, nub)
import Data.Maybe (isJust)
import System.Exit (exitSuccess)
import System.Random (randomRIO)
main :: IO ()
main = do
word <- randomWord'
let puzzle = freshPuzzle (fmap toLower word)
runGame puzzle
type WordList = [String]
minWordLength :: Int
minWordLength = 5
maxWordLength :: Int
maxWordLength = 9
allWords :: IO WordList
allWords = do
dict <- readFile "data/dict.txt"
return (lines dict)
gameWords :: IO WordList
gameWords = do
words <- allWords
return (filter gameLength words)
where
gameLength word =
let l = length word
in l > minWordLength && l < maxWordLength
randomWord :: WordList -> IO String
randomWord words = do
  -- upper bound is 'length words - 1' so the index stays in range for (!!)
  randomIndex <- randomRIO (0, length words - 1)
  return $ words !! randomIndex
randomWord' :: IO String
randomWord' = gameWords >>= randomWord
data Puzzle = Puzzle String [Maybe Char] [Char]
instance Show Puzzle where
show (Puzzle _ discovered guessed) =
(intersperse ' ' $ fmap renderPuzzleChar discovered)
++ " Guessed so far: "
++ guessed
freshPuzzle :: String -> Puzzle
freshPuzzle word = Puzzle word (map (const Nothing) word) []
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle word _ _) c =
c `elem` word
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ guessed) c =
c `elem` guessed
renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar Nothing = '_'
renderPuzzleChar (Just c) = c
fillInCharacter :: Puzzle -> Char -> Puzzle
fillInCharacter (Puzzle word filledInSoFar guessed) c =
Puzzle word newFilledInSoFar (c : guessed)
where
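    -- zipper reveals a position when the word's character there equals the
    -- guess; otherwise it keeps whatever was already discovered.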
zipper :: Char -> Char -> Maybe Char -> Maybe Char
zipper guessed wordChar guessChar =
if wordChar == guessed
then Just wordChar
else guessChar
newFilledInSoFar =
zipWith (zipper c) word filledInSoFar
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
putStrLn $ "Your guess was: " ++ [guess]
case (charInWord puzzle guess, alreadyGuessed puzzle guess) of
(_, True) -> do
putStrLn "You already guessed that letter."
return puzzle
(True, _) -> do
putStrLn "This character was in the word."
return (fillInCharacter puzzle guess)
(False, _) -> do
putStrLn "This character was not in the word."
putStrLn ""
return (fillInCharacter puzzle guess)
gameOver :: Puzzle -> IO ()
gameOver p@(Puzzle word _ _) =
if tooManyGuesses p then
do
putStrLn "You lost!"
putStrLn $ "The word was: " ++ word
exitSuccess
else return ()
tooManyGuesses :: Puzzle -> Bool
tooManyGuesses (Puzzle _ filledInSoFar guessed) =
length guessed - length uniqueFilledIn > 7
where
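    -- Only incorrect guesses count against the limit: each accepted guess is
    -- recorded once, so subtracting the distinct revealed letters leaves the
    -- number of misses.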
filledIn = filter isJust filledInSoFar
uniqueFilledIn = nub filledIn
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ filledInSoFar _) =
if all isJust filledInSoFar then
do
putStrLn "You win!"
exitSuccess
else return ()
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
gameOver puzzle
gameWin puzzle
putStrLn $ "Current puzzle is: " ++ show puzzle
putStr "Guess a letter: "
guess <- getLine
case guess of
[c] -> handleGuess puzzle c >>= runGame
_ -> putStrLn "Your guess must be a single character."
|
mikegehard/haskellBookExercises
|
hangman/src/Main.hs
|
mit
| 3,624 | 0 | 14 | 1,050 | 1,103 | 547 | 556 | 105 | 3 |
module Main where
import Prelude hiding (catch)
import Control.Exception ( AsyncException(..), catch )
import Control.Monad.Error
import Data.Version
import Data.List
import System.IO
import System.Environment
import System.Directory (getHomeDirectory)
import System.FilePath ((</>))
import System.Console.Haskeline hiding (handle, catch, throwTo)
import System.Console.GetOpt
import System.Exit (ExitCode (..), exitWith, exitFailure)
import Language.Egison
import Language.Egison.Util
import qualified Paths_egison_tutorial as P
main :: IO ()
main = do args <- getArgs
let (actions, nonOpts, _) = getOpt Permute options args
let opts = foldl (flip id) defaultOptions actions
case opts of
Options {optShowSections = True} -> putStrLn $ show tutorial
Options {optSection = Just sn, optSubSection = Just ssn} -> do
let sn' = (read sn) :: Int
let ssn' = (read ssn) :: Int
let ret = case tutorial of
Tutorial ss ->
if 0 < sn' && sn' <= length ss
then case nth sn' ss of
Section _ cs ->
if 0 < ssn' && ssn' <= length cs
then showContent $ nth ssn' cs
else "error: content out of range"
else "error: section out of range"
putStrLn ret
Options {optShowHelp = True} -> printHelp
Options {optShowVersion = True} -> printVersionNumber
Options {optPrompt = prompt} -> do
env <- initialEnv
case nonOpts of
[] -> showBanner >> repl env prompt
_ -> printHelp
data Options = Options {
optShowVersion :: Bool,
optShowHelp :: Bool,
optPrompt :: String,
optShowSections :: Bool,
optSection :: Maybe String,
optSubSection :: Maybe String
}
defaultOptions :: Options
defaultOptions = Options {
optShowVersion = False,
optShowHelp = False,
optPrompt = "> ",
optShowSections = False,
optSection = Nothing,
optSubSection = Nothing
}
options :: [OptDescr (Options -> Options)]
options = [
Option ['v', 'V'] ["version"]
(NoArg (\opts -> opts {optShowVersion = True}))
"show version number",
Option ['h', '?'] ["help"]
(NoArg (\opts -> opts {optShowHelp = True}))
"show usage information",
Option ['p'] ["prompt"]
(ReqArg (\prompt opts -> opts {optPrompt = prompt})
"String")
"set prompt string",
Option ['l'] ["list"]
(NoArg (\opts -> opts {optShowSections = True}))
"show section list",
Option ['s'] ["section"]
(ReqArg (\sn opts -> opts {optSection = Just sn})
"String")
"set section number",
Option ['c'] ["subsection"]
(ReqArg (\ssn opts -> opts {optSubSection = Just ssn})
"String")
"set subsection number"
]
printHelp :: IO ()
printHelp = do
putStrLn "Usage: egison-tutorial [options]"
putStrLn ""
putStrLn "Options:"
putStrLn " --help Display this information"
putStrLn " --version Display egison version information"
putStrLn " --prompt string Set prompt of the interpreter"
putStrLn ""
exitWith ExitSuccess
printVersionNumber :: IO ()
printVersionNumber = do
putStrLn $ showVersion P.version
exitWith ExitSuccess
showBanner :: IO ()
showBanner = do
putStrLn $ "Egison Tutorial Version " ++ showVersion P.version ++ " (C) 2013-2014 Satoshi Egi"
putStrLn $ "Welcome to Egison Tutorial!"
putStrLn $ "** Information **"
putStrLn $ "We can use a \'Tab\' key to complete keywords on the interpreter."
putStrLn $ "If we type a \'Tab\' key after a closed parenthesis, the next closed parenthesis will be completed."
putStrLn $ "*****************"
showFinishMessage :: IO ()
showFinishMessage = do
putStrLn $ "You have finished this section."
putStrLn $ "Thank you!"
showByebyeMessage :: IO ()
showByebyeMessage = do
putStrLn $ "Leaving Egison Tutorial.\nByebye."
yesOrNo :: String -> IO Bool
yesOrNo question = do
putStr $ question
putStr $ " (Y/n): "
hFlush stdout
input <- getLine
case input of
[] -> return True
('y':_) -> return True
('Y':_) -> return True
('n':_) -> return False
('N':_) -> return False
_ -> yesOrNo question
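-- 1-indexed selection from a list (partial: assumes the index is within range).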
nth n = head . drop (n - 1)
selectSection :: Tutorial -> IO Section
selectSection tutorial@(Tutorial sections) = do
putStrLn $ take 30 $ repeat '='
putStrLn $ "List of sections in the tutorial."
putStrLn $ show tutorial
putStrLn $ take 30 $ repeat '='
putStrLn $ "Choose a section to learn."
n <- getNumber (length sections)
return $ nth n sections
getNumber :: Int -> IO Int
getNumber n = do
putStr $ "(1-" ++ show n ++ "): "
hFlush stdout
input <- getLine
case input of
('1':_) -> return 1
('2':_) -> return 2
('3':_) -> return 3
('4':_) -> return 4
-- ('5':_) -> return 5
-- ('6':_) -> return 6
-- ('7':_) -> return 7
-- ('9':_) -> return 9
_ -> do
putStrLn "Invalid input!"
getNumber n
repl :: Env -> String -> IO ()
repl env prompt = do
section <- selectSection tutorial
case section of
Section _ cs -> loop env cs True
where
settings :: MonadIO m => FilePath -> Settings m
settings home = setComplete completeEgison $ defaultSettings { historyFile = Just (home </> ".egison_history") }
loop :: Env -> [Content] -> Bool -> IO ()
loop env [] _ = do
-- liftIO $ showFinishMessage
liftIO $ repl env prompt
loop env (content:contents) b = (do
if b
then liftIO $ putStrLn $ show content
else return ()
home <- getHomeDirectory
input <- liftIO $ runInputT (settings home) $ getEgisonExprOrNewLine prompt
case input of
Left Nothing -> do
b <- yesOrNo "Do you want to quit?"
if b
then return ()
else do
                b <- yesOrNo "Do you want to proceed next?"
                if b
                  then loop env contents True
                  else loop env (content:contents) False
Left (Just "") -> do
              b <- yesOrNo "Do you want to proceed next?"
              if b
                then loop env contents True
                else loop env (content:contents) False
Right (topExpr, _) -> do
result <- liftIO $ runEgisonTopExpr env topExpr
case result of
Left err -> do
liftIO $ putStrLn $ show err
loop env (content:contents) False
Right env' -> loop env' (content:contents) False)
`catch`
(\e -> case e of
UserInterrupt -> putStrLn "" >> loop env (content:contents) False
StackOverflow -> putStrLn "Stack over flow!" >> loop env (content:contents) False
HeapOverflow -> putStrLn "Heap over flow!" >> loop env (content:contents) False
_ -> putStrLn "error!" >> loop env (content:contents) False
)
data Tutorial = Tutorial [Section]
-- |title and contents
data Section = Section String [Content]
-- |explanation, examples, and exercises
data Content = Content String [String] [String]
instance Show Tutorial where
show = showTutorial
instance Show Section where
show = showSection
instance Show Content where
show = showContent
showTutorial :: Tutorial -> String
showTutorial (Tutorial sections) =
let n = length sections in
intercalate "\n" $ map (\(n, section) -> show n ++ ": " ++ show section) $ zip [1..n] sections
showSection :: Section -> String
showSection (Section title _) = title
showContent :: Content -> String
showContent (Content msg examples exercises) =
"====================\n" ++
msg ++ "\n" ++
(case examples of
[] -> ""
_ -> "\nExamples:\n" ++ (intercalate "\n" (map (\example -> " " ++ example) examples)) ++ "\n") ++
(case exercises of
[] -> ""
_ -> "\nExercises:\n" ++ (intercalate "\n" (map (\exercise -> " " ++ exercise) exercises)) ++ "\n") ++
"===================="
tutorial :: Tutorial
tutorial = Tutorial
[Section "Calculate numbers (10 minutes)"
[
Content "We can do arithmetic operations with '+', '-', '*', '/', 'modulo' and 'power'."
["(+ 1 2)", "(- 30 15)", "(* 10 20)", "(/ 20 5)", "(modulo 17 4)", "(power 2 10)"]
[],
Content "We can write nested expressions."
["(+ (* 10 20) 2)", "(/ (* 10 20) (+ 10 20))"]
["Try to calculate '(100 - 1) * (100 + 1)'."],
Content "We are supporting rational numbers."
["(+ 2/3 1/5)", "(/ 42 84)"]
[],
Content "We are supporting floats, too."
["(+ 10.2 1.3)", "(* 10.2 1.3)"]
[],
Content "We can convert a rational number to a float number with 'rtof'."
["(rtof 1/5)", "(rtof 1/100)"]
[],
Content "We can handle collections of numbers.\nWe construct collections with '{}'."
["{}", "{10}", "{1 2 3 4 5}"]
[],
Content "We can decompose a collection using the 'car' and 'cdr' function."
["(car {1 2 3 4 5})", "(cdr {1 2 3 4 5})", "(car (cdr {1 2 3 4 5}))"]
["Try to extract the third element of the collection '{1 2 3 4 5}' with 'car' and 'cdr'."],
    Content "With the 'take' function, we can extract a head part of a collection."
["(take 0 {1 2 3 4 5})", "(take 3 {1 2 3 4 5})"]
[],
    Content "We can handle infinite lists.\nFor example, 'nats' and 'primes' are infinite lists that contain all natural numbers and all prime numbers, respectively.\nTry to extract a head part from them."
["(take 10 nats)", "(take 30 nats)", "(take 10 primes)", "(take 30 primes)"]
    ["What is the 100th prime number?"],
Content "We can create a \"partial\" function using '$' as an argument."
["((* $ 2) 10)", "((modulo $ 3) 10)"]
[],
Content "With the 'map' function, we can operate each element of the collection at once."
["(take 100 (map (* $ 2) nats))", "(take 100 (map (modulo $ 3) nats))"]
[],
Content "With the 'foldl' function, we can gather together all elements of the collection using an operator you like."
["(foldl + 0 {1 2 3 4 5})", "(foldl * 1 {1 2 3 4 5})"]
    ["Try to get the sum of the numbers from 1 to 100."],
Content "Try to create a sequence of numbers '{1 1/2 1/3 1/4 ... 1/100}'."
[]
[],
Content "Try to calculate '1 + 1/2 + 1/3 + 1/4 + ... + 1/100'.\nRemember that we can convert a rational number to a float number with 'rtof'."
["(rtof 2/3)"]
[],
Content "Try to calculate '1 + (1/2)^2 + (1/3)^2 + (1/4)^2 + ... + (1/100)^2'.\nIn fact, '1 + (1/2)^2 + (1/3)^2 + (1/4)^2 + ...' converges to '(/ (power pi 2) 6)'."
[]
[],
Content "This is the end of this section.\nPlease play freely or proceed to the next section.\nThank you for enjoying our tutorial!"
[]
[]
],
Section "Basics of functional programming (10 minutes)"
[
Content "We can bind a value to a variable with a 'define' expression.\nWe can easily get the value we bound to a variable."
["(define $x 10)", "x", "(define $y (+ 1 x))", "y"]
[],
    Content "We support recursive definitions. It enables us to define a collection with infinite elements."
["(define $ones {1 @ones})", "(take 100 ones)", "(define $nats {1 @(map (+ $ 1) nats)})", "(take 100 nats)", "(define $odds {1 @(map (+ $ 2) odds)})", "(take 100 odds)"]
["Try to define the infinite list of even numbers that is like {2 4 6 8 10 ...}."],
Content "We can create a function with a 'lambda' expression. Let's define functions and test them."
["(define $increment (lambda [$x] (+ x 1)))", "(increment 10)", "(define $multiply (lambda [$x $y] (* x y)))", "(multiply 10 20)", "(define $sum (lambda [$n] (foldl + 0 (take n nats))))", "(sum 10)"]
    ["Try to define a 'fact' function, which obtains a natural number 'n' and returns 'n * (n - 1) * ... * 2 * 1'."],
Content "We can compare numbers using functions that return '#t' or '#f'.\n'#t' means the true.\n'#f' means the false.\nFunctions that return '#t' or '#f' are called \"predicates\"."
["(eq? 1 1)", "(gt? 1 1)", "(lt? 1 1)", "(gte? 1 1)", "(lte? 1 1)"]
[],
    Content "With the 'while' function, we can extract all head elements that satisfy the predicate.\n'primes' is an infinite list that contains all prime numbers."
["(while (lt? $ 100) primes)", "(while (lt? $ 1000) primes)"]
[],
Content "With the 'filter' function, we can extract all elements that satisfy the predicate."
["(take 100 (filter even? nats))", "(take 100 (filter prime? nats))", "(take 100 (filter (lambda [$p] (eq? (modulo p 4) 1)) primes))"]
["Try to enumerate the first 100 primes that are congruent to 3 modulo 4."],
Content "We combine numbers using '[]'.\nThese things are called 'tuples'."
["[1 2]", "[1 2 3]"]
[],
Content "Note that a tuple that consists of only one element is equal with that element itself."
["[1]", "[[[1]]]"]
[],
    Content "With the 'zip' function, we can combine two lists as follows."
["(take 100 (zip nats nats))", "(take 100 (zip primes primes))"]
["Try to generate the prime table that is like '{[1 2] [2 3] [3 5] [4 7] [5 11] ...}'"],
Content "Try to create a fibonacci sequence that is like '{1 1 2 3 5 8 13 21 34 55 ...}'.\n\nHint:\n Replace '???' in the following expression to a proper function.\n (define $fibs {1 1 @(map ??? (zip fibs (cdr fibs)))})"
[]
[],
Content "This is the end of this section.\nPlease play freely or proceed to the next section.\nThank you for enjoying our tutorial!"
[]
[]
],
Section "Basics of pattern-matching (10 minutes)"
[
Content "Let's try pattern-matching against a collection.\nThe 'join' pattern divides a collection into two collections.\nPlease note that the 'match-all' expression enumerates all results of pattern-matching."
["(match-all {1 2 3} (list integer) [<join $hs $ts> [hs ts]])",
"(match-all {1 2 3 4 5} (list integer) [<join $hs $ts> [hs ts]])"]
[],
Content "Try another pattern-constructor 'cons'.\nThe 'cons' pattern divides a collection into the head element and the rest collection.\n"
["(match-all {1 2 3} (list integer) [<cons $x $xs> [x xs]])",
"(match-all {1 2 3 4 5} (list integer) [<cons $x $xs> [x xs]])"]
[],
Content "'_' is a wildcard and matches with any objects."
["(match-all {1 2 3} (list integer) [<cons $x _> x])",
"(match-all {1 2 3 4 5} (list integer) [<join $hs _> hs])"]
[],
    Content "We can write non-linear patterns.\nA non-linear pattern is a pattern that allows multiple occurrences of the same variable.\nA pattern that begins with ',' matches the object when it is equal to the expression after ','."
["(match-all {1 1 2 3 3 2} (list integer) [<join _ <cons $x <cons ,x _>>> x])",
"(match-all {1 1 2 3 3 2} (list integer) [<join _ <cons $x <cons ,(+ x 1) _>>> x])"]
[],
    Content "We can pattern-match against infinite collections.\nWe can enumerate twin primes using pattern-matching as follows."
["(take 10 (match-all primes (list integer) [<join _ <cons $p <cons ,(+ p 2) _>>> [p (+ p 2)]]))"]
["What is the 100th twin prime?"],
Content "Try to enumerate the first 10 prime pairs whose form is (p, p+6) like '{{[5 11] [7 13] [11 17] [13 19] [17 23] ...}'."
[]
[],
    Content "A pattern preceded by '^' is called a not-pattern.\nA not-pattern matches when the target does not match the pattern."
["(match-all {1 1 2 2 3 4 4 5} (list integer) [<join _ <cons $x <cons ,x _>>> x])",
"(match-all {1 1 2 2 3 4 4 5} (list integer) [<join _ <cons $x <cons ^,x _>>> x])"]
[],
    Content "A pattern whose form is '(& p1 p2 ...)' is called an and-pattern.\nAn and-pattern is a pattern that matches the object if and only if all of the patterns are matched.\nAn and-pattern is used like an as-pattern in the following sample."
["(match-all {1 2 4 5 6 8 9} (list integer) [<join _ <cons $x <cons (& ^,(+ x 1) $y) _>>> [x y]])"]
[],
    Content "A pattern whose form is '(| p1 p2 ...)' is called an or-pattern.\nAn or-pattern matches the object if the object matches one of the given patterns.\nUsing it, we can enumerate prime triplets."
["(take 10 (match-all primes (list integer) [<join _ <cons $p <cons (& $m (| ,(+ p 2) ,(+ p 4))) <cons ,(+ p 6) _>>>> [p m (+ p 6)]]))"]
["What is the 20th prime triplet?"],
Content "Try to enumerate the first 8 prime quadruplets whose form is (p, p+2, p+6, p+8) like '{{[5 7 11 13] [11 13 17 19] ...}'."
[]
[],
Content "This is the end of this section.\nPlease play freely or proceed to the next section.\nThank you for enjoying our tutorial!"
[]
[]
],
Section "Pattern-matching against various data types (10 minutes)"
[
Content "We can also pattern-match against multisets and sets.\nWe can change the way of pattern-matching by just changing a matcher."
["(match-all {1 2 3} (list integer) [<cons $x $xs> [x xs]])",
"(match-all {1 2 3} (multiset integer) [<cons $x $xs> [x xs]])",
"(match-all {1 2 3} (set integer) [<cons $x $xs> [x xs]])"]
[],
Content "Try another pattern-constructor 'join'.\nThe 'join' pattern divides a collection into two collections."
["(match-all {1 2 3 4 5} (list integer) [<join $xs $ys> [xs ys]])",
"(match-all {1 2 3 4 5} (multiset integer) [<join $xs $ys> [xs ys]])",
"(match-all {1 2 3 4 5} (set integer) [<join $xs $ys> [xs ys]])"]
[],
Content "Try non-linear pattern-matching against multiset."
["(match-all {1 1 2 3 2} (multiset integer) [<cons $x <cons ,x _>> x])",
"(match-all {1 1 2 3 2} (multiset integer) [<cons $x <cons ,(+ x 2) _>> x])",
"(match-all {1 2 1 3 2} (multiset integer) [<cons $x ^<cons ,x _>> x])"]
[],
Content "The following samples enumerate pairs and triplets of natural numbers.\nNote that Egison really enumerates all results."
["(take 10 (match-all nats (set integer) [<cons $m <cons $n _>> [m n]]))",
"(take 10 (match-all nats (set integer) [<cons $l <cons $m <cons $n _>>> [l m n]]))"]
[],
Content "This is the end of our tutorial.\nThank you for enjoying our tutorial!\nPlease check our paper, manual and code for further reference!"
[]
[]
]
]
-- Section "Define your own functions"
-- [
-- Content "Did we think how about \"n\" combinations of the elements of the collection?\nWe already have a solution.\nWe can write a pattern that include '...' as the following demonstrations."
-- ["(match-all {1 2 3 4 5} (list integer) [(loop $i [1 3] <join _ <cons $a_i ...>> _) a])", "(match-all {1 2 3 4 5} (list integer) [(loop $i [1 4] <join _ <cons $a_i ...>> _) a])"]
-- [],
-- Content "Let's try 'if' expressions."
-- ["(if #t 1 2)", "(if #f 1 2)", "(let {[$x 10]} (if (eq? x 10) 1 2))"]
-- [],
-- Content "Using 'define' and 'if', we can write recursive functions as follow."
-- ["(define $your-take (lambda [$n $xs] (if (eq? n 0) {} {(car xs) @(your-take (- n 1) (cdr xs))})))", "(your-take 10 nats)"]
-- ["Try to write a 'your-while' function."],
-- Content "Try to write a 'your-map' function.\nWe may need 'empty?' function inside 'your-map' function."
-- ["(empty? {})", "(empty? {1 2 3})"]
-- []
-- Section "Writing scripts in Egison"
-- [
-- Content "Let's write a famous Hello world program in Egison.\nTry the following expression.\nIt is evaluated to the 'io-function'.\nTo execute an io-function, we use 'io' primitive as follow."
-- ["(io (print \"Hello, world!\"))"]
-- [],
-- Content "We can execute multiple io-functions in sequence as follow.\nThe io-functions is executed from the head."
-- ["(io (do {[(print \"a\")] [(print \"b\")] [(print \"c\")]} []))", "(io (do {[(write-string \"Type your name: \")] [(flush)] [$name (read-line)] [(print {@\"Hello, \" @name @\"!\"})]} []))"]
-- [],
-- Content "The following is a hello world program in Egison.\nTry to create a file with the following content and save it as \"hello.egi\", and execute it in the terminal as '% egison hello.egi'\n"
-- ["(define $main (lambda [$args] (print \"Hello, world!\")))"]
-- [],
-- Content "That's all. Thank you for finishing our tutorail! Did you enjoy it?\nIf you got into Egison programming. I'd like you to try Rosseta Code.\nThere are a lot of interesting problems.\n\n http://rosettacode.org/wiki/Category:Egison"
-- []
-- []
-- ]
-- ]
|
hatappo/egison-tutorial
|
Main.hs
|
mit
| 20,686 | 0 | 25 | 5,262 | 3,375 | 1,774 | 1,601 | 366 | 12 |
module ProgrammingInHaskell2.Chap05Spec (spec) where
import SpecHelper
import ProgrammingInHaskell2.Chap05
spec :: Spec
spec = do
describe "5.1" $ do
it "[x^2 | x <- [1..5]]" $ do
[x^2 | x <- [1..5]] `shouldBe` [1,4,9,16,25]
describe "5.2 Guards" $ do
it "factors" $ do
factors 15 `shouldBe` [1,3,5,15]
it "prime" $ do
prime 15 `shouldBe` False
it "primes" $ do
primes 40 `shouldBe` [2,3,5,7,11,13,17,19,23,29,31,37]
it "find" $ do
find 'b' [('a', 1),('b', 2),('c', 3),('b', 4)] `shouldBe` [2,4]
describe "5.3 The zip function" $ do
it "zip ['a', 'b', 'c'] [1,2,3,4]" $ do
zip ['a','b','c'] [1,2,3,4] `shouldBe` [('a',1),('b',2),('c',3)]
it "pairs" $ do
pairs [1,2,3,4] `shouldBe` [(1,2),(2,3),(3,4)]
it "sorted xsは、リストxsが整列済かどうかを判断する" $ do
sorted [1,2,3,4] `shouldBe` True
sorted [1,3,2,4] `shouldBe` False
it "positions" $ do
positions 2 [1,2,3,2] `shouldBe` [1,3]
|
akimichi/haskell-labo
|
test/ProgrammingInHaskell2/Chap05Spec.hs
|
mit
| 1,031 | 0 | 17 | 253 | 526 | 295 | 231 | 27 | 1 |
{-# htermination addListToFM_C :: (b -> b -> b) -> FiniteMap (Ratio Int) b -> [((Ratio Int),b)] -> FiniteMap (Ratio Int) b #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_addListToFM_C_7.hs
|
mit
| 144 | 0 | 3 | 25 | 5 | 3 | 2 | 1 | 0 |
module NES.Emulator.Debug where
import Data.Word (Word8, Word16)
import Data.Bits ((.|.))
import Debug.Trace (trace)
import Text.Printf (printf)
import NES.CPU(Flag(..))
import NES.Instruction
import NES.MonadEmulator
import NES.Emulator
import NES.EmulatorHelpers
emulateCycles :: MonadEmulator m => Int -> m ()
emulateCycles 0 = return ()
emulateCycles n = do
pc <- loadPC
registersState <- registersSnapshot
instruction <- decodeInstruction
execute instruction
traceCurrentState pc instruction registersState
emulateCycles $ n - 1
traceCurrentState :: MonadEmulator m => Word16 -> Instruction -> String -> m ()
traceCurrentState pc instruction registers = trace (printf "%04X %-20s" pc (show instruction) ++ " " ++ registers) $ return ()
registersSnapshot :: MonadEmulator m => m String
registersSnapshot = do
a <- loadA
x <- loadX
y <- loadY
status <- getStatus
sp <- loadSP
return $ printf "A:%02X X:%02X Y:%02X P:%02X SP:%02X" a x y status sp
where
getStatus :: MonadEmulator m => m Word8
getStatus = do
cf <- getFlag CF >>= (\b -> return $ if b then 0x01 else 0)
zf <- getFlag ZF >>= (\b -> return $ if b then 0x02 else 0)
idf <- getFlag IDF >>= (\b -> return $ if b then 0x04 else 0)
dmf <- getFlag DMF >>= (\b -> return $ if b then 0x08 else 0)
ovf <- getFlag OF >>= (\b -> return $ if b then 0x40 else 0)
nf <- getFlag NF >>= (\b -> return $ if b then 0x80 else 0)
return $ 0x20 .|. cf .|. zf .|. idf .|. dmf .|. ovf .|. nf
|
ksaveljev/hNES
|
NES/Emulator/Debug.hs
|
mit
| 1,560 | 0 | 15 | 377 | 585 | 299 | 286 | 38 | 7 |
{-# LANGUAGE
TypeOperators
, MultiParamTypeClasses
, FlexibleInstances
, OverlappingInstances
, FlexibleContexts #-}
module Calculus.Expr where
{-
This module generalizes expressions.
-}
import Auxiliary.List
import Auxiliary.NameSource
import Data.Functor
import Control.Monad.State
-- Coproduct type definition
data (f :+: g) a = Inl (f a) | Inr (g a)
deriving Show
-- Made right-associative in order
-- to get a linear search on it later
infixr 6 :+:
-- Made a Functor in order to fold it later
instance (Functor f, Functor g) => Functor (f :+: g) where
fmap f (Inl x) = Inl $ fmap f x
fmap f (Inr x) = Inr $ fmap f x
-- Expr is a data type that builds a recursive
-- expression over whatever constructors are present in its
-- functor argument f. This argument must be a coproduct
-- type made of sub-expressions that can appear in subtrees
-- of Expr.
newtype Expr f = In { out :: f (Expr f) }
{-
In :: f (Expr f) -> Expr f
out :: Expr f -> f (Expr f)
`In` rolls one layer of Expr
`out` unrolls one layer of Expr
These functions allow us to operate on
the outermost constructor
-}
-- To fold Expr
foldExpr :: Functor f => (f a -> a) -> Expr f -> a
foldExpr f = f . fmap (foldExpr f) . out
{-
Here `f` is an algebra which tells us how to fold
one layer of an Expr
-}
{-
Consider types
data A a = A a a
data B b = B b
data C c = C
And suppose that we want to make an Expr of type
Expr (A :+: B :+: C)
(:+:) is right-associative, so this type may be rewritten as
Expr (A :+: (B :+: C))
The signature of this type can be viewed as a list,
where `A` is the head and `B :+: C` is the tail. To get
the head, we can use `Inl`; to get the tail, `Inr`.
With that in mind, we can write a correct expression
`A (B (C)) C` that fits this type definition:
abcExpr :: Expr (A :+: B :+: C)
abcExpr = In (Inl (A
(In (Inr (Inl (B
(In (Inr (Inr C)))))))
(In (Inr (Inr C)))))
Using (:+:) to construct an Expr in a modular fashion
comes with constructor overhead. It is not
practical to write the nested injections by hand
each time; this can be automated.
-}
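{-
Illustration only (not part of the original module): assuming the
hypothetical functors A, B and C above are given Functor instances,
an algebra counting constructors can be folded over an Expr:

  sizeAlg :: (A :+: B :+: C) Int -> Int
  sizeAlg (Inl (A x y))     = 1 + x + y
  sizeAlg (Inr (Inl (B x))) = 1 + x
  sizeAlg (Inr (Inr C))     = 1

  size :: Expr (A :+: B :+: C) -> Int
  size = foldExpr sizeAlg    -- size abcExpr == 4
-}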
class (Functor sub, Functor sup) => sub :<: sup where
inj :: sub a -> sup a
prj :: sup a -> Maybe (sub a)
instance Functor f => f :<: f where
inj = id
prj = Just . id
instance (Functor f, Functor g) => f :<: (f :+: g) where
inj = Inl
prj (Inl f) = Just f
prj _ = Nothing
instance (Functor f, Functor g, Functor h, f :<: g) => f :<: (h :+: g) where
inj = Inr . inj
prj (Inr g) = prj g
prj _ = Nothing
-- rolls next layer
inject :: (g :<: f) => g (Expr f) -> Expr f
inject = In . inj
-- unrolls layer
match :: (g :<: f) => Expr f -> Maybe (g (Expr f))
match (In f) = prj f
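{-
Illustration only (again assuming Functor instances for the hypothetical
A, B and C above): with the (:<:) machinery, the nested Inl/Inr wrapping
of abcExpr is inferred automatically by `inject`:

  abcExpr' :: Expr (A :+: B :+: C)
  abcExpr' = inject (A (inject (B (inject C))) (inject C))

and `match` recovers one layer when the outermost constructor has the
requested shape:

  match abcExpr' :: Maybe (A (Expr (A :+: B :+: C)))
-}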
{- Rendering -}
class Render f where
render :: (NameState ns m, Render g) => f (Expr g) -> m String
instance (Render f, Render g) => Render (f :+: g) where
render (Inl f) = render f
render (Inr f) = render f
pretty :: (NameSource ns, Render f) => ns -> Expr f -> String
pretty ns f = evalState (render $ out f) ns
prettyPair ::
(NameSource ns, Render f, Render g) => ns -> (Expr f, Expr g) -> (String, String)
prettyPair ns (a, b) =
let
(aSrc, bSrc) = splitSrc ns
aString = pretty aSrc a
bString = pretty bSrc b
in (aString, bString)
renderPair ::
(NameState ns m, Render f) => (Expr f, Expr f) -> m (String, String)
renderPair (a, b) = get >>= return . flip prettyPair (a, b)
renderBinOp ::
(NameState ns m, Render f) => String -> (Expr f, Expr f) -> m String
renderBinOp op pair =
do (a, b) <- renderPair pair
return $ between "(" ")" $ between a b $ between " " " " op
showExprs :: (NameSource ns, Render f) => ns -> [Expr f] -> String
showExprs ns = showColumn . map (pretty ns)
|
wowofbob/calculus
|
Calculus/Expr.hs
|
mit
| 3,924 | 4 | 11 | 1,092 | 1,078 | 563 | 515 | -1 | -1 |
module Zwerg.Data.HP
( HP
, adjustHP
, adjustMaxHP
) where
import Zwerg.Prelude
newtype HP = MkHP (Int, Int)
deriving stock Generic
deriving anyclass Binary
validHP :: (Int,Int) -> Bool
validHP (curHP, maxHP) = curHP >= 0 && curHP <= maxHP && maxHP > 0
instance ZWrapped HP (Int, Int) where
unwrap (MkHP hp) = hp
wrap intPair = if validHP intPair then Just (MkHP intPair) else Nothing
instance ZDefault HP where
zDefault = MkHP (1,1)
{-# INLINABLE adjustHP #-}
adjustHP :: (Int -> Int) -> HP -> HP
adjustHP f (MkHP (curHP, maxHP))
| newHP < 0 = MkHP (0, maxHP)
| newHP > maxHP = MkHP (maxHP, maxHP)
| otherwise = MkHP (newHP, maxHP)
where newHP = f curHP
{-# INLINABLE adjustMaxHP #-}
adjustMaxHP :: (Int -> Int) -> HP -> HP
adjustMaxHP f (MkHP (curHP, maxHP))
| newMaxHP < 0 = MkHP (1, 1)
| curHP > newMaxHP = MkHP (newMaxHP, newMaxHP)
| otherwise = MkHP (curHP, newMaxHP)
where newMaxHP = f maxHP
|
zmeadows/zwerg
|
lib/Zwerg/Data/HP.hs
|
mit
| 962 | 0 | 9 | 228 | 400 | 215 | 185 | -1 | -1 |
-- file: ch03/ListADT.hs
-- From chapter 3, http://book.realworldhaskell.org/read/defining-types-streamlining-functions.html
data List a = Cons a (List a)
| Nil
deriving (Show)
fromList (x:xs) = Cons x (fromList xs)
fromList [] = Nil
|
Sgoettschkes/learning
|
haskell/RealWorldHaskell/ch03/ListADT.hs
|
mit
| 244 | 0 | 8 | 41 | 67 | 36 | 31 | 5 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGStringList
(js_clear, clear, js_initialize, initialize, js_getItem, getItem,
js_insertItemBefore, insertItemBefore, js_replaceItem, replaceItem,
js_removeItem, removeItem, js_appendItem, appendItem,
js_getNumberOfItems, getNumberOfItems, SVGStringList,
castToSVGStringList, gTypeSVGStringList)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"clear\"]()" js_clear ::
JSRef SVGStringList -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.clear Mozilla SVGStringList.clear documentation>
clear :: (MonadIO m) => SVGStringList -> m ()
clear self = liftIO (js_clear (unSVGStringList self))
foreign import javascript unsafe "$1[\"initialize\"]($2)"
js_initialize :: JSRef SVGStringList -> JSString -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.initialize Mozilla SVGStringList.initialize documentation>
initialize ::
(MonadIO m, ToJSString item, FromJSString result) =>
SVGStringList -> item -> m result
initialize self item
= liftIO
(fromJSString <$>
(js_initialize (unSVGStringList self) (toJSString item)))
foreign import javascript unsafe "$1[\"getItem\"]($2)" js_getItem
:: JSRef SVGStringList -> Word -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.getItem Mozilla SVGStringList.getItem documentation>
getItem ::
(MonadIO m, FromJSString result) =>
SVGStringList -> Word -> m result
getItem self index
= liftIO
(fromJSString <$> (js_getItem (unSVGStringList self) index))
foreign import javascript unsafe "$1[\"insertItemBefore\"]($2, $3)"
js_insertItemBefore ::
JSRef SVGStringList -> JSString -> Word -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.insertItemBefore Mozilla SVGStringList.insertItemBefore documentation>
insertItemBefore ::
(MonadIO m, ToJSString item, FromJSString result) =>
SVGStringList -> item -> Word -> m result
insertItemBefore self item index
= liftIO
(fromJSString <$>
(js_insertItemBefore (unSVGStringList self) (toJSString item)
index))
foreign import javascript unsafe "$1[\"replaceItem\"]($2, $3)"
js_replaceItem ::
JSRef SVGStringList -> JSString -> Word -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.replaceItem Mozilla SVGStringList.replaceItem documentation>
replaceItem ::
(MonadIO m, ToJSString item, FromJSString result) =>
SVGStringList -> item -> Word -> m result
replaceItem self item index
= liftIO
(fromJSString <$>
(js_replaceItem (unSVGStringList self) (toJSString item) index))
foreign import javascript unsafe "$1[\"removeItem\"]($2)"
js_removeItem :: JSRef SVGStringList -> Word -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.removeItem Mozilla SVGStringList.removeItem documentation>
removeItem ::
(MonadIO m, FromJSString result) =>
SVGStringList -> Word -> m result
removeItem self index
= liftIO
(fromJSString <$> (js_removeItem (unSVGStringList self) index))
foreign import javascript unsafe "$1[\"appendItem\"]($2)"
js_appendItem :: JSRef SVGStringList -> JSString -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.appendItem Mozilla SVGStringList.appendItem documentation>
appendItem ::
(MonadIO m, ToJSString item, FromJSString result) =>
SVGStringList -> item -> m result
appendItem self item
= liftIO
(fromJSString <$>
(js_appendItem (unSVGStringList self) (toJSString item)))
foreign import javascript unsafe "$1[\"numberOfItems\"]"
js_getNumberOfItems :: JSRef SVGStringList -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGStringList.numberOfItems Mozilla SVGStringList.numberOfItems documentation>
getNumberOfItems :: (MonadIO m) => SVGStringList -> m Word
getNumberOfItems self
= liftIO (js_getNumberOfItems (unSVGStringList self))
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/SVGStringList.hs
|
mit
| 5,018 | 64 | 11 | 862 | 1,109 | 617 | 492 | 85 | 1 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, TemplateHaskell, TupleSections, TypeFamilies, TypeSynonymInstances #-}
module SpaceWeather.FeaturePack where
import Control.Lens
import Control.Monad.Trans.Either
import Control.Monad.IO.Class
import qualified Data.Aeson.TH as Aeson
import qualified Data.ByteString.Char8 as BS
import Data.Char
import Data.Function(on)
import Data.List (sortBy)
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Yaml as Yaml
import qualified Data.Vector as V
import System.IO
import qualified System.IO.Hadoop as HFS
import System.Random
import Test.QuickCheck.Arbitrary
import Text.Printf
import SpaceWeather.CmdArgs
import SpaceWeather.Feature
import SpaceWeather.Format
import SpaceWeather.TimeLine
data FeatureSchema
= FeatureSchema
{ _colT :: Int
, _colX :: Int
, _scaling :: Double
, _isLog :: Bool
} deriving (Eq, Ord, Show, Read)
makeClassy ''FeatureSchema
Aeson.deriveJSON Aeson.defaultOptions{Aeson.fieldLabelModifier = drop 1} ''FeatureSchema
defaultFeatureSchema :: FeatureSchema
defaultFeatureSchema = FeatureSchema
{ _colT = 1
, _colX = 2
, _scaling = 1
, _isLog = False
}
newtype FeaturePack
= FeaturePack [Feature]
makeWrapped ''FeaturePack
makeClassy ''FeaturePack
data FeatureSchemaPack
= FeatureSchemaPack
{ _fspSchemaDefinitions :: Map.Map String FeatureSchema
, _fspFilenamePairs :: [(String, FilePath)]
} deriving (Eq, Ord, Show, Read)
makeClassy ''FeatureSchemaPack
Aeson.deriveJSON Aeson.defaultOptions{Aeson.fieldLabelModifier = drop 4} ''FeatureSchemaPack
instance Format FeatureSchemaPack where
encode = T.pack . BS.unpack . Yaml.encode
decode = Yaml.decodeEither . BS.pack . T.unpack
loadFeatureWithSchema :: FeatureSchema -> FilePath -> IO (Either String Feature)
loadFeatureWithSchema schema0 fp = runEitherT $ loadFeatureWithSchemaT schema0 fp
loadFeatureWithSchemaT :: FeatureSchema -> FilePath -> EitherT String IO Feature
loadFeatureWithSchemaT schema0 fp = do
txt0 <- liftIO $ do
hPutStrLn stderr $ "loading: " ++ fp
HFS.readFile $ fp
gen0 <- liftIO $ newStdGen
gen1 <- liftIO $ newStdGen
let
convert :: Double -> Double
convert x = if schema0^.isLog then (if x <= 0 then 0 else log x / log 10) else x
parseLine :: (Int, T.Text) -> Either String (TimeBin, Double)
parseLine (lineNum, txt) = do
let wtxt = T.words txt
m2e :: Maybe a -> Either String a
m2e Nothing = Left $ printf "file %s line %d: parse error" fp lineNum
m2e (Just x) = Right $ x
t <- m2e $ readAt wtxt (schema0 ^. colT - 1)
a <- m2e $ readAt wtxt (schema0 ^. colX - 1)
-- if (schema0^.isLog && a <= 0)
-- then Left $ printf "file %s line %d: logscale specified but non-positive colX value: %s" fp lineNum (show a)
-- else return (t,(schema0^.scaling) * convert a)
return (t,(schema0^.scaling) * convert a)
ret :: Either String Feature
ret = fmap fluctuate $
fmap Map.fromList $ mapM parseLine $ linesWithComment txt0
fluctuate :: Feature -> Feature
fluctuate f = f & partsOf each %~ (fluctuateSpace . fluctuateTime)
fluctuateSpace :: [Double] -> [Double]
fluctuateSpace xs = zipWith (\x f -> exp f * x) xs $ randomRs (-spacialNoise, spacialNoise) gen0
fluctuateTime :: [Double] -> [Double]
fluctuateTime xs = let vxs = V.fromList xs in
map (\i -> vxs V.! i) $
zipWith (\i r -> max 0 $ floor $ i-r) [0..] $ randomRs (0,1 + temporalNoise) gen1
hoistEither ret
loadFeatureSchemaPack :: FeatureSchemaPack -> IO (Either String FeaturePack)
loadFeatureSchemaPack fsp = do
let
list0 = fsp ^. fspFilenamePairs
scMap = fsp ^. fspSchemaDefinitions
name2map :: String -> FeatureSchema
name2map fmtName = maybe defaultFeatureSchema
id (Map.lookup fmtName scMap)
list1 <- runEitherT $ mapM (uncurry loadFeatureWithSchemaT) $
map (_1 %~ name2map) list0
return $ fmap FeaturePack list1
|
nushio3/UFCORIN
|
src/SpaceWeather/FeaturePack.hs
|
mit
| 4,218 | 0 | 20 | 949 | 1,216 | 649 | 567 | 97 | 4 |
module TestTCP (
tests4
,tests6
) where
-- local imports
import Network.Transport.Sockets.TCP
import TransportTestSuite
-- external imports
import Test.Framework
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
tests4 :: IO [Test.Framework.Test]
tests4 = do
name1 <- newTCPAddress
name2 <- newTCPAddress
name3 <- newTCPAddress
name4 <- newTCPAddress
return $ transportTestSuite
(newTCPTransport4 tcpSocketResolver4)
"tcp4"
name1
name2
name3
name4
tests6 :: IO [Test.Framework.Test]
tests6 = do
name1 <- newTCPAddress6
name2 <- newTCPAddress6
name3 <- newTCPAddress6
name4 <- newTCPAddress6
return $ transportTestSuite
(newTCPTransport6 tcpSocketResolver6)
"tcp6"
name1
name2
name3
name4
_log :: String
_log = "_test_TCP"
|
hargettp/courier
|
tests/TestTCP.hs
|
mit
| 924 | 0 | 10 | 181 | 187 | 98 | 89 | 34 | 1 |
module ExLint.Types
( Block(..)
, Language(..)
, Example(..)
, CheckResult(..)
, Plugin(..)
) where
import Data.Monoid ((<>))
import Data.Text (Text)
import Text.Markdown.Block (Block(..))
data Language
= Haskell
| Unknown
deriving Show
data Example = Example
{ examplePlugin :: Plugin
, examplePreamble :: Text
, exampleExpression :: Text
, exampleResult :: Text
}
deriving Show
data CheckResult
= Match
| MisMatch Text Text Text
deriving Show
data Plugin = Plugin
{ pluginLanguage :: Language
, pluginSigil :: Text
, pluginCheck :: Example -> IO CheckResult
}
instance Show Plugin where
show (Plugin l _ _) = "ExLint.Plugin for " <> show l
|
pbrisbin/ex-lint
|
src/ExLint/Types.hs
|
mit
| 740 | 0 | 10 | 199 | 215 | 132 | 83 | 29 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import CheckConcreteSyntax hiding (main)
import CheckQuantityBehaviour hiding (main)
import CheckSkeletonParser hiding (main)
import CheckTranslationPhase hiding (main)
import Core.System
import Test.Hspec (Spec, hspec)
main :: IO ()
main = do
finally (hspec suite) (putStrLn ".")
suite :: Spec
suite = do
checkQuantityBehaviour
checkConcreteSyntax
checkSkeletonParser
checkTranslationPhase
|
oprdyn/technique
|
tests/TestSuite.hs
|
mit
| 439 | 0 | 9 | 61 | 113 | 63 | 50 | 16 | 1 |
{-|
Module : Lambdajudge
Description : Easily host haskell based coding competitions using Lambdajudge library
Copyright : (c) Ankit Kumar, Venugopal, Safal 2015
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE FlexibleContexts #-}
module Lambdajudge
(
muevalAvlbl,
gradeSubmission,
runLJMonad
) where
import System.Environment
import System.FilePath
import System.Timeout
import System.Process
import System.Exit
import System.IO
import Data.List
import Control.Monad.State
import Control.Monad.Writer
import Control.Monad.Error
import Control.Monad.Identity
import Control.Exception
import DataStructures
import Utility
-- TODO : export functions finalize, check if mueval present, parallelize, count threads, loading, logging
-- | runs "solution" function loaded from the Submitted solution file within mueval. The expression is provided contents of test cases as input
runSubmission :: MuevalCommand -> LJMonad StatusCode
runSubmission mue = do
let options = ["-t", show $ maxRunTime mue] ++
["-e", "lines (" ++ expression mue ++ " " ++ show (testData mue) ++ ") == lines (" ++ (trim . show) (ansData mue) ++ ")"] ++
["-l", solutionFile mue]
(status,out,err) <- liftIO $ readProcessWithExitCode "mueval" options ""
tell $ show (status,out,err)
liftIO $ print (status,out,err)
modify succ
case status of
ExitSuccess -> case err of
"" -> if (read out :: Bool)
then return AC
else return WA
ExitFailure a -> if "Time limit exceeded" `isInfixOf` err
then return TLE
else error err
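-- For illustration (hypothetical values, not from the original source): with
-- expression "solution", testData "1 2\n", ansData "3\n" and a 5 second limit,
-- the option list built above is roughly
--   ["-t","5","-e","lines (solution \"1 2\\n\") == lines (\"3\\n\")","-l","Solution.hs"]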
-- | Maps evaluation of the solution file over each test case of the problem, effectively sequencing the runs corresponding to each test case
gradeSubmission :: Problem -> FilePath -> LJMonad [StatusCode]
gradeSubmission prob solutionFile = do
let computations = map (\i -> uncurry (MuevalCommand "solution") (testCases prob!!i) solutionFile (timeLimit prob)) [0..length (testCases prob) - 1]
mapM runSubmission computations
-- | Unwraps the LJMonad stack, running its writer, state and error layers
runLJMonad :: Num s => WriterT w (StateT s (ErrorT e m)) a -> m (Either e ((a, w), s))
runLJMonad = runErrorT . flip runStateT 0 . runWriterT
-- | Idiom to catch any type of exception thrown
catchAny :: IO a -> (SomeException -> IO a) -> IO a
catchAny = Control.Exception.catch
-- | Check if mueval is installed on this system
muevalAvlbl :: IO Bool
muevalAvlbl = do
res <- liftIO $ catchAny (readProcessWithExitCode "mueval" ["-e","1+1"] "") $ \e -> do
putStrLn "Got an exception."
return (ExitFailure 1,"","")
case res of
(ExitSuccess,out,err) -> return True
otherwise -> return False
-- | creating a sample Contest
createContest :: IO Problem
createContest = do
let dir = "test/contest1/Q1/"
t1 <- getFileContents (dir </> "input00.txt")
s1 <- getFileContents (dir </> "output00.txt")
t2 <- getFileContents (dir </> "input02.txt")
s2 <- getFileContents (dir </> "output02.txt")
ps1 <- getFileContents (dir </> "ProblemStatement")
sol1 <- getFileContents (dir </> "Solution/Solution.hs")
return $ Problem ps1 [(t1,s1),(t2,s2)] sol1 5
test = do
prob1 <- Lambdajudge.createContest
runLJMonad $ gradeSubmission prob1 "test/contest1/Q1/Solution/Solution.hs"
|
venugangireddy/Lambdajudge
|
src/Lambdajudge.hs
|
mit
| 3,627 | 0 | 19 | 840 | 878 | 456 | 422 | 68 | 4 |
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.CodeGen.Common
-- Copyright : (c) Phil Freeman 2013
-- License : MIT
--
-- Maintainer : Phil Freeman <[email protected]>
-- Stability : experimental
-- Portability :
--
-- |
-- Common code generation utility functions
--
-----------------------------------------------------------------------------
module Language.PureScript.CodeGen.Common where
import Data.Char
import Data.List (intercalate)
import Language.PureScript.Names
-- |
-- Convert an Ident into a valid Javascript identifier:
--
-- * Alphanumeric characters are kept unmodified.
--
-- * Reserved javascript identifiers are prefixed with '$$'.
--
-- * Symbols are prefixed with '$' followed by a symbol name or their ordinal value.
--
identToJs :: Ident -> String
identToJs (Ident name) | nameIsJsReserved name = "$$" ++ name
identToJs (Ident name) = concatMap identCharToString name
identToJs (Op op) = concatMap identCharToString op
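-- Illustrative examples (not part of the original module):
--   identToJs (Ident "new")  == "$$new"          -- reserved word
--   identToJs (Ident "foo'") == "foo$prime"
--   identToJs (Op "<>")      == "$less$greater"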
-- |
-- Attempts to find a human-readable name for a symbol; if none has been specified, returns the
-- ordinal value.
--
identCharToString :: Char -> String
identCharToString c | isAlphaNum c = [c]
identCharToString '_' = "_"
identCharToString '.' = "$dot"
identCharToString '$' = "$dollar"
identCharToString '~' = "$tilde"
identCharToString '=' = "$eq"
identCharToString '<' = "$less"
identCharToString '>' = "$greater"
identCharToString '!' = "$bang"
identCharToString '#' = "$hash"
identCharToString '%' = "$percent"
identCharToString '^' = "$up"
identCharToString '&' = "$amp"
identCharToString '|' = "$bar"
identCharToString '*' = "$times"
identCharToString '/' = "$div"
identCharToString '+' = "$plus"
identCharToString '-' = "$minus"
identCharToString ':' = "$colon"
identCharToString '\\' = "$bslash"
identCharToString '?' = "$qmark"
identCharToString '@' = "$at"
identCharToString '\'' = "$prime"
identCharToString c = '$' : show (ord c)
-- |
-- Checks whether an identifier name is reserved in Javascript.
--
nameIsJsReserved :: String -> Bool
nameIsJsReserved name =
name `elem` [ "abstract"
, "arguments"
, "boolean"
, "break"
, "byte"
, "case"
, "catch"
, "char"
, "class"
, "const"
, "continue"
, "debugger"
, "default"
, "delete"
, "do"
, "double"
, "else"
, "enum"
, "eval"
, "export"
, "extends"
, "final"
, "finally"
, "float"
, "for"
, "function"
, "goto"
, "if"
, "implements"
, "import"
, "in"
, "instanceof"
, "int"
, "interface"
, "let"
, "long"
, "native"
, "new"
, "null"
, "package"
, "private"
, "protected"
, "public"
, "return"
, "short"
, "static"
, "super"
, "switch"
, "synchronized"
, "this"
, "throw"
, "throws"
, "transient"
, "try"
, "typeof"
, "var"
, "void"
, "volatile"
, "while"
, "with"
, "yield" ]
-- |
-- Test if a string is a valid JS identifier (may return false negatives)
--
isIdent :: String -> Bool
isIdent s@(first : rest) | not (nameIsJsReserved s) && isAlpha first && all isAlphaNum rest = True
isIdent _ = False
moduleNameToJs :: ModuleName -> String
moduleNameToJs (ModuleName pns) = intercalate "_" (runProperName `map` pns)
|
bergmark/purescript
|
src/Language/PureScript/CodeGen/Common.hs
|
mit
| 3,960 | 0 | 12 | 1,333 | 671 | 386 | 285 | 101 | 1 |
{-# LANGUAGE OverloadedStrings, TypeSynonymInstances, FlexibleInstances #-}
module Y2017.M10.D05.Exercise where
{--
Yesterday, I said: "Go out to the database, get some data, analyze it and chart
it." Well, that's all good if you can work on the problem when you have access
to the internet at large to retrieve your data from the DaaS, but let's say,
hypothetically speaking, that you're behind a firewall, so you only have tiny
slots of time when you go out for that Pumpkin Spiced Latte with your friendies
to access those data. You can't write code, get the data, analyze the data and
then plot the results over just one 'PSL' (The TLAs are taking over smh).
So, what's the solution? Order a second PSL? But then that contributes to the
obesity pandemic, and I don't want that on my resume, thank you very much.
And then there's the credit ratings to consider.
(somebody cue the theme to the movie 'Brazil')
So, the data sets we're talking about aren't petabytes or exabytes, so why not
just retain those data locally so the development and analyses can be done
off-line or behind the firewall?
Why not, indeed!
Today's Haskell problem: read the rows of data from the article, subject, and
article_subject tables, save them locally as JSON, do some magic hand-waving
or coffee-drinking while you analyze those data off-line, then read in the
data from the JSON store.
--}
import Data.Aeson
import Data.Aeson.Encode.Pretty
import Data.Aeson.Types
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.HashMap.Strict as HM
import Data.Map (Map)
import Data.Maybe
import qualified Data.Text as T
import Data.Time
import Database.PostgreSQL.Simple
-- We've got the database reads from yesterday's exercise. Do that.
-- below imports available via 1HaskellADay git repository
import Data.Hierarchy
import Store.SQL.Util.Pivots
import Y2017.M10.D04.Exercise
-- Now let's save out those data to JSON
instance ToJSON Subject where
toJSON subj = undefined
instance ToJSON Pivot where
toJSON piv = undefined
instance ToJSON ArticleSummary where
toJSON artsum = undefined
-- Okay, so we've enjsonified the rows of the tables, now let's enjsonify
-- the entire table!
data Table a = Table { name :: String, rows :: [a] }
deriving (Eq, Show)
instance ToJSON a => ToJSON (Table a) where
toJSON table = undefined
-- save out the three tables as JSON to their own files
-- TIME PASSES -------------------------------------------------------------
-- Okay now do the same thing IN REVERSE! Load the tables from JSON files
instance FromJSON a => FromJSON (Table a) where
parseJSON obj = undefined
parseKV :: FromJSON a => (String, Value) -> Parser (Table a)
parseKV (name,list) = undefined
{--
Hint: parsing tables is a bit tricky because the key to the values is the
table name. See:
https://stackoverflow.com/questions/42578331/aeson-parse-json-with-unknown-key-in-haskell
for a discussion on parsing unknown keys
--}
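{--
A possible sketch, for illustration only (not part of the original exercise;
it assumes aeson's Object is a HashMap from Text to Value, matching the
Data.HashMap.Strict and Data.Text imports above):

instance FromJSON a => FromJSON (Table a) where
   parseJSON = withObject "Table" $ \o -> case HM.toList o of
      [(k, v)] -> parseKV (T.unpack k, v)
      _        -> fail "Table: expected exactly one table-name key"

parseKV :: FromJSON a => (String, Value) -> Parser (Table a)
parseKV (nm, list) = Table nm <$> parseJSON list
--}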
instance FromJSON Subject where
parseJSON obj = undefined
instance FromJSON Pivot where
parseJSON obj = undefined
instance FromJSON ArticleSummary where
parseJSON obj = undefined
-- read your tables back in
-- run your analyses
-- celebrate with Pumpkin Spice Lattes!
{-- BONUS -----------------------------------------------------------------
Once you have the grouping from yesterday, you can output the result
graphically as:
1. 3D scatterplot of topics by date
2. Concentric circles: topics containing dates containing articles
3. N-dimensional graph of Topics x Dates x Articles
you choose, or create your own data visualization
--}
visualize :: Grouping -> IO ()
visualize groups = undefined
groupToHierarchy :: Grouping -> Hierarchy String
groupToHierarchy = Hier "NYT Archive" . undefined
topicArts :: (Topic, Map Day [Title]) -> Hierarchy String
topicArts row = undefined
dayArts :: (Day, [Title]) -> Hierarchy String
dayArts = undefined
|
geophf/1HaskellADay
|
exercises/HAD/Y2017/M10/D05/Exercise.hs
|
mit
| 3,937 | 0 | 9 | 655 | 454 | 261 | 193 | 43 | 1 |
import FPPrac
merge [] ys = ys
merge xs [] = xs
merge (x:xs) (y:ys) | x <= y = x : merge xs (y:ys)
| otherwise = y : merge (x:xs) ys
mSort :: Ord a => [a] -> [a]
mSort [] = []
mSort [x] = [x]
mSort xs = let (as, bs) = splitAt (length xs `quot` 2) xs
in merge (mSort as) (mSort bs)
|
tcoenraad/functioneel-programmeren
|
practica/serie2/10.merge-sort.hs
|
mit
| 314 | 0 | 12 | 101 | 211 | 107 | 104 | 10 | 1 |
-- NES 6502 CPU --
module Cpu where
import Data.Word
import Data.Bits
import Data.Int
import Control.Monad.State.Strict
import Control.Applicative hiding ((<|>), many, optional, empty)
import qualified Data.Vector.Unboxed as VU
data CPU = CPU {
registers :: Registers,
memory :: Memory,
cycles :: Integer,
scanLine :: Integer
}
data Registers = Registers {
regA :: Word8, -- ^ Accumulator
regX :: Word8, -- ^ Index Register X
regY :: Word8, -- ^ Index Register Y
status :: Word8, -- ^ Processor Status Register
stackPtr :: Word8, -- ^ Stack Pointer
pc :: Word16 -- ^ Program Counter
}
data StatusFlags =
Carry
| Zero
| IRQDisable
| Decimal
| Break
| Unused
| Overflow
| Negative
data Memory = Memory {
workRAM :: VU.Vector Word8, -- ^ 0x0000 - 0x07FF Work RAM
ppuCtrlRAM :: VU.Vector Word8, -- ^ 0x2000 - 0x2007 PPU Control Registers
apuRegisters :: VU.Vector Word8, -- ^ 0x4000 - 0x401F APU Registers
expansionROM :: VU.Vector Word8, -- ^ 0x4200 - 0x5FFF Cartridge Expansion ROM
staticRAM :: VU.Vector Word8, -- ^ 0x6000 - 0x7FFF SRAM
prgROM0 :: VU.Vector Word8, -- ^ 0x8000 - 0xBFFF Program ROM bank 0
prgROM1 :: VU.Vector Word8 -- ^ 0xC000 - 0xFFFF Program ROM bank 1
}
type CPUState a = State CPU a
data AddressingMode =
Immediate | ZeroPageNoReg | ZeroPage Reg | AbsoluteNoReg |
Absolute Reg | IndirectX | IndirectY
data Reg = A | X | Y | S | SP deriving (Eq)
createCPU :: Word16 -> CPU
createCPU startAddr = cpu'
where cycles' = 0
regs = Registers 0 0 0 0x24 0xFD startAddr
cpu' = CPU regs mem cycles' 241
mem = Memory
(VU.replicate 0x0800 0x0)
(VU.replicate 0x0008 0x0)
(VU.replicate 0x0020 0x0)
(VU.replicate 0x1FDF 0x0)
(VU.replicate 0x2000 0x0)
(VU.replicate 0x4000 0x0)
(VU.replicate 0x4000 0x0)
-- Load ROM into memory starting at given address
loadMemory :: [Word8] -> Word16 -> CPU -> CPU
loadMemory rom startAddr = execState loadMemory'
where loadMemory' :: CPUState ()
loadMemory' =
mapM_ (\(val, offset) -> setMem val (startAddr + offset))
(zip rom [0,1..0xFFFF - startAddr])
getReg :: Reg -> CPUState Word8
getReg A = getA
getReg X = getX
getReg Y = getY
getReg S = getS
getReg SP = getSP
setReg :: Reg -> Word8 -> CPUState()
setReg A = setA
setReg X = setX
setReg Y = setY
setReg S = setS
setReg SP = setSP
-- Check if a memory access crosses a page boundary (e.g. 0x01FF and 0x0200 are in different pages);
-- if so, add 1 to the current cycle count
checkPageBoundary :: Word16 -> Word16 -> CPUState ()
checkPageBoundary w1 w2 =
if (w1 .&. 0xFF00) /= (w2 .&. 0xFF00)
then modifyCycles (+1)
else
return()
-- Obtain the value in memory for the given addressing mode.
-- If the boundary check is set then, for Absolute X/Y and Indirect Y, check
-- whether a page boundary is crossed and increment the cycle count if it is
obtainModeVal :: AddressingMode -> Bool -> CPUState Word8
obtainModeVal mode checkBoundary = case mode of
Immediate -> getIm
ZeroPageNoReg -> getMem . fromIntegral =<< getIm
ZeroPage reg -> getMem . fromIntegral =<< (+) <$> getIm <*> getReg reg
AbsoluteNoReg -> getMem =<< getImm
Absolute reg -> do
r <- getReg reg
imm <- getImm
let sum' = imm + (fromIntegral r)
when checkBoundary $ checkPageBoundary imm sum'
getMem sum'
IndirectX -> do
im <- getIm
x <- getX
addr <- concatBytesLe <$> (getMem $ fromIntegral (im + x))
<*> (getMem $ fromIntegral (im + x + 1))
getMem addr
IndirectY -> do
im <- getIm
addr1 <- getMem $ fromIntegral im
addr2 <- getMem $ fromIntegral (im + 1)
let addr = concatBytesLe addr1 addr2
y <- fromIntegral `fmap` getY
when checkBoundary $ checkPageBoundary addr (addr + y)
getMem $ addr + y
setModeVal :: Word8 -> AddressingMode -> CPUState ()
setModeVal w8 mode = case mode of
Immediate -> setIm w8
ZeroPageNoReg -> (setMem w8) . fromIntegral =<< getIm
ZeroPage reg -> (setMem w8) . fromIntegral =<< (+) <$> getIm <*> getReg reg
AbsoluteNoReg -> setMem w8 =<< getImm
Absolute reg -> setMem w8 =<< (+) <$> getImm <*> (fromIntegral <$> (getReg reg))
IndirectX -> do
im <- getIm
x <- getX
addr <- concatBytesLe <$> (getMem $ fromIntegral (im + x))
<*> (getMem $ fromIntegral (im + x + 1))
setMem w8 addr
IndirectY -> do
im <- getIm
addr1 <- getMem $ fromIntegral im
addr2 <- getMem $ fromIntegral (im + 1)
y <- getY
setMem w8 $ (concatBytesLe addr1 addr2) + (fromIntegral y)
push :: Word8 -> CPUState () -- [SP] = val, SP = SP - 1
push w8 = do
addr <- getSP
-- SP points to mem locations 0x100 to 0x1FF
setMem w8 $ 0x100 + (fromIntegral addr)
setSP (addr - 1)
pop :: CPUState (Word8) -- SP = SP + 1, val = [SP]
pop = do
addr <- getSP
val <- getMem $ 1 + 0x100 + (fromIntegral addr)
setSP (addr + 1)
return val
-- Get/Set all Registers from CPU
getRegs :: CPUState Registers
getRegs = get >>= return . registers
setRegs :: Registers -> CPUState ()
setRegs regs = get >>= \cpu -> put $ cpu {registers = regs}
-- Get/Set Cycles of CPU
getCycles :: CPUState Integer
getCycles = get >>= return . cycles
setCycles :: Integer -> CPUState ()
setCycles c = get >>= \cpu -> put $ cpu {cycles = c}
modifyCycles :: (Integer -> Integer) -> CPUState ()
modifyCycles f = do
cycles' <- getCycles
setCycles (f cycles')
-- Get/Set current ScanLine
getScanLine :: CPUState Integer
getScanLine = get >>= return . scanLine
setScanLine :: Integer -> CPUState ()
setScanLine sl = get >>= \cpu -> put $ cpu {scanLine = sl}
modifyScanLine :: (Integer -> Integer) -> CPUState ()
modifyScanLine f = do
sl <- getScanLine
setScanLine (f sl)
-- Set specific Registers
setA :: Word8 -> CPUState ()
setA w = getRegs >>= \regs -> setRegs $ regs {regA = w}
setX :: Word8 -> CPUState ()
setX w = getRegs >>= \regs -> setRegs $ regs {regX = w}
setY :: Word8 -> CPUState ()
setY w = getRegs >>= \regs -> setRegs $ regs {regY = w}
setS :: Word8 -> CPUState ()
setS w = getRegs >>= \regs -> setRegs $ regs {status = w}
setSP :: Word8 -> CPUState()
setSP w = getRegs >>= \regs -> setRegs $ regs {stackPtr = w}
setPC :: Word16 -> CPUState ()
setPC w = getRegs >>= \regs -> setRegs $ regs {pc = w}
-- |Get Registers
getA :: CPUState Word8
getA = getRegs >>= return . regA
getX :: CPUState Word8
getX = getRegs >>= return . regX
getY :: CPUState Word8
getY = getRegs >>= return . regY
getS :: CPUState Word8
getS = getRegs >>= return . status
getSP :: CPUState Word8
getSP = getRegs >>= return . stackPtr
getPC :: CPUState Word16
getPC = getRegs >>= return . pc
-- Memory
getIm :: CPUState Word8
getIm = do
pc' <- getPC
getMem (pc' - 1)
setIm :: Word8 -> CPUState ()
setIm w8 = do
pc' <- getPC
setMem w8 (pc' - 1)
getImm :: CPUState Word16
getImm = do
pc' <- getPC
concatBytesLe <$> getMem (pc' - 2) <*> getMem (pc' - 1)
setImm :: Word16 -> CPUState ()
setImm w16 = do
pc' <- getPC
setMem (fromIntegral (w16 .&. 0x7F)) (pc' - 2)
setMem (fromIntegral (w16 `shiftR` 8)) (pc' - 1)
getAllMem :: CPUState Memory
getAllMem = get >>= return . memory
setAllMem :: Memory -> CPUState ()
setAllMem mem = get >>= \cpu -> put $ cpu {memory = mem}
-- Get/Set word from Memory
getMem :: Word16 -> CPUState Word8
getMem addr = do
(Memory wRAM pCtrlRAM aRegisters expansionRegs static prgR0 prgR1) <- getAllMem
if addr < 0x2000 then
return $ wRAM VU.! fromIntegral (addr .&. 0x7FF)
else if addr < 0x4000 then
return $ pCtrlRAM VU.! fromIntegral (addr .&. 0x7)
else if addr < 0x4020 then
return $ aRegisters VU.! fromIntegral (addr - 0x4000)
else if addr < 0x6000 then
return $ expansionRegs VU.! fromIntegral (addr - 0x4020)
else if addr < 0x8000 then
return $ static VU.! fromIntegral (addr - 0x6000)
else if addr < 0xC000 then
return $ prgR0 VU.! fromIntegral (addr - 0x8000)
else
return $ prgR1 VU.! fromIntegral (addr - 0xC000)
setMem :: Word8 -> Word16 -> CPUState ()
setMem val addr = do
mem@(Memory wRAM pCtrlRAM aRegisters expansionRegs static prgR0 prgR1) <- getAllMem
if addr < 0x2000 then
setAllMem $ mem {workRAM = setMem' wRAM (.&. 0x7FF)}
else if addr < 0x4000 then
setAllMem $ mem {ppuCtrlRAM = setMem' pCtrlRAM (.&. 0x7)}
else if addr < 0x4020 then
setAllMem $ mem {apuRegisters = setMem' aRegisters (\a -> a - 0x4000)}
else if addr < 0x6000 then
setAllMem $ mem {expansionROM = setMem' expansionRegs (\a -> a - 0x4020)}
else if addr < 0x8000 then
setAllMem $ mem {staticRAM = setMem' static (\a -> a - 0x6000)}
else if addr < 0xC000 then
setAllMem $ mem {prgROM0 = setMem' prgR0 (\a -> a - 0x8000)}
else
setAllMem $ mem {prgROM1 = setMem' prgR1 (\a -> a - 0xC000)}
where setMem' :: VU.Vector Word8 -> (Word16 -> Word16) -> VU.Vector Word8
setMem' v f = VU.update v (VU.fromList [(fromIntegral (f addr), val)])
getCarry :: CPUState Bool
getCarry = getS >>= \s -> return $ (s .&. 0x1) /= 0
getNeg :: CPUState Bool
getNeg = getS >>= \s -> return $ (s .&. 0x80) /= 0
getZero :: CPUState Bool
getZero = getS >>= \s -> return $ (s .&. 0x2) /= 0
getOverflow :: CPUState Bool
getOverflow = getS >>= \s -> return $ (s .&. 0x40) /= 0
setFlag :: Word8 -> Bool -> CPUState ()
setFlag bits state' = do
s <- getS
setS $ op s
where op = if state' then (.|. bits) else (.&. (0xFF - bits))
setCarry :: Bool -> CPUState ()
setCarry = setFlag 0x1
setNeg :: Bool -> CPUState ()
setNeg = setFlag 0x80
setZero :: Bool -> CPUState ()
setZero = setFlag 0x2
setOverflow :: Bool -> CPUState ()
setOverflow = setFlag 0x40
setIRQ :: Bool -> CPUState ()
setIRQ = setFlag 0x4
-- Flag Checking Helper Functions --
checkNegFlag :: Word8 -> CPUState ()
checkNegFlag w8 = setNeg (w8 > 127)
checkZeroFlag :: Word8 -> CPUState ()
checkZeroFlag w8 = setZero (w8 == 0)
boolToBit :: Bool -> Word8
boolToBit b = if b then 1 else 0
-- Concatenate 2 bytes into a 16-bit little-endian word;
-- the first byte given is the lower half of the word, the second is the upper half
concatBytesLe :: Word8 -> Word8 -> Word16
concatBytesLe b1 b2 = fromIntegral b1 .|. shiftL (fromIntegral b2) 8
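-- Illustrative example: concatBytesLe 0x34 0x12 == 0x1234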
add3 :: Word8 -> Word8 -> Word8 -> Word8
add3 a b c = a + b + c
isNeg :: Word8 -> Bool
isNeg = (> 127)
toSigned :: Word8 -> Int8
toSigned w8 = fromIntegral $ (w8 .&. 127) - fromIntegral (w8 .&. 128)
|
RossMeikleham/NesMonad
|
src/Cpu.hs
|
mit
| 10,856 | 0 | 19 | 2,866 | 3,722 | 1,952 | 1,770 | 269 | 7 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html
module Stratosphere.ResourceProperties.EMRInstanceGroupConfigCloudWatchAlarmDefinition where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.EMRInstanceGroupConfigMetricDimension
-- | Full data type definition for
-- EMRInstanceGroupConfigCloudWatchAlarmDefinition. See
-- 'emrInstanceGroupConfigCloudWatchAlarmDefinition' for a more convenient
-- constructor.
data EMRInstanceGroupConfigCloudWatchAlarmDefinition =
EMRInstanceGroupConfigCloudWatchAlarmDefinition
{ _eMRInstanceGroupConfigCloudWatchAlarmDefinitionComparisonOperator :: Val Text
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionDimensions :: Maybe [EMRInstanceGroupConfigMetricDimension]
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionEvaluationPeriods :: Maybe (Val Integer)
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionMetricName :: Val Text
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionNamespace :: Maybe (Val Text)
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionPeriod :: Val Integer
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionStatistic :: Maybe (Val Text)
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionThreshold :: Val Double
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionUnit :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON EMRInstanceGroupConfigCloudWatchAlarmDefinition where
toJSON EMRInstanceGroupConfigCloudWatchAlarmDefinition{..} =
object $
catMaybes
[ (Just . ("ComparisonOperator",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionComparisonOperator
, fmap (("Dimensions",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionDimensions
, fmap (("EvaluationPeriods",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionEvaluationPeriods
, (Just . ("MetricName",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionMetricName
, fmap (("Namespace",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionNamespace
, (Just . ("Period",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionPeriod
, fmap (("Statistic",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionStatistic
, (Just . ("Threshold",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionThreshold
, fmap (("Unit",) . toJSON) _eMRInstanceGroupConfigCloudWatchAlarmDefinitionUnit
]
-- | Constructor for 'EMRInstanceGroupConfigCloudWatchAlarmDefinition'
-- containing required fields as arguments.
emrInstanceGroupConfigCloudWatchAlarmDefinition
:: Val Text -- ^ 'emrigccwadComparisonOperator'
-> Val Text -- ^ 'emrigccwadMetricName'
-> Val Integer -- ^ 'emrigccwadPeriod'
-> Val Double -- ^ 'emrigccwadThreshold'
-> EMRInstanceGroupConfigCloudWatchAlarmDefinition
emrInstanceGroupConfigCloudWatchAlarmDefinition comparisonOperatorarg metricNamearg periodarg thresholdarg =
EMRInstanceGroupConfigCloudWatchAlarmDefinition
{ _eMRInstanceGroupConfigCloudWatchAlarmDefinitionComparisonOperator = comparisonOperatorarg
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionDimensions = Nothing
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionEvaluationPeriods = Nothing
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionMetricName = metricNamearg
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionNamespace = Nothing
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionPeriod = periodarg
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionStatistic = Nothing
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionThreshold = thresholdarg
, _eMRInstanceGroupConfigCloudWatchAlarmDefinitionUnit = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-comparisonoperator
emrigccwadComparisonOperator :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Val Text)
emrigccwadComparisonOperator = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionComparisonOperator (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionComparisonOperator = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-dimensions
emrigccwadDimensions :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Maybe [EMRInstanceGroupConfigMetricDimension])
emrigccwadDimensions = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionDimensions (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionDimensions = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-evaluationperiods
emrigccwadEvaluationPeriods :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Maybe (Val Integer))
emrigccwadEvaluationPeriods = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionEvaluationPeriods (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionEvaluationPeriods = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-metricname
emrigccwadMetricName :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Val Text)
emrigccwadMetricName = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionMetricName (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionMetricName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-namespace
emrigccwadNamespace :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Maybe (Val Text))
emrigccwadNamespace = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionNamespace (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionNamespace = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-period
emrigccwadPeriod :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Val Integer)
emrigccwadPeriod = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionPeriod (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionPeriod = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-statistic
emrigccwadStatistic :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Maybe (Val Text))
emrigccwadStatistic = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionStatistic (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionStatistic = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-threshold
emrigccwadThreshold :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Val Double)
emrigccwadThreshold = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionThreshold (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionThreshold = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition.html#cfn-elasticmapreduce-instancegroupconfig-cloudwatchalarmdefinition-unit
emrigccwadUnit :: Lens' EMRInstanceGroupConfigCloudWatchAlarmDefinition (Maybe (Val Text))
emrigccwadUnit = lens _eMRInstanceGroupConfigCloudWatchAlarmDefinitionUnit (\s a -> s { _eMRInstanceGroupConfigCloudWatchAlarmDefinitionUnit = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/EMRInstanceGroupConfigCloudWatchAlarmDefinition.hs
|
mit
| 8,289 | 0 | 13 | 573 | 904 | 512 | 392 | 67 | 1 |
module FVL.EF
( F.Result(..)
, Expr(..)
, showTranslation
, run
, ParseError
, parseRun
, parseFileRun
) where
import FVL.Algebra
import qualified FVL.FAST as FAST
import qualified FVL.F as F
import FVL.EFAST
import FVL.Parser
argument :: String -> [String] -> Bool
argument s [] = False
argument s (x:xs) = if x == s then True else argument s xs
recursiveAlg :: String -> Algebra FAST.Expr Bool
recursiveAlg _ (FAST.CInt n) = False
recursiveAlg _ (FAST.CBool b) = False
recursiveAlg s (FAST.CVar s') = if s' == s then True else False
recursiveAlg _ (FAST.Add x y) = x || y
recursiveAlg _ (FAST.Sub x y) = x || y
recursiveAlg _ (FAST.Mul x y) = x || y
recursiveAlg _ (FAST.Div x y) = x || y
recursiveAlg _ (FAST.And x y) = x || y
recursiveAlg _ (FAST.Or x y) = x || y
recursiveAlg _ (FAST.Not x) = x
recursiveAlg _ (FAST.Equal x y) = x || y
recursiveAlg _ (FAST.Less x y) = x || y
recursiveAlg _ FAST.Empty = False
recursiveAlg _ (FAST.Cons x y) = x || y
recursiveAlg _ (FAST.If p x y) = p || x || y
recursiveAlg s (FAST.Function s' p) = if s' == s then False else p
recursiveAlg _ (FAST.Appl f x) = f || x
recursiveAlg s (FAST.LetRec f x p e) = if f == s
then False
else if x == s then p else p || e
recursiveAlg _ (FAST.Case p x _ _ y) = p || x || y
recursive :: String -> Fix FAST.Expr -> Bool
recursive s = cata $ recursiveAlg s
createWrapper :: [String] -> Fix FAST.Expr -> Fix FAST.Expr
createWrapper [] e = e
createWrapper (x:xs) e = Fx $ FAST.Function x (createWrapper xs e)
letTransform :: String -> Fix FAST.Expr -> Fix FAST.Expr -> Fix FAST.Expr
letTransform s x y = Fx $ FAST.Appl (Fx $ FAST.Function s y) x
modTransform :: Fix FAST.Expr -> Fix FAST.Expr -> Fix FAST.Expr
modTransform x y = let y' = Fx $ FAST.Mul y (Fx $ FAST.Div x y)
in Fx $ FAST.Sub x y'
lteTransform :: Fix FAST.Expr -> Fix FAST.Expr -> Fix FAST.Expr
lteTransform x y = Fx $ FAST.Or (Fx $ FAST.Less x y) (Fx $ FAST.Equal x y)
gtTransform :: Fix FAST.Expr -> Fix FAST.Expr -> Fix FAST.Expr
gtTransform x y = Fx . FAST.Not $ lteTransform x y
gteTransform :: Fix FAST.Expr -> Fix FAST.Expr -> Fix FAST.Expr
gteTransform x y = Fx . FAST.Not . Fx $ FAST.Less x y
alg :: Algebra Expr (Fix FAST.Expr)
alg (CInt n) = Fx $ FAST.CInt n
alg (CBool b) = Fx $ FAST.CBool b
alg (CVar s) = Fx $ FAST.CVar s
alg (Add x y) = Fx $ FAST.Add x y
alg (Sub x y) = Fx $ FAST.Sub x y
alg (Mul x y) = Fx $ FAST.Mul x y
alg (Div x y) = Fx $ FAST.Div x y
alg (Mod x y) = modTransform x y
alg (And x y) = Fx $ FAST.And x y
alg (Or x y) = Fx $ FAST.Or x y
alg (Not x) = Fx $ FAST.Not x
alg (Equal x y) = Fx $ FAST.Equal x y
alg (Less x y) = Fx $ FAST.Less x y
alg (LessEq x y) = lteTransform x y
alg (Great x y) = gtTransform x y
alg (GreatEq x y) = gteTransform x y
alg Empty = Fx FAST.Empty
alg (Cons x y) = Fx $ FAST.Cons x y
alg (If p x y) = Fx $ FAST.If p x y
alg (Function s p) = Fx $ FAST.Function s p
alg (Appl f x) = Fx $ FAST.Appl f x
alg (Let f [] p e) = letTransform f p e
alg (Let f (a:as) p e) = if recursive f p
then Fx $ FAST.LetRec f a (createWrapper as p) e
else letTransform f (createWrapper (a:as) p) e
alg (Semi x y) = Fx $ FAST.Appl (Fx $ FAST.Function "_" y) x
alg (Case p x s t y) = Fx $ FAST.Case p x s t y
translate :: Fix Expr -> Fix FAST.Expr
translate = cata alg
showTranslation :: Fix Expr -> String
showTranslation = show . translate
run :: Fix Expr -> F.Result
run = F.run . translate
parseRun :: String -> Either ParseError F.Result
parseRun s = case parseString s of
Left e -> Left e
Right e -> Right $ run e
parseFileRun :: FilePath -> IO (Either ParseError F.Result)
parseFileRun p = do
r <- parseFile p
case r of
Left e -> return $ Left e
Right e -> return . Right $ run e
|
burz/Feval
|
FVL/EF.hs
|
mit
| 3,733 | 0 | 14 | 856 | 2,024 | 1,004 | 1,020 | 98 | 5 |
module Geometry (
sphereVolume,
sphereArea,
cubeVolume,
cubeArea,
cuboidVolume,
cuboidArea
) where
sphereVolume :: Float -> Float
sphereVolume radius = (4.0 / 3.0) * pi * (radius ^ 3)
sphereArea :: Float -> Float
sphereArea radius = 4 * pi * (radius ^ 2)
cubeVolume :: Float -> Float
cubeVolume side = cuboidVolume side side side
cubeArea :: Float -> Float
cubeArea side = cuboidArea side side side
cuboidVolume :: Float -> Float -> Float -> Float
cuboidVolume a b c = rectangleArea a b * c
cuboidArea :: Float -> Float -> Float -> Float
cuboidArea a b c = rectangleArea a b * 2 + rectangleArea a c * 2 + rectangleArea c b * 2
rectangleArea :: Float -> Float -> Float
rectangleArea a b = a * b
|
afronski/playground-fp
|
books/learn-you-a-haskell-for-great-good/modules/Geometry.hs
|
mit
| 718 | 0 | 10 | 155 | 272 | 142 | 130 | 21 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main where
import Nanomsg
import Test.Framework.TH (defaultMainGenerator)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as C
import Control.Concurrent (threadDelay)
import Control.Applicative ( (<$>) )
import Data.Maybe (catMaybes)
instance Arbitrary ByteString where
arbitrary = C.pack <$> arbitrary
-- dummy test
prop_reverse :: [Int] -> Bool
prop_reverse xs =
xs == reverse (reverse xs)
-- test Pub and Sub sockets
prop_PubSub :: Property
prop_PubSub = monadicIO $ do
(msgs :: [ByteString]) <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
pub <- socket Pub
ep1 <- bind pub "inproc://pubsub"
sub1 <- socket Sub
ep2 <- connect sub1 "inproc://pubsub"
subscribe sub1 $ C.pack ""
sub2 <- socket Sub
ep3 <- connect sub2 "inproc://pubsub"
subscribe sub2 $ C.pack ""
threadDelay 1000
r <- mapM (sendMsg pub sub1 sub2) msgs
unsubscribe sub2 $ C.pack ""
unsubscribe sub1 $ C.pack ""
shutdown sub2 ep3
shutdown sub1 ep2
shutdown pub ep1
close pub
close sub1
close sub2
threadDelay 1000
return r
assert $ and res
where
sendMsg pub sub1 sub2 msg = do
send pub msg
send pub msg
a <- recv sub1
b <- recv sub1
c <- recv sub2
d <- recv sub2
return $ a == msg && b == msg && c == msg && d == msg
-- test Pair sockets
prop_Pair :: Property
prop_Pair = monadicIO $ do
(msgs :: [ByteString]) <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
s1 <- socket Pair
_ <- bind s1 "inproc://pair"
s2 <- socket Pair
_ <- connect s2 "inproc://pair"
threadDelay 1000
-- Send message from s1 to s2, then back from s2 to s1, then make sure it hasn't changed
r <- mapM (\m -> send s1 m >> recv s2 >>= send s2 >> recv s1 >>= return . (== m)) msgs
close s1
close s2
threadDelay 1000
return r
assert $ and res
-- test Pipeline (Push & Pull) sockets
prop_Pipeline :: Property
prop_Pipeline = monadicIO $ do
(msgs :: [ByteString]) <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
push <- socket Push
_ <- bind push "inproc://pipeline"
pull1 <- socket Pull
pull2 <- socket Pull
_ <- connect pull1 "inproc://pipeline"
_ <- connect pull2 "inproc://pipeline"
threadDelay 1000
r <- mapM (testSockets push pull1 pull2) msgs
close push
close pull1
close pull2
threadDelay 1000
return r
assert $ and res
where
testSockets push pull1 pull2 msg = do
send push msg
send push msg
send push msg
threadDelay 1000
a <- recv' pull1
b <- recv' pull1
c <- recv' pull1
d <- recv' pull2
e <- recv' pull2
f <- recv' pull2
let xs = catMaybes [a, b, c, d, e, f]
return $ all (== msg) xs && (length xs == 3)
-- test Req and Rep sockets
prop_ReqRep :: Property
prop_ReqRep = monadicIO $ do
(msgs :: [ByteString]) <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
req <- socket Req
_ <- bind req "inproc://reqrep"
rep <- socket Rep
_ <- connect rep "inproc://reqrep"
threadDelay 1000
r <- mapM (\m -> send req m >> recv rep >>= send rep >> recv req >>= return . (== m)) msgs
close req
close rep
threadDelay 1000
return r
assert $ and res
-- test Bus socket
prop_Bus :: Property
prop_Bus = monadicIO $ do
(msgs :: [ByteString]) <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
-- Probably not how you're supposed to connect Bus nodes..
b1 <- socket Bus
_ <- bind b1 "inproc://bus1"
b2 <- socket Bus
_ <- connect b2 "inproc://bus1"
_ <- bind b2 "inproc://bus2"
b3 <- socket Bus
_ <- connect b3 "inproc://bus2"
_ <- bind b3 "inproc://bus3"
_ <- connect b1 "inproc://bus3"
threadDelay 1000
r <- mapM (testSockets b1 b2 b3) msgs
close b1
close b2
close b3
threadDelay 1000
return r
assert $ and res
where
testSockets b1 b2 b3 msg = do
send b1 msg
a <- recv b2
b <- recv b3
send b2 msg
c <- recv b1
d <- recv b3
send b3 msg
e <- recv b1
f <- recv b2
return $ all (== msg) [a, b, c, d, e, f]
prop_TestOptions :: Property
prop_TestOptions = monadicIO $ do
res <- run $ do
req <- socket Req
_ <- bind req "tcp://*:5560"
surveyor <- socket Surveyor
_ <- bind surveyor "inproc://surveyor"
threadDelay 1000
setTcpNoDelay req 1
v1 <- tcpNoDelay req
setTcpNoDelay req 0
v2 <- tcpNoDelay req
setRequestResendInterval req 30000
v3 <- requestResendInterval req
setIpv4Only req 0
v4 <- ipv4Only req
setIpv4Only req 1
v5 <- ipv4Only req
setSndPrio req 7
v6 <- sndPrio req
setReconnectInterval req 50
v7 <- reconnectInterval req
setReconnectIntervalMax req 400
v8 <- reconnectIntervalMax req
setRcvBuf req 200000
v9 <- rcvBuf req
setSndBuf req 150000
v10 <- sndBuf req
setLinger req 500
v11 <- linger req
setSurveyorDeadline surveyor 2000
v12 <- surveyorDeadline surveyor
close req
close surveyor
threadDelay 1000
return [v1 == 1, v2 == 0, v3 == 30000, v4 == 0, v5 == 1, v6 == 7,
v7 == 50, v8 == 400, v9 == 200000, v10 == 150000, v11 == 500, v12 == 2000]
assert $ and res
main :: IO ()
main = $defaultMainGenerator
|
ivarnymoen/nanomsg-haskell
|
tests/Properties.hs
|
mit
| 6,398 | 0 | 21 | 2,363 | 2,098 | 943 | 1,155 | 190 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Main (main) where
import qualified Graphics.UI.SDL as SDL
import qualified Graphics.UI.SDL.Image as Image
import Graphics.UI.SDL.Types
import Control.Monad.State hiding (state)
import Shared.Assets
import Shared.Drawing
import Shared.Lifecycle
import Shared.Polling
import Shared.Utilities
import Shared.State
title :: String
title = "lesson17"
size :: ScreenSize
size = (640, 480)
inWindow :: (SDL.Window -> IO ()) -> IO ()
inWindow = withSDL . withWindow title size
initialState :: World
initialState = World { gameover = False, quadrants = map makeEntity allPositions }
makeEntity :: Position -> Entity
makeEntity pos = Entity { mouseState = MouseOut, position = pos }
main :: IO ()
main = withSDLContext $ \renderer ->
withAssets renderer ["./assets/mouse_states.png"] $ \assets -> do
let inputSource = collectEvents `into` updateState
let pollDraw = inputSource ~>~ drawWorld renderer assets
runStateT (repeatUntilComplete pollDraw) initialState
withSDLContext :: (SDL.Renderer -> IO ()) -> IO ()
withSDLContext f = inWindow $ \window -> Image.withImgInit [Image.InitPNG] $ do
_ <- setHint "SDL_RENDER_SCALE_QUALITY" "0" >>= logWarning
renderer <- createRenderer window (-1) [SDL.SDL_RENDERER_ACCELERATED, SDL.SDL_RENDERER_PRESENTVSYNC] >>= either throwSDLError return
_ <- f renderer
SDL.destroyRenderer renderer
data World = World { gameover :: Bool, quadrants :: [Entity] }
data Entity = Entity { mouseState :: EntityState, position :: Position }
data Position = TopLeft | TopRight | BottomLeft | BottomRight deriving (Eq, Enum, Bounded)
data EntityState = MouseOut | MouseOver | MouseDown | MouseUp
drawWorld :: SDL.Renderer -> [Asset] -> World -> IO ()
drawWorld renderer assets world = withBlankScreen renderer $ mapM render' (quadrants world)
where (texture, w, h) = head assets
render' entity = with2 (maskFor entity) (positionFor entity) (SDL.renderCopy renderer texture)
sprite = toRect 0 0 (w `div` 2) (h `div` 2)
maskFor entity = maskFromState sprite (mouseState entity)
positionFor entity = sprite `moveTo` positionToPoint (position entity)
within :: (Int, Int) -> (Int, Int) -> Bool
within (mx, my) (px, py) = withinX && withinY
where withinX = px < mx && mx < px + 320
withinY = py < my && my < py + 240
maskFromState :: SDL.Rect -> EntityState -> SDL.Rect
maskFromState sprite MouseOut = sprite `moveTo` positionToPoint TopLeft
maskFromState sprite MouseOver = sprite `moveTo` positionToPoint TopRight
maskFromState sprite MouseDown = sprite `moveTo` positionToPoint BottomLeft
maskFromState sprite MouseUp = sprite `moveTo` positionToPoint BottomRight
positionToPoint :: Position -> (Int, Int)
positionToPoint TopLeft = (0, 0)
positionToPoint TopRight = (320, 0)
positionToPoint BottomLeft = (0, 240)
positionToPoint BottomRight = (320, 240)
allPositions :: [Position]
allPositions = [minBound .. ]
updateState :: (Foldable f) => f SDL.Event -> World -> World
updateState events world = foldl applyEvent world events
applyEvent :: World -> SDL.Event -> World
applyEvent world (SDL.QuitEvent _ _) = world { gameover = True }
applyEvent world (SDL.MouseMotionEvent _ _ _ _ _ x y _ _) = world { quadrants = updatedEntities }
where updatedEntities = map (makeNewEntity (x, y) MouseOver) allPositions
applyEvent world (SDL.MouseButtonEvent evtType _ _ _ _ _ _ x y)
| evtType == SDL.SDL_MOUSEBUTTONDOWN = world { quadrants = updatedEntities MouseDown }
| evtType == SDL.SDL_MOUSEBUTTONUP = world { quadrants = updatedEntities MouseUp }
| otherwise = world
where updatedEntities ms = map (makeNewEntity (x, y) ms) allPositions
applyEvent world _ = world
makeNewEntity :: (Integral a) => (a, a) -> EntityState -> Position -> Entity
makeNewEntity (x', y') ms pos = Entity { mouseState = newState, position = pos }
where newState = getMouseState pos (x, y) ms
(x, y) = (fromIntegral x', fromIntegral y')
getMouseState :: Position -> (Int, Int) -> EntityState -> EntityState
getMouseState pos (x, y) ms
| (x, y) `within` n = ms
| otherwise = MouseOut
where n = positionToPoint pos
repeatUntilComplete :: (Monad m) => m World -> m ()
repeatUntilComplete game = game >>= \world -> unless (gameover world) $ repeatUntilComplete game
toRect :: (Integral a) => a -> a -> a -> a -> SDL.Rect
toRect x y w h = SDL.Rect { rectX = fromIntegral x, rectY = fromIntegral y, rectW = fromIntegral w, rectH = fromIntegral h }
moveTo :: (Integral a1, Integral a2) => SDL.Rect -> (a1, a2) -> SDL.Rect
moveTo rect (x, y) = rect { rectX = fromIntegral x, rectY = fromIntegral y }
|
Rydgel/haskellSDL2Examples
|
src/lesson17.hs
|
gpl-2.0
| 4,735 | 0 | 15 | 879 | 1,673 | 905 | 768 | 89 | 1 |
module Cryptography.ComputationalProblems.Abstract where
import Notes
import Functions.Basics.Macro
import Functions.Basics.Terms
import Logic.FirstOrderLogic.Macro
import Probability.Distributions.Macro
import Probability.ProbabilityMeasure.Macro
import Probability.ProbabilityMeasure.Terms
import Probability.RandomVariable.Macro
import Probability.RandomVariable.Terms
import Relations.Orders.Macro
import Relations.Orders.Terms hiding
(order)
import Sets.Basics.Terms
import Sets.Partition.Terms
import Cryptography.ComputationalProblems.Games.Terms
import Cryptography.ComputationalProblems.Abstract.Macro
import Cryptography.ComputationalProblems.Abstract.Terms
abstractSS :: Note
abstractSS = subsection "Abstract computational problems" $ do
performanceFunctionDefinition
subsubsection "Hardness" $ do
aSolverDefinition
informationTheoreticalHardness
subsubsection "Cases" $ do
worstCaseDefinition
distributionCaseDefinition
averageCaseDefinition
averageCasePerformanceDifference
probCaseNotation
performanceFunctionDefinition :: Note
performanceFunctionDefinition = nte $ do
de $ do
lab problemDefinitionLabel
lab solverDefinitionLabel
lab performanceDefinitionLabel
lab performanceValueDefinitionLabel
lab performanceFunctionDefinitionLabel
let sl = "s"
s ["Let", m probl_, "be an abstract", problem', and, m solvs_, "a", set, "of abstract", solvers', for, m probl_]
s ["Let", m perfs_, "be a", set, "of so-called", performanceValues', "associated with", m probl_]
s ["A", performanceFunction', "is a", function, "as follows"]
ma $ func perff_ solvs_ perfs_ sl (perf_ sl)
nte $ do
s ["Performance values are often real numbers, for example a success", probability, "or a distinguishing", advantage]
s ["In the simplest case,", performanceValues, "are binary"]
aSolverDefinition :: Note
aSolverDefinition = de $ do
s ["Let", m probl_, "be a", searchProblem, and, m solvs_, "a", set, "of", solvers, for, m probl_]
let po = partord_
s ["Let", m perfs_, "be the", set, performanceValues, "associated with", m probl_, "such that", m perfs_, "is equipped with a", partialOrder, m po]
let a = "a"
s ["A", solver, "for which the following holds is called an", nSolver' a, "for", m probl_, "if the following holds"]
let sl = "s"
ma $ fa (sl ∈ solvs_) (a ⊆: perf_ sl)
informationTheoreticalHardness :: Note
informationTheoreticalHardness = do
de $ do
s ["Let", m probl_, "be a", searchProblem, and, m solvs_, "a", set, "of", solvers, for, m probl_]
let po = partord_
s ["Let", m perfs_, "be the", set, performanceValues, "associated with", m probl_, "such that", m perfs_, "is equipped with a", partialOrder, m po]
let sl = "s"
e = epsilon
s ["If every", solver, m sl, "has a", performance, "smaller than some", m e <> ", we call", m probl_, "information-theoreticall", or, "unconditionally", eHard' e]
ma $ fa (sl ∈ solvs_) (perf_ sl ⊆: e)
nte $ do
s ["A statement like this is often called information-theoretic or unconditional hardness because it holds for any solver, independently of its complexity"]
worstCaseDefinition :: Note
worstCaseDefinition = de $ do
lab worstCaseProblemDefinitionLabel
let p = mathcal "P"
s ["Let", m p, "be a", set, "of", problems, and, m $ solvs p, "a", set, "of solvers for all of those", problems]
s ["We define the", worstCaseProblem', m $ spwc p, "as the abstract", problem, "for which any", solver <> "'s", performance, "is defined as the", infimum, "over all the", performances, "of the", solver, "for the", problems, "in", m p]
let pp = "p"
sl = "s"
ma $ perf (spwc p) sl =: infcomp (pp ∈ p) (perf pp sl)
distributionCaseDefinition :: Note
distributionCaseDefinition = de $ do
let p = mathcal "P"
s ["Let", m p, "be a", set, "of", problems, and, m $ solvs p, "a", set, "of solvers for all of those", problems]
let d = "D"
ppp = "P"
s ["Let", m d, "be a", probabilityDistribution, "on a", m p <> "-" <> randomVariable, m ppp]
s ["We define the", weightedAverageCaseProblem', "over", m d, or, dProblem' d, m $ spdc d p, "as the abstract", problem, "for which any", solver <> "'s", performance, "is defined as the weighted average over all the", performances, "of the", solver, "for the", problems, "in", m p, "according to the", distribution, m d]
let pp = "p"
sl = "s"
ma $ perf (spdc d p) sl =: sumcmp (pp ∈ p) (prdsm d pp * perf pp sl)
s ["In terms of the random variable, that looks as follows"]
ma $ perf (spdc d p) sl =: sumcmp (pp ∈ p) (prob (ppp =: pp) * perf pp sl)
averageCaseDefinition :: Note
averageCaseDefinition = de $ do
let p = mathcal "P"
s ["Let", m p, "be a", set, "of", problems, and, m $ solvs p, "a", set, "of solvers for all of those", problems]
s ["We define the", averageCaseProblem, "as the", dProblem uniformD_, for, m p]
averageCasePerformanceDifference :: Note
averageCasePerformanceDifference = lem $ do
let pp = mathcal "P"
p = "P"
q = "Q"
d = ("D" !:)
dp = d p
dq = d q
oo = perfs ""
s ["Let", m pp, "be a set of", problems]
s ["Let", m p, and, m q, "be the", weightedAverageCaseProblems, over, m dp, and, m dq, "respectively"]
s ["Let their", performances, "be", m $ oo ⊆ reals]
-- s ["Let", m p, and, m q, "be two", weightedAverageCaseProblems, with, probabilityDistributions, m dp, and, m dq, respectively, "with the same", set, "of", performances, m $ oo ⊆ reals]
let o = "o"
o1 = o !: 1
o2 = o !: 2
sl = "s"
ma $ fa sl $ perf p sl <= perf q sl + (pars $ max (cs [o1, o2] ∈ oo) (abs $ o1 - o2)) * statd p q
proof $ do
let a = "a"
b = "b"
s ["Let", m $ ccint a b, "be any interval that is a superset of", m oo]
ma $ oo ⊆ ccint a b
s ["We will prove this by showing that the following inequality holds, which implies the theorem, because", m $ b - a >= (pars $ max (cs [o1, o2] ∈ oo) (abs $ o1 - o2)), "holds"]
ma $ perf p sl - perf q sl <= (pars $ b - a) * statd p q
s ["Consider the following two", sets, "of", problem, "instances"]
let pr = "p"
ppl = pp ^ "+"
ma $ ppl =: setcmpr (pr ∈ p) (perf p pr >= perf q pr)
let pmn = pp ^ "-"
ma $ pmn =: setcmpr (pr ∈ q) (perf p pr < perf q pr)
s ["Note that", m ppl, and, m pmn, "form a", partition, "of", m pp]
s ["Therefore, we note the following first"]
aligneqs
( sumcmp (pr ∈ ppl) (pars $ prob (p =: pr) - prob (q =: pr))
+ sumcmp (pr ∈ pmn) (pars $ prob (p =: pr) - prob (q =: pr))
)
[ sumcmp (pr ∈ pp) (pars $ prob (p =: pr) - prob (q =: pr))
, sumcmp (pr ∈ pp) (prob (p =: pr))
- sumcmp (pr ∈ pp) (prob (q =: pr))
, 1 - 1
, 0
]
s ["This is equivalent with the following equation"]
ma $ sumcmp (pr ∈ ppl) (pars $ prob (p =: pr) - prob (q =: pr)) =: - sumcmp (pr ∈ pmn) (pars $ prob (p =: pr) - prob (q =: pr))
s ["We now write the", statisticalDistance, "between", m p, and, m q, "in terms of that equation"]
aligneqs
(statd p q)
[ (1 / 2) * sumcmp (pr ∈ pp) (abs $ prob (p =: pr) - prob (q =: pr))
, (1 / 2) * (pars $
sumcmp (pr ∈ ppl) (abs $ prob (p =: pr) - prob (q =: pr))
+ sumcmp (pr ∈ pmn) (abs $ prob (p =: pr) - prob (q =: pr)))
, (1 / 2) * (pars $
sumcmp (pr ∈ ppl) (prob (p =: pr) - prob (q =: pr))
- sumcmp (pr ∈ pmn) (prob (p =: pr) - prob (q =: pr)))
, (1 / 2) * (pars $
sumcmp (pr ∈ ppl) (prob (p =: pr) - prob (q =: pr))
+ sumcmp (pr ∈ ppl) (prob (p =: pr) - prob (q =: pr)))
, sumcmp (pr ∈ ppl) (pars $ prob (p =: pr) - prob (q =: pr))
]
s ["Now we use this formula for the statistical distance to finally prove the inequality stated above"]
aligneqs
(perf p sl - perf q sl)
[ sumcmp (pr ∈ pp) (prob (p =: pr) * perf pr sl)
- sumcmp (pr ∈ pp) (prob (q =: pr) * perf pr sl)
, sumcmp (pr ∈ pp) (pars $ prob (p =: pr) * perf pr sl - prob (q =: pr) * perf pr sl)
, sumcmp (pr ∈ pp) ((pars $ prob (p =: pr) - prob (q =: pr)) * perf pr sl)
, sumcmp (pr ∈ ppl) ((pars $ prob (p =: pr) - prob (q =: pr)) * perf pr sl)
+ sumcmp (pr ∈ pmn) ((pars $ prob (p =: pr) - prob (q =: pr)) * perf pr sl)
]
s ["Now we use that", m $ perf pr sl, "is less than, or equal to", m b, "and that", m $ perf pr sl, "is greater than, or equal to,", m a]
ma $ "" <= b * sumcmp (pr ∈ ppl) (pars $ prob (p =: pr) - prob (q =: pr))
+ a * sumcmp (pr ∈ pmn) (pars $ prob (p =: pr) - prob (q =: pr))
s ["We finish by noting that the second factor of the first term in the rigth-hand side of this inequality, is the", statisticalDistance, "between", m p, and, m q, "while the second factor of the second term is the exact opposite of that"]
ma $ "" =: b * statd p q - a * statd p q =: (pars $ b - a) * statd p q
probCaseNotation :: Note
probCaseNotation = de $ do
let p = "p"
s ["Usually many problem can be described as being a specific instance with respect to some key information in what's called an", instanceSpace]
s ["We then use the following notation"]
itemize $ do
item $ do
s ["We use", m $ spwc p, "to mean", m p, "in the worst-case"]
item $ do
let d = "D"
s ["We use", m $ spdc d p, "to mean", m p, "in the case of the distribution", m d]
item $ do
s ["We use", m $ spac p, "to mean", m p, "in the average-case"]
|
NorfairKing/the-notes
|
src/Cryptography/ComputationalProblems/Abstract.hs
|
gpl-2.0
| 10,349 | 0 | 23 | 3,108 | 3,764 | 1,959 | 1,805 | 177 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Handler.Loan where
import Import
import Handler.Home (loanForm
,displayInputForm
,initLoan
)
import Handler.Util (Loan (..)
,initErrors
,loanValidation
,anyError
,presentLoan
,newAbsLoan
,presentLoanWithoutFee
,total
,feeAmor
,RoundingType (..)
,Freq (..)
,Amount
,Rate
,showAmt
,isBalloon
,isUnfoldedBalloon
,simpleLoanHash
,Financing (..)
,LoanErrors
,fieldInstDay
,fieldFinDate
,instDayField
,financingForm
,initFinancing
,displayInputFormFin
,getLoanFromSession
)
--import qualified Data.Csv as CSV
--import qualified Data.List as List (head)
import qualified Data.Map.Strict as Map
-- The POST handler processes the form. If it is successful, it displays the
-- calculated loan overview and instalment plan. Otherwise, it displays the form again with error messages.
postLoanR :: Handler Html
postLoanR = do
((result, widget), enctype) <- runFormPost $ loanForm (initLoan) initErrors
case result of
FormSuccess loan -> do
let ip = presentLoan loan
ipWF = presentLoanWithoutFee loan
cl = calc $ newAbsLoan loan
setLoanToSession loan
(widget, enctype) <- generateFormPost $ loanForm (loan) (loanValidation loan)
(widgetFin, enctypeFin) <- generateFormPost $ financingForm initFinancing initErrors
defaultLayout $ case (anyError $ loanValidation loan) of
True -> displayInputForm MsgCalculatorValidation widget enctype
False -> do
let isFee = feeAmountS loan /= Nothing && feeAmountS loan /= Just 0 ||
feePercentS loan /= Nothing && feePercentS loan /= Just 0
displayInputForm MsgLoanScreen widget enctype
renderLoanOverview loan (fee cl)
displayInputFormFin loan MsgFinancing widgetFin enctypeFin
renderFoldedInstalmentPlan (installments cl) (rNom cl) (rEffRec cl)
if isFee
then renderIPTwice ip ipWF (feeAmor (fee cl) (map trdOf5 ip) (map trdOf5 ipWF))
else renderIP ip
_ -> defaultLayout $ displayInputForm MsgCalculatorValidation widget enctype
getLoanR :: Handler Html
getLoanR = getLoanFromSession >>= \l ->
case l of
Just loan -> do
let ip = presentLoan loan
ipWF = presentLoanWithoutFee loan
cl = calc $ newAbsLoan loan
(widget, enctype) <- generateFormPost $ loanForm (loan) (loanValidation loan)
(widgetFin, enctypeFin) <- generateFormPost $ financingForm initFinancing initErrors
defaultLayout $ case (anyError $ loanValidation loan) of
True -> displayInputForm MsgCalculatorValidation widget enctype
False -> do
let isFee = feeAmountS loan /= Nothing && feeAmountS loan /= Just 0 ||
feePercentS loan /= Nothing && feePercentS loan /= Just 0
displayInputForm MsgLoanScreen widget enctype
renderLoanOverview loan (fee cl)
displayInputFormFin loan MsgFinancing widgetFin enctypeFin
renderFoldedInstalmentPlan (installments cl) (rNom cl) (rEffRec cl)
if isFee
then renderIPTwice ip ipWF (feeAmor (fee cl) (map trdOf5 ip) (map trdOf5 ipWF))
else renderIP ip
_ -> redirect HomeR --defaultLayout $ displayInputForm MsgCalculatorValidation widget enctype
-- | Storing loan details in the session for the purpose of retrieving calculated details via GET (CSV, XLS, ...)
setLoanToSession :: MonadHandler m => Loan -> m ()
setLoanToSession loan = do
setSession "Loan" (pack $ ccConfFun $ loanS loan)
setSession "Principal" (pack $ show $ principalS loan)
setSession "Duration" (pack $ show $ durationS loan)
setSession "Rate" (pack $ show $ rateS loan)
setSession "Delay" (pack $ show $ delayS loan)
setSession "Balloon" (pack $ show $ balloonS loan)
setSession "ExtDur" (pack $ show $ extDurS loan)
setSession "ExtRate" (pack $ show $ extRateS loan)
setSession "FeeAmt" (pack $ show $ feeAmountS loan)
setSession "FeePer" (pack $ show $ feePercentS loan)
renderLoanOverview :: Loan -> Amount -> Widget
renderLoanOverview l feee = do
let isFee = feee > 0
[whamlet|
<h2 #result>_{MsgLoan}: #{ccConfName $ loanS l}
<table .table .table-bordered .table-layout-fixed>
<tr>
<td .strong .my-text-center>#{showAmtWithLen 10 $ principalS l}
<td .strong .my-text-center>#{show $ durationS l} _{MsgMonths $ durationS l}
<td .strong .my-text-center>#{showWithLenDec 7 3 $ rateS l * 100} %
<tr>
$maybe del <- delayS l
$if del > 0
<td .small .my-text-right>_{MsgDeferrment}
<td .strong colspan=2>#{show del} _{MsgMonths del}
<tr>
$maybe bal <- balloonS l
$if isBalloon (clType $ loanS l)
<td .small .my-text-right colspan=2>_{MsgBalloon}
<td .strong>#{showAmtWithLen 10 $ bal}
<tr>
$maybe ext <- extDurS l
$if isUnfoldedBalloon (clType $ loanS l)
<td .small .my-text-right colspan=2>_{MsgMaxExtDur}
<td .strong>#{show ext} _{MsgMonths ext}
<tr>
$maybe extR <- extRateS l
$if isUnfoldedBalloon (clType $ loanS l)
<td .small .my-text-right colspan=2>_{MsgExtRate}
<td .strong>#{show (100 * extR)} %
<tr>
$maybe fA <- feeAmountS l
<td .small .my-text-right colspan=2>_{MsgFeeAmt}
<td .strong>#{showAmtWithLen 10 fA}
<tr>
$maybe fP <- feePercentS l
<td .small .my-text-right colspan=2>_{MsgFeePercent}
<td .strong>#{showWithLenDec 7 3 (100 * fP)} %
<tr>
$if isFee
<td .small .my-text-right colspan=2>_{MsgFeeAmt}
<td .strong>#{showAmtWithLen 10 feee}
|]
renderFoldedInstalmentPlan :: InstPlan -> [Double] -> [Double] -> Widget
renderFoldedInstalmentPlan ip rs ers = do
let fip = zip3 ip rs ers
[whamlet|
<h2>_{MsgFIP}
<table .table .table-hover>
<tr>
<th .my-text-right>_{MsgInstallment}
<th .my-text-right>_{MsgNbrInst}
<th .my-text-right>_{MsgNomRate}
<th .my-text-right>_{MsgRecEffRate}
$forall (fipl,rN, rE) <- fip
<tr>
<td .my-text-amount>#{showAmtWithLen 10 (snd fipl)}
<td .my-text-amount>#{show $ fst fipl}
<td .my-text-amount>#{showWithLenDec 13 9 $ (rN * 100)} %
<td .my-text-amount>#{showWithLenDec 7 3 $ (cN2E rE * 100)} %
|]
-- | Render the full instalment plan as an HTML table widget, one row per
-- instalment, with a footer row containing the totals of the instalment,
-- repayment and interest columns.
renderIP :: AmorPlan -> Widget
renderIP ip = do
let ipc = zip ip [1::(Int)..]
let (tiAmt,tiRep,tiI) = total ip
[whamlet|
<h2>_{MsgFullInstPlan}
<table .table .table-hover>
<tr>
<th .my-text-right> ##
<th .my-text-right>_{MsgInstallment}
<th .my-text-right>_{MsgRepayment}
<th .my-text-right>_{MsgInterestPaid}
<th .my-text-right>_{MsgPrincipalAfterPayment}
<th .my-text-right>_{MsgLateInterest}
$forall (ipl,counter) <- ipc
<tr>
<td .my-text-amount>#{counter}
<td .my-text-amount>#{showAmtWithLen 10 (fstOf5 ipl)}
<td .my-text-amount>#{showAmtWithLen 10 (sndOf5 ipl)}
<td .my-text-amount>#{showAmtWithLen 10 (trdOf5 ipl)}
<td .my-text-amount>#{showAmtWithLen 10 (frthOf5 ipl)}
<td .my-text-amount>#{showAmtWithLen 10 (fvthOf5 ipl)}
<tfoot>
<tr .footer>
<td .my-text-right>_{MsgTotal}
<td .my-text-amount>#{showAmtWithLen 10 tiAmt}
<td .my-text-amount>#{showAmtWithLen 10 tiRep}
<td .my-text-amount>#{showAmtWithLen 10 tiI}
<td .my-text-amount>
<td .my-text-amount>
|]
renderIPTwice :: AmorPlan -> AmorPlan -> [Amount] -> Widget
renderIPTwice ip ipWF fees = do
let ipc = zip4 ip ipWF fees [1::(Int)..]
let (tiAmt,tiRep,tiI) = total ip
let (tiAmtWF,tiRepWF,tiIWF) = total ipWF
[whamlet|
<h2>_{MsgFullInstPlan}
<table .table .table-hover>
<tr>
<th colspan="6" .td-border-right .my-text-center>_{MsgSelectedProduct}
<th colspan="5" .td-border-right .my-text-center>_{MsgReferenceProductWithoutFee}
<th .my-text-center>
<tr>
<th .my-text-right> ##
<th .my-text-right>_{MsgInstallment}
<th .my-text-right>_{MsgRepayment}
<th .my-text-right>_{MsgInterestPaid}
<th .my-text-right>_{MsgPrincipalAfterPayment}
<th .my-text-right .td-border-right>_{MsgLateInterest}
<th .my-text-right>_{MsgInstallment}
<th .my-text-right>_{MsgRepayment}
<th .my-text-right>_{MsgInterestPaid}
<th .my-text-right>_{MsgPrincipalAfterPayment}
<th .my-text-right .td-border-right>_{MsgLateInterest}
<th .my-text-right>_{MsgFeeAmortisation}
$forall (ipl, iplWF, fee, counter) <- ipc
<tr>
<td .my-text-amount>#{counter}
<td .my-text-amount>#{showAmtWithLen 10 (fstOf5 ipl)}
<td .my-text-amount>#{showAmtWithLen 10 (sndOf5 ipl)}
<td .my-text-amount>#{showAmtWithLen 10 (trdOf5 ipl)}
<td .my-text-amount>#{showAmtWithLen 10 (frthOf5 ipl)}
<td .my-text-amount .td-border-right>#{showAmtWithLen 10 (fvthOf5 ipl)}
<td .my-text-amount>#{showAmtWithLen 10 (fstOf5 iplWF)}
<td .my-text-amount>#{showAmtWithLen 10 (sndOf5 iplWF)}
<td .my-text-amount>#{showAmtWithLen 10 (trdOf5 iplWF)}
<td .my-text-amount>#{showAmtWithLen 10 (frthOf5 iplWF)}
<td .my-text-amount .td-border-right>#{showAmtWithLen 10 (fvthOf5 iplWF)}
<td .my-text-amount>#{showAmtWithLen 10 fee}
<tfoot>
<tr .footer>
<td .my-text-right>_{MsgTotal}
<td .my-text-amount>#{showAmtWithLen 10 tiAmt}
<td .my-text-amount>#{showAmtWithLen 10 tiRep}
<td .my-text-amount>#{showAmtWithLen 10 tiI}
<td .my-text-amount>
<td .my-text-amount .td-border-right>
<td .my-text-amount>#{showAmtWithLen 10 tiAmtWF}
<td .my-text-amount>#{showAmtWithLen 10 tiRepWF}
<td .my-text-amount>#{showAmtWithLen 10 tiIWF}
<td .my-text-amount>
<td .my-text-amount .td-border-right>
<td .my-text-amount>
|]
|
bartoszw/yelca
|
Handler/Loan.hs
|
gpl-2.0
| 12,972 | 0 | 28 | 5,199 | 1,395 | 705 | 690 | -1 | -1 |
{- |
Module : $EmptyHeader$
Description : <optional short description entry>
Copyright : (c) <Authors or Affiliations>
License : GPLv2 or higher, see LICENSE.txt
Maintainer : <email>
Stability : unstable | experimental | provisional | stable | frozen
Portability : portable | non-portable (<reason>)
<optional description>
-}
module Proof where
import Structured
import Parser
import StaticAnalysis
prove :: LogicGraph -> Bool -> String -> [String] -> IO()
prove logicGraph flat spec raw_goals = do
if flat then proveFlat th (getGoals th)
else proveStruct th env (getGoals th)
where
as = hetParse logicGraph spec
Just (env,th) = staticAnalysis as
getGoals (G_theory id (sig,ax)) =
map (G_sentence id . parse_sentence id sig) raw_goals
proveFlat th goals = do
res <- sequence (map (proveFlat1 th) goals)
putStrLn (show res)
proveFlat1 (G_theory id (sig,ax)) (G_sentence _ goal) =
prover id (sig,ax) (coerce1 goal)
proveStruct (G_theory id (sig,ax)) env goals = do
res <- sequence (map (prove1 env) goals)
putStrLn (show res)
where
prove1 :: Env -> G_sentence -> IO Proof_status
prove1 env g@(G_sentence id goal) = case env of
Basic_env (G_theory id' (sig,ax)) ->
prover id' (sig,ax) (coerce1 goal)
Intra_Translation_env th env' (G_morphism id' mor) ->
let goal' = coerce1 goal in
case inv_map_sentence id' mor goal' of
Just goal'' -> prove1 env' (G_sentence id' goal'')
Nothing -> proveFlat1 th g
Inter_Translation_env th env' (G_LTR tr) ->
prove_aux th
where
prove_aux (G_theory _ (sig,_)) =
case inv_tr_sen tr (coerce1 sig) (coerce1 goal) of
Just goal'' -> prove1 env' (G_sentence (source tr) goal'')
Nothing -> proveFlat1 th g
Extension_env _ env1 env2 -> do
res <- prove1 env1 g
case res of
Proved -> return Proved
_ -> prove1 env2 g
|
nevrenato/Hets_Fork
|
mini/Proof.hs
|
gpl-2.0
| 2,021 | 0 | 21 | 560 | 647 | 318 | 329 | 40 | 8 |
{---------------------------------------------------------------------}
{- Copyright 2015, 2016 Nathan Bloomfield -}
{- -}
{- This file is part of Feivel. -}
{- -}
{- Feivel is free software: you can redistribute it and/or modify -}
{- it under the terms of the GNU General Public License version 3, -}
{- as published by the Free Software Foundation. -}
{- -}
{- Feivel is distributed in the hope that it will be useful, but -}
{- WITHOUT ANY WARRANTY; without even the implied warranty of -}
{- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -}
{- GNU General Public License for more details. -}
{- -}
{- You should have received a copy of the GNU General Public License -}
{- along with Feivel. If not, see <http://www.gnu.org/licenses/>. -}
{---------------------------------------------------------------------}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Feivel.Grammar.Expr (
ToExpr, toExpr,
-- Expression Types
Expr(..),
Doc(..), DocLeaf(..),
StrExpr(..), StrExprLeafS, StrExprLeaf(..),
IntExpr(..), IntExprLeafS, IntExprLeaf(..),
BoolExpr(..), BoolExprLeafS, BoolExprLeaf(..),
RatExpr(..), RatExprLeafS, RatExprLeaf(..),
ZZModExpr(..), ZZModExprLeafS, ZZModExprLeaf(..),
ListExpr(..), ListExprLeafS, ListExprLeaf(..), ListGuard(..),
MatExpr(..), MatExprLeafS, MatExprLeaf(..),
PolyExpr(..), PolyExprLeafS, PolyExprLeaf(..),
PermExpr(..), PermExprLeafS, PermExprLeaf(..),
MacExpr(..), MacExprLeafS, MacExprLeaf(..),
TupleExpr(..), TupleExprLeafS, TupleExprLeaf(..)
) where
import Feivel.Grammar.Util
import Feivel.Grammar.Doc
import Feivel.Grammar.ZZMod
import Feivel.Grammar.Perm
import Feivel.Grammar.Mac
import Feivel.Grammar.Poly
import Feivel.Grammar.Rat
import Feivel.Grammar.Mat
import Feivel.Grammar.Str
import Feivel.Grammar.Bool
import Feivel.Grammar.List
import Feivel.Grammar.Int
import Feivel.Grammar.Tuple
{------------}
{- :IntExpr -}
{------------}
type IntExprLeafS = IntExprLeaf
IntExpr StrExpr BoolExpr RatExpr ZZModExpr
ListExpr MatExpr TupleExpr PolyExpr MacExpr
Expr
newtype IntExpr = IntExpr
{ unIntExpr :: AtLocus IntExprLeafS
} deriving (Eq, Show)
instance HasLocus IntExpr where
locusOf = locusOf . unIntExpr
instance Typed IntExpr where typeOf _ = ZZ
{------------}
{- :StrExpr -}
{------------}
type StrExprLeafS = StrExprLeaf
IntExpr StrExpr BoolExpr RatExpr ZZModExpr
ListExpr MatExpr TupleExpr PolyExpr MacExpr
Expr
newtype StrExpr = StrExpr
{ unStrExpr :: AtLocus StrExprLeafS
} deriving (Eq, Show)
instance HasLocus StrExpr where
locusOf = locusOf . unStrExpr
instance Typed StrExpr where typeOf _ = SS
{-------------}
{- :BoolExpr -}
{-------------}
type BoolExprLeafS = BoolExprLeaf Expr IntExpr BoolExpr ListExpr MatExpr TupleExpr
newtype BoolExpr = BoolExpr
{ unBoolExpr :: AtLocus BoolExprLeafS
} deriving (Eq, Show)
instance HasLocus BoolExpr where
locusOf = locusOf . unBoolExpr
instance Typed BoolExpr where typeOf _ = BB
{------------}
{- :RatExpr -}
{------------}
type RatExprLeafS = RatExprLeaf Expr BoolExpr IntExpr ListExpr MatExpr RatExpr TupleExpr
newtype RatExpr = RatExpr
{ unRatExpr :: AtLocus RatExprLeafS
} deriving (Eq, Show)
instance HasLocus RatExpr where
locusOf = locusOf . unRatExpr
instance Typed RatExpr where typeOf _ = QQ
{--------------}
{- :ZZModExpr -}
{--------------}
type ZZModExprLeafS = ZZModExprLeaf Expr BoolExpr IntExpr ListExpr MatExpr ZZModExpr TupleExpr
newtype ZZModExpr = ZZModExpr
{ unZZModExpr :: AtLocus (OfType ZZModExprLeafS)
} deriving (Eq, Show)
instance HasLocus ZZModExpr where
locusOf = locusOf . unZZModExpr
instance Typed ZZModExpr where
typeOf (ZZModExpr (_ :# typ :@ _)) = typ
{-------------}
{- :ListExpr -}
{-------------}
type ListExprLeafS = ListExprLeaf Expr BoolExpr IntExpr ListExpr MatExpr TupleExpr
newtype ListExpr = ListExpr
{ unListExpr :: AtLocus (OfType ListExprLeafS)
} deriving (Eq, Show)
instance HasLocus ListExpr where
locusOf = locusOf . unListExpr
instance Typed ListExpr where
typeOf (ListExpr (_ :# typ :@ _)) = ListOf typ
{- :MatExpr -}
type MatExprLeafS = MatExprLeaf Expr BoolExpr IntExpr ListExpr MatExpr TupleExpr
newtype MatExpr = MatExpr
{ unMatExpr :: AtLocus (OfType MatExprLeafS)
} deriving (Eq, Show)
instance HasLocus MatExpr where
locusOf = locusOf . unMatExpr
instance Typed MatExpr where
typeOf (MatExpr (_ :# typ :@ _)) = MatOf typ
{- :PolyExpr -}
type PolyExprLeafS = PolyExprLeaf Expr BoolExpr IntExpr ListExpr MatExpr PolyExpr TupleExpr
newtype PolyExpr = PolyExpr
{ unPolyExpr :: AtLocus (OfType PolyExprLeafS)
} deriving (Eq, Show)
instance HasLocus PolyExpr where
locusOf = locusOf . unPolyExpr
instance Typed PolyExpr where
typeOf (PolyExpr (_ :# typ :@ _)) = PolyOver typ
{- :PermExpr -}
type PermExprLeafS = PermExprLeaf Expr BoolExpr IntExpr ListExpr MatExpr PermExpr TupleExpr
newtype PermExpr = PermExpr
{ unPermExpr :: AtLocus (OfType PermExprLeafS)
} deriving (Eq, Show)
instance HasLocus PermExpr where
locusOf = locusOf . unPermExpr
instance Typed PermExpr where
typeOf (PermExpr (_ :# typ :@ _)) = PermOf typ
{- :MacExpr -}
type MacExprLeafS = MacExprLeaf Expr BoolExpr IntExpr ListExpr MatExpr MacExpr TupleExpr
newtype MacExpr = MacExpr
{ unMacExpr :: AtLocus (OfType MacExprLeafS)
} deriving (Eq, Show)
instance HasLocus MacExpr where
locusOf = locusOf . unMacExpr
instance Typed MacExpr where
typeOf (MacExpr (_ :# typ :@ _)) = MacTo typ
{- :TupleExpr -}
type TupleExprLeafS = TupleExprLeaf Expr BoolExpr IntExpr ListExpr MatExpr TupleExpr
newtype TupleExpr = TupleExpr
{ unTupleExpr :: AtLocus (OfType TupleExprLeafS)
} deriving (Eq, Show)
instance HasLocus TupleExpr where
locusOf = locusOf . unTupleExpr
instance Typed TupleExpr where
typeOf (TupleExpr (_ :# typ :@ _)) = typ
{- :Doc -}
newtype Doc = Doc
{ unDoc :: AtLocus (DocLeaf Expr Doc StrExpr)
} deriving (Eq, Show)
instance HasLocus Doc where
locusOf = locusOf . unDoc
instance Typed Doc where typeOf _ = DD
data Expr
= DocE Doc
| BoolE BoolExpr
| StrE StrExpr
| IntE IntExpr
| RatE RatExpr
| ZZModE ZZModExpr
| ListE ListExpr
| MatE MatExpr
| PolyE PolyExpr
| PermE PermExpr
| MacE MacExpr
| TupleE TupleExpr
deriving (Eq, Show)
instance HasLocus Expr where
locusOf (DocE x) = locusOf x
locusOf (StrE x) = locusOf x
locusOf (IntE x) = locusOf x
locusOf (BoolE x) = locusOf x
locusOf (RatE x) = locusOf x
locusOf (ZZModE x) = locusOf x
locusOf (ListE x) = locusOf x
locusOf (MatE x) = locusOf x
locusOf (PolyE x) = locusOf x
locusOf (PermE x) = locusOf x
locusOf (MacE x) = locusOf x
locusOf (TupleE x) = locusOf x
{- :ToExpr -}
class ToExpr t where
toExpr :: t -> Expr
instance ToExpr Expr where toExpr = id
instance ToExpr Doc where toExpr = DocE
instance ToExpr BoolExpr where toExpr = BoolE
instance ToExpr StrExpr where toExpr = StrE
instance ToExpr IntExpr where toExpr = IntE
instance ToExpr RatExpr where toExpr = RatE
instance ToExpr ZZModExpr where toExpr = ZZModE
instance ToExpr ListExpr where toExpr = ListE
instance ToExpr MatExpr where toExpr = MatE
instance ToExpr PolyExpr where toExpr = PolyE
instance ToExpr PermExpr where toExpr = PermE
instance ToExpr MacExpr where toExpr = MacE
instance ToExpr TupleExpr where toExpr = TupleE
-- NB: Not a fan of "no locus" here
-- NB: Is this even needed? Can we use put instead?
instance ToExpr Text where toExpr t = StrE $ StrExpr $ StrConst t :@ NullLocus
instance Typed Expr where
typeOf (StrE x) = typeOf x
typeOf (IntE x) = typeOf x
typeOf (RatE x) = typeOf x
typeOf (BoolE x) = typeOf x
typeOf (ListE x) = typeOf x
typeOf (MacE x) = typeOf x
typeOf (DocE x) = typeOf x
typeOf (MatE x) = typeOf x
typeOf (PolyE x) = typeOf x
typeOf (PermE x) = typeOf x
typeOf (ZZModE x) = typeOf x
typeOf (TupleE x) = typeOf x
instance HasLocus Text where
locusOf _ = NullLocus
|
nbloomf/feivel
|
src/Feivel/Grammar/Expr.hs
|
gpl-3.0
| 8,493 | 0 | 11 | 1,926 | 2,233 | 1,243 | 990 | 184 | 0 |
module Main where
import Development.Hake
import Development.Hake.FunSetRaw
import Variables
main = hake rules
rules = [
dflt [ "hello" ]
,
rule exeSffx objSffx $ \t s -> [ [cc, "-o", t] ++ s ]
,
rule objSffx cSffx $ \_ s -> [ [cc, "-c"] ++ s ]
,
task "clean" [ ["rm", "hello" ++ exeSffx, "hello" ++ objSffx] ]
]
|
YoshikuniJujo/hake_haskell
|
examples/testHakefileIs/hakeMain.hs
|
gpl-3.0
| 326 | 0 | 10 | 75 | 135 | 78 | 57 | 10 | 1 |
import XMonad
main = xmonad $ def
{
borderWidth = 10,
terminal = "terminator",
normalBorderColor = "#ffffff",
focusedBorderColor = "#0000ee"
}
|
wno-git/dotfiles
|
xmonad/.xmonad/xmonad.hs
|
gpl-3.0
| 184 | 0 | 7 | 62 | 39 | 24 | 15 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Util.Date (
PrettyDiffTime (..), NominalDiffTime, Rfc822Date (..)
, getDiffTime
) where
import Prelude
import Control.Monad
import Control.Monad.IO.Class
import Data.Time.Clock (
UTCTime (..), NominalDiffTime, getCurrentTime, diffUTCTime
)
import Data.Time.Format (formatTime)
import System.Locale (defaultTimeLocale, rfc822DateFormat)
import Util.Pretty (PrettyDiffTime (..))
-- | A new type to wrap a date so it will be displayed in a standard way.
newtype Rfc822Date = Rfc822Date { unRfc822Date :: UTCTime }
instance Show Rfc822Date where
show = formatTime defaultTimeLocale rfc822DateFormat . unRfc822Date
-- | Returns the difference of time between the current time and the given time.
getDiffTime :: MonadIO m => UTCTime -> m NominalDiffTime
getDiffTime time = (`diffUTCTime` time) `liftM` liftIO getCurrentTime
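-- Illustrative usage sketch (not from the original module); 'getCurrentTime'
-- comes from the Data.Time.Clock import above, and the rendered date varies
-- with the current time:
--
-- >>> now <- getCurrentTime
-- >>> dt <- getDiffTime now
-- >>> dt >= 0
-- True
--
-- 'show (Rfc822Date now)' renders the timestamp in RFC 822 form,
-- e.g. "Thu,  1 Jan 2015 12:00:00 UTC".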
|
RaphaelJ/getwebb.org
|
Util/Date.hs
|
gpl-3.0
| 894 | 0 | 7 | 146 | 185 | 115 | 70 | 17 | 1 |
{-# LANGUAGE TemplateHaskell, TypeFamilies, TypeSynonymInstances, FlexibleInstances, DeriveDataTypeable #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Web.Crew.User.DB where
import Control.Monad.Reader
import Control.Monad.State
import Data.Acid
import qualified Data.Map.Strict as M
import qualified Data.ByteString as B
import qualified Data.Text as T
import Web.Crew.User.Types
peekUserWithEmail :: T.Text -> Query Users (Maybe User)
peekUserWithEmail email = do
(Users _ users) <- ask
let users' = M.elems users
user = foldl accum Nothing users'
accum Nothing u = if _userEmail u == email then Just u else Nothing
accum (Just u) _ = Just u
return user
peekUserWithId :: Id -> Query Users (Maybe User)
peekUserWithId uid = do
(Users _ users) <- ask
return $ M.lookup uid users
updateUser :: User -> Update Users ()
updateUser user = do
(Users n m) <- get
let m' = M.update (\_ -> Just user) (_userId user) m
put $ Users n m'
addUser :: T.Text -> T.Text -> B.ByteString -> B.ByteString -> Update Users ()
addUser name email pass salt = do
(Users n m) <- get
let u = User (Id n) name email pass salt zeroDay
m' = M.insert (Id n) u m
put $ Users (succ n) m'
peekUsers :: Query Users [User]
peekUsers = do
(Users _ m) <- ask
return $ M.elems m
$(makeAcidic ''Users ['peekUserWithEmail, 'peekUserWithId, 'updateUser, 'addUser, 'peekUsers])
|
schell/scottys-crew
|
Web/Crew/User/DB.hs
|
gpl-3.0
| 1,480 | 0 | 13 | 356 | 536 | 276 | 260 | 38 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Misc.HtmlSpec where
import Data.Text (Text)
import qualified Data.Text as Text
import Misc.Html
import SpecHelper
data Person = Person
{ nP :: String
, mP :: String
, oP :: String
}
spec :: Spec
spec =
describe "Core.HtmlSpec" $ do
context "Token parser" $
it "parses simple string" $ do
res <-
[parse|html
div { class= 'hello', id= "hihi" }
| hi
|]
res `shouldBe` "<html><div class=\"hello\" id=\"hihi\">hi</div></html>"
context "UTF-8 Text" $
it "parses simple utf-8" $ do
res <-
[parse|html
div
| HU
|]
res `shouldBe` Text.pack "<html><div>HU</div></html>"
context "Monadic Context" $ do
it "parses monad combination" $ do
let inner =
[parse|p
| inner
|] :: IO Text
res <-
[parse|html
div
^ inner
| outer
|]
res `shouldBe` Text.pack "<html><div><p>inner</p>outer</div></html>"
it "parses monad combinating function" $ do
let inner v =
[parse|p
| inner
= v
|]
let arg = "center" :: Text
res <-
[parse|html
div
^ inner arg
| outer
|]
res `shouldBe`
Text.pack "<html><div><p>innercenter</p>outer</div></html>"
context "Simple Text" $ do
it "parses simple tag" $ do
res <-
[parse|html
div
| Hello
|]
res `shouldBe` "<html><div>Hello</div></html>"
it "parses simple variable" $ do
let theValue = "VALUE" :: Text
res <-
[parse|html
div
= theValue
|]
res `shouldBe` "<html><div>VALUE</div></html>"
it "parses simple function" $ do
let theFunc x = Text.concat ["---", x, "---"]
let theVal = "HELLO"
res <-
[parse|html
div
= theFunc theVal
|]
res `shouldBe` "<html><div>---HELLO---</div></html>"
it "parses simple function with string" $ do
let theFunc x = Text.concat ["---", x, "---"]
res <-
[parse|html
div
= theFunc "HIHI"
|]
res `shouldBe` "<html><div>---HIHI---</div></html>"
it "parses simple tag" $ do
res <-
[parse|html
div
| Hello
|]
res `shouldBe` "<html><div>Hello</div></html>"
it "processes simple map statement" $ do
let people = ["A", "B"] :: [Text]
res <-
[parse|div
- map people -> name
p
= name
|]
res `shouldBe` "<div><p>A</p><p>B</p></div>"
it "processes complex map statement" $ do
let people = [Person "A" "Bb" "C", Person "D" "Ee" "F"]
res <-
[parse|div
- map people -> Person aE bE cE
p
$ aE
span { class $ reverse bE }
$ cE
|]
res `shouldBe`
"<div><p>A<span class=\"bB\">C</span></p><p>D<span class=\"eE\">F</span></p></div>"
context "If statement" $ do
it "parses true statement" $ do
res <-
[parse|html
div
- if trueStatement
p
| Hello
|]
res `shouldBe` "<html><div><p>Hello</p></div></html>"
it "parses false statement" $ do
res <-
[parse|html
div
- if falseStatement
p
| Hello
|]
res `shouldBe` "<html><div></div></html>"
it "applies true function" $ do
res <-
[parse|html
div
- if greaterThan four three
p
| Hello
|]
res `shouldBe` "<html><div><p>Hello</p></div></html>"
it "applies false function" $ do
res <-
[parse|html
div
- if greaterThan three four
p
| Hello
|]
res `shouldBe` "<html><div></div></html>"
where
trueStatement = True
falseStatement = False
greaterThan = (>) :: Integer -> Integer -> Bool
three = 3 :: Integer
four = 4 :: Integer
main :: IO ()
main = hspec spec
|
inq/agitpunkt
|
spec/Misc/HtmlSpec.hs
|
agpl-3.0
| 4,491 | 0 | 18 | 1,897 | 823 | 439 | 384 | 100 | 1 |
{-# language FlexibleInstances, MultiParamTypeClasses, DeriveFunctor #-}
-- | state monad that can be caught
module Control.Monad.CatchState (
CatchState,
runCatchState,
) where
import Control.Monad.State.Class
import Control.Monad.IO.Class
import Control.Monad.CatchIO as CatchIO
import Control.Concurrent.MVar
import Utils
-- | Uses an MVar internally to access the state.
-- It makes sure the MVar is never left empty for long
-- (not even when an exception can be thrown).
data CatchState state m a =
CatchState {innerAction :: (MVar state -> m a)}
deriving (Functor)
instance Applicative m => Applicative (CatchState state m) where
pure a = CatchState $ const $ pure a
CatchState f <*> CatchState a = CatchState $ \ mvar ->
f mvar <*> a mvar
instance Monad m => Monad (CatchState state m) where
(CatchState a) >>= b =
CatchState $ \ mvar -> do
tmp <- a mvar
innerAction (b tmp) mvar
return x = CatchState (const $ return x)
instance MonadCatchIO m => MonadState state (CatchState state m) where
get = CatchState $ \ mvar ->
block $ io $ readMVar mvar
put state = CatchState $ \ mvar ->
block $ io $ ignore $ swapMVar mvar state
instance MonadIO m => MonadIO (CatchState state m) where
liftIO = CatchState . const . liftIO
instance MonadCatchIO m => MonadCatchIO (CatchState state m) where
catch (CatchState action) handler = CatchState $ \ mvar ->
CatchIO.catch (action mvar) (\ e -> innerAction (handler e) mvar)
block (CatchState action) = CatchState $ \ mvar ->
CatchIO.block $ action mvar
unblock (CatchState action) = CatchState $ \ mvar ->
CatchIO.unblock $ action mvar
runCatchState :: MonadIO m => CatchState state m a -> state -> m (a, state)
runCatchState (CatchState action) state = do
mvar <- io $ newMVar state
a <- action mvar
endState <- io $ takeMVar mvar
return (a, endState)
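-- Illustrative usage sketch (an assumption, not part of the original module):
-- a counter threaded through 'CatchState' over IO, using the 'MonadState'
-- instance above ('modify' comes from Control.Monad.State.Class).
--
-- >>> runCatchState (modify (+ 1) >> get) (0 :: Int)
-- (1,1)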
|
nikki-and-the-robots/nikki
|
src/Control/Monad/CatchState.hs
|
lgpl-3.0
| 1,948 | 0 | 13 | 455 | 628 | 320 | 308 | 42 | 1 |
{-# LANGUAGE DeriveFunctor #-}
-- Non-monadic tree labeling:
import Control.Monad
data Tr a = Lf a | Br (Tr a) (Tr a)
deriving Show
tr1 = Br (Lf 'a') (Br (Br (Lf 'b') (Lf 'a')) (Lf 'd'))
type Lt a = (Tr (S, a))
type S = Int
label :: Tr a -> Lt a
label tr = snd (lab tr 0)
where
lab :: Tr a -> S -> (S, Lt a)
lab (Lf contents) n = ((n+1), (Lf (n, contents)))
lab (Br l r) n0 = let (n1, l') = lab l n0
(n2, r') = lab r n1
in (n2, Br l' r')
-- Monadic tree labeling:
newtype Labeled anytype = Labeled (S -> (S, anytype)) deriving Functor
instance Monad Labeled where
return contents = Labeled (\st -> (st, contents))
Labeled fst0 >>= fany1 =
Labeled $ \st0 ->
let (st1, any1) = fst0 st0
Labeled fst1 = fany1 any1
in fst1 st1
instance Applicative Labeled where
pure = return
(<*>) = ap
mlabel :: Tr anytype -> Lt anytype
mlabel tr = let Labeled mt = mkm tr
in snd (mt 0)
mkm :: Tr anytype -> Labeled (Lt anytype)
mkm (Lf x)
= updateState >>= \n -> return $ Lf (n,x)
-- Alternative: do n <- updateState
-- return $ Lf (n,x)
mkm (Br l r)
= mkm l >>= \l' ->
mkm r >>= \r' ->
return $ (Br l' r')
-- Alternative: do l' <- mkm l
-- r' <- mkm r
-- return $ (Br l' r')
updateState :: Labeled S
updateState = Labeled (\n -> ((n+1),n))
main = print $ mlabel tr1
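-- Expected output (illustrative; relies on the derived Show instance): both
-- 'label' and 'mlabel' number the leaves left to right starting from 0, so
-- running 'main' prints
--
-- > Br (Lf (0,'a')) (Br (Br (Lf (1,'b')) (Lf (2,'a'))) (Lf (3,'d')))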
|
egaburov/funstuff
|
Haskell/monads/zenmonad/Mumble002.hs
|
apache-2.0
| 1,521 | 0 | 12 | 544 | 637 | 335 | 302 | 38 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Spark.Core.Internal.TypesFunctions(
isNullable,
iInnerStrictType,
columnType,
unsafeCastType,
intType,
arrayType,
compatibleTypes,
arrayType',
frameTypeFromCol,
colTypeFromFrame,
canNull,
structField,
structType,
structTypeFromFields,
structTypeTuple,
structTypeTuple',
tupleType,
structName,
iSingleField,
-- cellType,
) where
import Control.Monad.Except
import qualified Data.List.NonEmpty as N
import Control.Arrow(second)
import Data.Function(on)
import Data.List(sort, nub, sortBy)
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Data.Text(Text, intercalate)
import qualified Data.Vector as V
import Formatting
import Spark.Core.Internal.TypesStructures
import Spark.Core.StructuresInternal
import Spark.Core.Internal.RowGenericsFrom(FromSQL(..), TryS)
import Spark.Core.Internal.Utilities
import Spark.Core.Internal.TypesStructuresRepr(DataTypeRepr, DataTypeElementRepr)
import qualified Spark.Core.Internal.TypesStructuresRepr as DTR
import Spark.Core.Try
-- Performs a cast of the type.
-- This may throw an error if the required type b is not
-- compatible with the type embedded in a.
unsafeCastType :: SQLType a -> SQLType b
-- TODO add more error checking here.
unsafeCastType (SQLType dt) = SQLType dt
-- Given a sql type tag, returns the equivalent data type for a column or a blob
-- (internal)
columnType :: SQLType a -> DataType
columnType (SQLType dt) = dt
-- (internal)
isNullable :: DataType -> Bool
isNullable (StrictType _) = False
isNullable (NullableType _) = True
-- *** Creation of data types ***
-- Takes a data type (assumed to be that of a column or cell) and returns the
-- corresponding dataset type.
-- This should only be used when talking to Spark.
-- All visible operations in Karps use Cell types instead.
-- TODO should it use value or _1? Both seem to be used in Spark.
frameTypeFromCol :: DataType -> StructType
frameTypeFromCol (StrictType (Struct struct)) = struct
frameTypeFromCol dt = _structFromUnfields [("value", dt)]
-- Given the structural type for a dataframe or a dataset, returns the
-- equivalent column type.
colTypeFromFrame :: StructType -> DataType
colTypeFromFrame st @ (StructType fs) = case V.toList fs of
[StructField {
structFieldName = fname,
structFieldType = (StrictType dt)}] | fname == "value" ->
StrictType dt
_ -> StrictType (Struct st)
-- Checks whether two data types are compatible.
compatibleTypes :: DataType -> DataType -> Bool
compatibleTypes (StrictType sdt) (StrictType sdt') = _compatibleTypesStrict sdt sdt'
compatibleTypes (NullableType sdt) (NullableType sdt') = _compatibleTypesStrict sdt sdt'
compatibleTypes _ _ = False
-- ***** INSTANCES *****
-- In the case of source introspection, datatypes may be returned.
instance FromSQL DataType where
_cellToValue = _cellToValue >=> _sDataTypeFromRepr
_sDataTypeFromRepr :: DataTypeRepr -> TryS DataType
_sDataTypeFromRepr (DTR.DataTypeRepr l) = snd <$> _sToTreeRepr l
_sToTreeRepr :: [DataTypeElementRepr] -> TryS (Int, DataType)
_sToTreeRepr [] = throwError $ sformat "_sToTreeRepr: empty list"
_sToTreeRepr [dtr] | null (DTR.fieldPath dtr) =
-- We are at a leaf, decode the leaf
_decodeLeaf dtr []
_sToTreeRepr l = do
let f dtr = case DTR.fieldPath dtr of
[] -> []
(h : t) -> [(h, dtr')] where dtr' = dtr { DTR.fieldPath = t }
let hDtrt = case filter (null . DTR.fieldPath) l of
[dtr] -> pure dtr
_ ->
throwError $ sformat ("_decodeList: invalid top with "%sh) l
let withHeads = concatMap f l
let g = myGroupBy withHeads
let groupst = M.toList g <&> \(h, l') ->
_sToTreeRepr l' <&> second (StructField (FieldName h))
groups <- sequence groupst
checkedGroups <- _packWithIndex groups
hDtr <- hDtrt
_decodeLeaf hDtr checkedGroups
_packWithIndex :: (Show t) => [(Int, t)] -> TryS [t]
_packWithIndex l = _check 0 $ sortBy (compare `on` fst) l
-- Checks that all the elements are indexed in order by their value.
-- It works by running a counter along each element and seeing that it is here.
_check :: (Show t) => Int -> [(Int, t)] -> TryS [t]
_check _ [] = pure []
_check n ((n', x):t) =
if n == n'
then (x : ) <$> _check (n+1) t
else
throwError $ sformat ("_check: could not match arguments at index "%sh%" for argument "%sh) n ((n', x):t)
_decodeLeaf :: DataTypeElementRepr -> [StructField] -> TryS (Int, DataType)
_decodeLeaf dtr l = _decodeLeafStrict dtr l <&> \sdt ->
if DTR.isNullable dtr
then (DTR.fieldIndex dtr, NullableType sdt)
else (DTR.fieldIndex dtr, StrictType sdt)
_decodeLeafStrict :: DataTypeElementRepr -> [StructField] -> TryS StrictDataType
-- The array type
_decodeLeafStrict dtr [sf] | DTR.typeId dtr == 11 =
pure $ ArrayType (structFieldType sf)
-- Structure types
_decodeLeafStrict dtr l | DTR.typeId dtr == 10 =
pure . Struct . StructType . V.fromList $ l
_decodeLeafStrict dtr [] = case DTR.typeId dtr of
0 -> pure IntType
1 -> pure StringType
2 -> pure BoolType
n -> throwError $ sformat ("_decodeLeafStrict: unknown type magic id "%sh) n
_decodeLeafStrict dtr l =
throwError $ sformat ("_decodeLeafStrict: cannot interpret dtr="%sh%" and fields="%sh) dtr l
_compatibleTypesStrict :: StrictDataType -> StrictDataType -> Bool
_compatibleTypesStrict IntType IntType = True
_compatibleTypesStrict DoubleType DoubleType = True
_compatibleTypesStrict StringType StringType = True
_compatibleTypesStrict (ArrayType et) (ArrayType et') = compatibleTypes et et'
_compatibleTypesStrict (Struct (StructType v)) (Struct (StructType v')) =
(length v == length v') &&
and (V.zipWith compatibleTypes (structFieldType <$> v) (structFieldType <$> v'))
_compatibleTypesStrict _ _ = False
tupleType :: SQLType a -> SQLType b -> SQLType (a, b)
tupleType (SQLType dt1) (SQLType dt2) =
SQLType $ structType [structField "_1" dt1, structField "_2" dt2]
intType :: DataType
intType = StrictType IntType
-- Builds a struct field from a field name (given as text) and a data type.
structField :: T.Text -> DataType -> StructField
structField txt = StructField (FieldName txt)
-- The strict structure type
structType :: [StructField] -> DataType
structType = StrictType . Struct . StructType . V.fromList
-- The strict array type
arrayType' :: DataType -> DataType
arrayType' = StrictType . ArrayType
-- Returns the equivalent data type that may be nulled.
canNull :: DataType -> DataType
canNull = NullableType . iInnerStrictType
-- Given a type, returns the corresponding array type.
-- This is preferred to using directly buildType, as it may encounter some
-- overlapping instances.
arrayType :: SQLType a -> SQLType [a]
arrayType (SQLType dt) = SQLType (arrayType' dt)
iInnerStrictType :: DataType -> StrictDataType
iInnerStrictType (StrictType st) = st
iInnerStrictType (NullableType st) = st
iSingleField :: DataType -> Maybe DataType
iSingleField (StrictType (Struct (StructType fields))) = case V.toList fields of
[StructField _ dt] -> Just dt
_ -> Nothing
iSingleField _ = Nothing
structName :: StructType -> Text
structName (StructType fields) =
"struct(" <> intercalate "," (unFieldName . structFieldName <$> V.toList fields) <> ")"
{-| Builds a type that is a tuple of all the given types.
Following the Spark and SQL convention, the indexing starts at 1.
-}
structTypeTuple :: N.NonEmpty DataType -> StructType
structTypeTuple dts =
let numFields = length dts
rawFieldNames = ("_" <> ) . show' <$> (1 N.:| [2..numFields])
fieldNames = N.toList $ unsafeFieldName <$> rawFieldNames
fieldTypes = N.toList dts
-- Unsafe call, but we know it is going to be all different fields
in forceRight $ structTypeFromFields (zip fieldNames fieldTypes)
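-- A small illustration (a sketch, not from the original module): a pair of
-- strict integers gets the usual Spark-style field names.
--
-- >>> structName (structTypeTuple (intType N.:| [intType]))
-- "struct(_1,_2)"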
{-| Returns a data type instead (the most common use case)
Note that unlike Spark and SQL, the indexing starts from 0.
-}
structTypeTuple' :: N.NonEmpty DataType -> DataType
structTypeTuple' = StrictType . Struct . structTypeTuple
structTypeFromFields :: [(FieldName, DataType)] -> Try StructType
structTypeFromFields [] = tryError "You cannot build an empty structure"
structTypeFromFields ((hfn, hdt):t) =
let fs = (hfn, hdt) : t
ct = StructType $ uncurry StructField <$> V.fromList fs
names = fst <$> fs
numNames = length names
numDistincts = length . nub $ names
in if numNames == numDistincts
then return ct
else tryError $ sformat ("Duplicate field names when building the struct: "%sh) (sort names)
_structFromUnfields :: [(T.Text, DataType)] -> StructType
_structFromUnfields l = StructType . V.fromList $ x where
x = [StructField (FieldName name) dt | (name, dt) <- l]
|
krapsh/kraps-haskell
|
src/Spark/Core/Internal/TypesFunctions.hs
|
apache-2.0
| 8,712 | 1 | 17 | 1,567 | 2,392 | 1,270 | 1,122 | 167 | 4 |
module Step_3_2 where
import Text.Html
page = thehtml <<
[ header << (thetitle << "Output")
, body <<
[ h1 << "A to-do list:"
, thediv << toDoHtml
]
]
-- We've changed the type of toDoItems to include a Bool value to
-- indicate if they've been done.
toDoItems :: [(Bool, String)] -- a list of tuples: each a Bool and String
toDoItems =
[ (True, "Pick up avocados")
, (True, "Make snacks")
, (False, "Clean house")
, (False, "Have party")
]
-- Haskellers often format lists with long items in this funny way!
renderToDo :: [String] -> [Html]
renderToDo ts = map (li <<) ts
toDoHtml :: Html
toDoHtml = ulist << renderToDo toDoItems
-- Try running this file. You'll find that it doesn't compile, and gives
-- an error on the line above. Just read the first two lines of the error and
-- see if you can see what the compiler is trying to tell you about the problem
-- with the types.
-- Fix the renderToDo function to fix the problem.
-- NEXT
-- What did you end up doing with the Bool component of the tuple? Did you
-- ignore it? Did you use it in the computation somehow? Did you notice that
-- by having a clear type, you were forced to think about it?
-- Change the renderToDo function to render the items that aren't done bold.
-- NEXT
-- For a challenge, change the renderToDo function to render only the first
-- not done item bold.
hint1 = map pred "Zpv!qspcbcmz!ibwf!up!hp!cbdl!up!opu!vtjoh!nbq/"
hint2 = map pred "Zpv!qspcbcmz!xjmm!offe!bopuifs!ifmqfs!gvodujpo/"
|
mzero/barley
|
seed/Chapter3/Step_3_2.hs
|
apache-2.0
| 1,554 | 0 | 9 | 345 | 194 | 121 | 73 | 19 | 1 |
-- http://www.reddit.com/r/dailyprogrammer/comments/2z68di/20150316_challenge_206_easy_recurrence_relations/
module RecurrenceRelations where
type Operator = Integer -> Integer
type Expression = String
createExpression :: Integer -> [Operator] -> Integer
createExpression seed = foldl (\_ x -> x seed) seed
parse :: Expression -> [Operator]
parse = map toOperator . words
toOperator :: String -> Operator
toOperator ('*':xs) = (*) (read xs :: Integer)
toOperator ('-':xs) = (-) (read xs :: Integer)
toOperator ('/':xs) = div (read xs :: Integer)
toOperator ('+':xs) = (+) (read xs :: Integer)
toOperator _ = error "Malformed expression"
recurrence :: Expression -> Integer -> [Integer]
recurrence expr = iterate (flip createExpression $ parse expr)
|
fffej/haskellprojects
|
daily-programmer/17-03-2015/RecurrenceRelations.hs
|
bsd-2-clause
| 766 | 0 | 8 | 119 | 261 | 141 | 120 | 15 | 1 |
module Rules.Cabal (cabalRules) where
import Base
import Data.Version
import Distribution.Package as DP
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse
import Distribution.Verbosity
import Expression
import GHC
import Rules.Actions
import Settings
cabalRules :: Rules ()
cabalRules = do
-- Cache boot package constraints (to be used in cabalArgs)
bootPackageConstraints %> \out -> do
bootPkgs <- interpretWithStage Stage0 getPackages
let pkgs = filter (\p -> p /= compiler && isLibrary p) bootPkgs
constraints <- forM (sort pkgs) $ \pkg -> do
need [pkgCabalFile pkg]
pd <- liftIO . readPackageDescription silent $ pkgCabalFile pkg
let identifier = package . packageDescription $ pd
version = showVersion . pkgVersion $ identifier
DP.PackageName name = DP.pkgName identifier
return $ name ++ " == " ++ version
writeFileChanged out . unlines $ constraints
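        -- The generated file holds one constraint per line in Cabal syntax,
        -- i.e. "<package-name> == <version>"; the entries below are
        -- illustrative only:
        --
        --   array == 0.5.1.0
        --   bytestring == 0.10.6.0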
-- Cache package dependencies
packageDependencies %> \out -> do
pkgs <- interpretWithStage Stage1 getPackages
pkgDeps <- forM (sort pkgs) $ \pkg ->
if pkg == rts
then return $ pkgNameString pkg
else do
need [pkgCabalFile pkg]
pd <- liftIO . readPackageDescription silent $ pkgCabalFile pkg
let depsLib = collectDeps $ condLibrary pd
depsExes = map (collectDeps . Just . snd) $ condExecutables pd
deps = concat $ depsLib : depsExes
depNames = [ name | Dependency (DP.PackageName name) _ <- deps ]
return . unwords $ pkgNameString pkg : sort depNames
writeFileChanged out . unlines $ pkgDeps
-- When the file exists, the packageConfiguration has been initialised
-- TODO: get rid of an extra file?
forM_ [Stage0, Stage1] $ \stage ->
packageConfigurationInitialised stage %> \out -> do
let target = PartialTarget stage cabal
pkgConf = packageConfiguration stage
removeDirectoryIfExists pkgConf
-- TODO: can we get rid of this fake target?
build $ fullTarget target (GhcPkg stage) [] [pkgConf]
let message = "Successfully initialised " ++ pkgConf
writeFileChanged out message
putSuccess message
collectDeps :: Maybe (CondTree v [Dependency] a) -> [Dependency]
collectDeps Nothing = []
collectDeps (Just (CondNode _ deps ifs)) = deps ++ concatMap f ifs
where
f (_, t, mt) = collectDeps (Just t) ++ collectDeps mt
|
quchen/shaking-up-ghc
|
src/Rules/Cabal.hs
|
bsd-3-clause
| 2,669 | 0 | 26 | 800 | 707 | 348 | 359 | 51 | 2 |
module Prompt
(
promptData
) where
import System.IO
type ParseFuncType a = (String -> Maybe a)
promptData :: String -> ParseFuncType a -> String -> IO a
promptData sprompt parseFunc failstr = do
putStr sprompt
hFlush stdout
input <- getLine
let maybea = parseFunc input
case maybea of
Just something -> return something
Nothing -> do
putStrLn failstr
promptData sprompt parseFunc failstr
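-- Illustrative usage sketch (an assumption, not from the original module);
-- 'parseInt' is a hypothetical parser supplied by the caller:
--
-- > parseInt :: String -> Maybe Int
-- > parseInt s = case reads s of { [(n, "")] -> Just n; _ -> Nothing }
-- >
-- > main :: IO ()
-- > main = promptData "Enter a number: " parseInt "Not a number, try again." >>= print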
|
monkeybits/rpgame
|
src/Prompt.hs
|
bsd-3-clause
| 429 | 0 | 12 | 103 | 138 | 65 | 73 | 16 | 2 |
module Main where
import System.Exit
main = do
putStrLn "This test always passes!"
exitSuccess
|
vollmerm/shallow-fission
|
tests/Test.hs
|
bsd-3-clause
| 115 | 0 | 7 | 34 | 23 | 12 | 11 | 5 | 1 |
{-# LANGUAGE DeriveDataTypeable, PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Pass.L.Estimator
-- Copyright : (C) 2012-2013 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GADTs, Rank2Types)
--
----------------------------------------------------------------------------
module Data.Pass.L.Estimator
( Estimator(..)
, Estimate(..)
, estimateBy
) where
import Data.Ratio
import Data.Binary
import Data.Data
import qualified Data.IntMap as IM
import Data.IntMap (IntMap)
import Data.Pass.Util (clamp)
import Data.Hashable
import Data.Pass.Util.Beta (Beta(Beta))
import qualified Data.Pass.Util.Beta as Beta
-- | Techniques used to smooth the nearest values when calculating quantile functions.
-- R2 is used by default; the numbering convention follows the one used in the R programming language, where applicable.
data Estimator
= R1 -- ^ Inverse of the empirical distribution function
| R2 -- ^ .. with averaging at discontinuities (default)
| R3 -- ^ The observation numbered closest to Np. NB: does not yield a proper median
| R4 -- ^ Linear interpolation of the empirical distribution function. NB: does not yield a proper median.
| R5 -- ^ .. with knots midway through the steps as used in hydrology. This is the simplest continuous estimator that yields a correct median
| R6 -- ^ Linear interpolation of the expectations of the order statistics for the uniform distribution on [0,1]
| R7 -- ^ Linear interpolation of the modes for the order statistics for the uniform distribution on [0,1]
| R8 -- ^ Linear interpolation of the approximate medians for order statistics.
| R9 -- ^ The resulting quantile estimates are approximately unbiased for the expected order statistics if x is normally distributed.
| R10 -- ^ When rounding h, this yields the order statistic with the least expected square deviation relative to p.
| HD -- ^ The Harrell-Davis quantile estimator based on bootstrapped order statistics
deriving (Eq,Ord,Enum,Bounded,Data,Typeable,Show,Read)
instance Binary Estimator where
put e = put (fromIntegral (fromEnum e) :: Word8)
get = do
i <- get :: Get Word8
return $ toEnum (fromIntegral i)
instance Hashable Estimator where
hashWithSalt n e = n `hashWithSalt` fromEnum e
data Estimate r = Estimate {-# UNPACK #-} !Rational (IntMap r)
deriving Show
continuousEstimator ::
Fractional r =>
(Rational -> (Rational, Rational)) ->
(Rational -> Rational -> Rational) ->
Rational -> Int -> Estimate r
continuousEstimator bds f p n = Estimate h $
if p < lo then IM.singleton 0 1
else if p >= hi then IM.singleton (n - 1) 1
else case properFraction h of
(w,frac) | frac' <- fromRational frac -> IM.fromList [(w - 1, frac'), (w, 1 - frac')]
where
r = fromIntegral n
h = f p r
(lo, hi) = bds r
estimateBy :: Fractional r => Estimator -> Rational -> Int -> Estimate r
estimateBy HD = \q n -> Estimate (1%2) $ let
n' = fromIntegral n
np1 = n' + 1
q' = fromRational q
d = Beta (q'*np1) (np1*(1-q'))
in if q == 0 then IM.singleton 0 1
else if q == 1 then IM.singleton (n - 1) 1
else IM.fromListWith (+)
[ (i, realToFrac $ Beta.cumulative d ((fromIntegral i + 1) / n') - Beta.cumulative d (fromIntegral i / n'))
| i <- [0 .. n-1]
]
estimateBy R1 = \p n -> let np = fromIntegral n * p in Estimate (np + 1%2) $ IM.singleton (clamp n (ceiling np - 1)) 1
estimateBy R2 = \p n -> let np = fromIntegral n * p in Estimate (np + 1%2) $
if p == 0 then IM.singleton 0 1
else if p == 1 then IM.singleton (n - 1) 1
else IM.fromListWith (+) [(clamp n (ceiling np - 1), 0.5), (clamp n (floor np), 0.5)]
estimateBy R3 = \p n -> let np = fromIntegral n * p in Estimate np $ IM.singleton (clamp n (round np - 1)) 1
estimateBy R4 = continuousEstimator (\n -> (recip n, 1)) (*)
estimateBy R5 = continuousEstimator (\n -> let tn = 2 * n in (recip tn, (tn - 1) / tn)) $ \p n -> p*n + 0.5
estimateBy R6 = continuousEstimator (\n -> (recip (n + 1), n / (n + 1))) $ \p n -> p*(n+1)
estimateBy R7 = continuousEstimator (\_ -> (0, 1)) $ \p n -> p*(n-1) + 1
estimateBy R8 = continuousEstimator (\n -> (2/3 / (n + 1/3), (n - 1/3)/(n + 1/3))) $ \p n -> p*(n + 1/3) + 1/3
estimateBy R9 = continuousEstimator (\n -> (0.625 / (n + 0.25), (n - 0.375)/(n + 0.25))) $ \p n -> p*(n + 0.25) + 0.375
estimateBy R10 = continuousEstimator (\n -> (1.5 / (n + 2), (n + 0.5)/(n + 2))) $ \p n -> p*(n + 2) - 0.5
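-- Illustration (not part of the package): one way to apply an Estimate's weights
-- to a sorted sample. That the IntMap keys are 0-based indices into the sorted
-- data is an assumption drawn from the clamping above, not documented behaviour.
module QuantileSketch where
import qualified Data.IntMap as IM
import Data.List (sort)
import Data.Pass.L.Estimator
quantileWith :: Estimator -> Rational -> [Double] -> Double
quantileWith est p xs = sum [ w * (sorted !! i) | (i, w) <- IM.toList weights ]
  where
    sorted             = sort xs
    Estimate _ weights = estimateBy est p (length xs)
-- e.g. quantileWith R2 (1/2) [1,2,3,4] should give 2.5 (R2 averages at the discontinuity).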
|
ekmett/multipass
|
Data/Pass/L/Estimator.hs
|
bsd-3-clause
| 4,644 | 0 | 21 | 968 | 1,562 | 850 | 712 | 73 | 5 |
-- {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
module C where
import Control.Monad.State.Strict
import Data.Data
import Data.Either
import Data.Typeable
import Data.String
import Data.ByteString.Char8 (ByteString,pack,unpack)
import qualified Data.ByteString.Char8 as ByteString
import Network.Mime (defaultMimeLookup)
import System.Directory
import System.FilePath
import System.IO
import Text.Printf
import Types
import JS
-- toFile :: MonadIO m => FilePath -> Writer ByteString a -> m ()
-- toFile f wr = liftIO $ do
-- createDirectoryIfMissing True (takeDirectory f)
-- ByteString.writeFile f $ execWriter $ wr
toFile :: MonadIO m => FilePath -> StateT Handle IO () -> m ()
toFile f m = liftIO $ do
createDirectoryIfMissing True (takeDirectory f)
withFile f WriteMode (evalStateT m)
class LineLike x where
line :: x -> StateT Handle IO ()
line_ putter s = get >>= \h -> liftIO $ putter h s
instance LineLike ByteString where
line = line_ ByteString.hPutStrLn
instance LineLike [Char] where
line = line . pack
instance LineLike [[Char]] where
line s = forM_ s (\s -> line_ ByteString.hPutStr (pack s)) >> line ""
mk_c :: Args -> ByteString -> IO ()
mk_c a d = toFile (out_c a) $ do
line $ "/* http://stupefydeveloper.blogspot.ru/2008/08/cc-embed-binary-data-into-elf.html */"
line $ "#include <urweb.h>"
line $ "#include <stdio.h>"
line $ "#include \"" ++ (takeFileName (out_h a)) ++ "\""
line $ ""
line $ "#define BLOBSZ " ++ (printf "%d" (ByteString.length d))
line $ "static char blob[BLOBSZ];"
line $ ""
line $ [ "uw_Basis_blob ", cblobfun a , " (uw_context ctx, uw_unit unit)" ]
line $ "{"
line $ " uw_Basis_blob uwblob;"
line $ " uwblob.data = &blob[0];"
line $ " uwblob.size = BLOBSZ;"
line $ " return uwblob;"
line $ "}"
line $ ""
line $ [ "uw_Basis_string " , ctextfun a , " (uw_context ctx, uw_unit unit) {" ]
line $ " char* data = &blob[0];"
line $ " size_t size = sizeof(blob);"
line $ " char * c = uw_malloc(ctx, size+1);"
line $ " char * write = c;"
line $ " int i;"
line $ " for (i = 0; i < size; i++) {"
line $ " *write = data[i];"
line $ " if (*write == '\\0')"
line $ " *write = '\\n';"
line $ " *write++;"
line $ " }"
line $ " *write=0;"
line $ " return c;"
line $ " }"
line $ ""
line $ "static char blob[BLOBSZ] = {"
h <- get
liftIO $ do
forM_ (unpack d) $ \c -> do
hPutStr h (printf "0x%02X ," c)
-- liftIO $ ByteString.foldl' (\act c -> act >> hPutStr h (printf "0x%02X ," c)) (return ()) d
line $ "};"
line $ ""
mk_h :: Args -> IO ()
mk_h a = toFile (out_h a) $ do
line $ "#pragma once"
line $ "#include <urweb.h>"
line $ [ "uw_Basis_blob ", cblobfun a , " (uw_context ctx, uw_unit unit);" ]
line $ [ "uw_Basis_string ", ctextfun a, " (uw_context ctx, uw_unit unit);" ]
mk_urs :: Args -> IO ()
mk_urs a = toFile (out_urs a) $ do
line $ [ "val ", urblobfun, " : unit -> transaction blob" ]
line $ [ "val ", urtextfun, " : unit -> transaction string" ]
guessMime inf = fixup $ unpack (defaultMimeLookup (fromString inf)) where
fixup "application/javascript" = "text/javascript"
fixup m = m
mk_wrap :: Args -> [Url] -> Bool -> IO ()
mk_wrap a us open_js_ffi = do
toFile (out_wrapper a) $ do
let mm = guessMime (inp a)
line $ "open " ++ (uwModName (out_urs a))
line $ "fun content {} = b <- "++ urblobfun ++ " () ; returnBlob b (blessMime \"" ++ mm ++ "\")"
line $ "val propagated_urls : list url = "
forM_ us $ \u -> do
line $ " " ++ u ++ ".url ::"
line $ " []"
when (open_js_ffi) $ do
line $ "open " ++ (uwModName (out_ffi_js a))
line $ "val url = url(content {})"
line $ "val geturl = url"
liftIO $ printf "safeGet %s/content\n" (uwModName (out_wrapper a))
liftIO $ printf "allow mime %s\n" mm
mk_js_wrap :: Args -> ([JSType],[JSFunc]) -> IO ()
mk_js_wrap a (jt,jf) = do
let m = uwModName (out_ffi_js a)
toFile (out_ffi_js a) $ do
forM_ jt $ \decl -> line (urtdecl decl)
forM_ jf $ \decl -> do
line (urdecl decl)
liftIO $ printf "jsFunc %s.%s = %s\n" m (urname decl) (jsname decl)
liftIO $ printf "clientOnly %s.%s\n" m (urname decl)
-- mk_js_lib :: Args -> ([JSType],[JSFunc]) -> IO ()
-- mk_js_lib a (jt,jf) = do
-- toFile (out_ffi_js_lib a) $ do
-- let m = uwModName (out_ffi_js a)
-- line $ "ffi " ++ m
-- forM_ jf $ \decl -> do
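-- Illustration (hypothetical output path, assuming it sits alongside the definitions
-- above): the toFile/line pattern that mk_c and mk_h build on. The LineLike class
-- lets 'line' take a String, a ByteString, or a list of String fragments joined on
-- a single output line.
demoHeader :: IO ()
demoHeader = toFile "build/demo.h" $ do
  line "#pragma once"
  line "#include <urweb.h>"
  line [ "/* generated by ", "UrEmbed", " */" ]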
|
grwlf/cake3
|
app/UrEmbed/C.hs
|
bsd-3-clause
| 4,494 | 0 | 17 | 1,047 | 1,359 | 668 | 691 | 109 | 2 |
module Validate.US where
import Data.Char
regionStrict :: String -> Bool
regionStrict reg = reg `elem` ["AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DE", "DC", "FL", "GA", "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD", "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ", "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC", "SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY"]
-- Case-insensitive variant: upper-cases the input and delegates to regionStrict
region :: String -> Bool
region reg = regionStrict $ map toUpper reg
-- Not implemented yet: always returns False
ssn :: String -> Bool
ssn _ = False
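-- Illustration (assumes the corrected 'region' above, which delegates to regionStrict):
regionExamples :: [Bool]
regionExamples = [ regionStrict "CA", region "ca", region "zz" ]
-- expected: [True, True, False]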
|
jespino/haskell-validate
|
Validate/US.hs
|
bsd-3-clause
| 555 | 0 | 6 | 98 | 232 | 146 | 86 | 8 | 1 |
module SortCheck.Forall (
MetaCtx(..),
checkForallVars
) where
import Control.Monad.Trans.State.Lazy
import Control.Monad.Except (throwError)
import Control.Monad.Trans.Class (lift)
import Control.Lens
import Data.Maybe (isJust)
import Control.Monad (when, unless)
import qualified Data.Set as Set
import AST
import SortCheck.SymbolTable as SymbolTable
type MetaCtx = [(MetaVar, Sort)]
--------------------------------------------------------------------------------
-- ForallVars
-- This function looks up a sortName in state
-- ContextDepth is needed for forming the sort (not lookup)
checkSortByName :: ContextDepth -> SortName -> SortCheckM Sort
checkSortByName depth name = do
st <- get
if Set.member name (st^.simpleSorts)
then
if depth == 0
then return (SimpleSort name)
else throwError $ "Independent sort " ++ name ++ " can't have non-empty context"
else
if Set.member name (st^.depSorts)
then return (DepSort name depth)
else
throwError $ "Sort " ++ name ++ " is not defined"
-- Checks each forall variable, adjusting its sort, and checks for duplicates
checkForallVars :: MetaCtx -> SortCheckM MetaCtx
checkForallVars forall = do
-- changes the sort to the appropriate depth (if it's dependent at all)
forall' <- mapM (\ (a , b) -> do
b' <- checkSortByName (length $ mContext a) (getSortName b)
return (a , b') ) forall
-- check for dups in captures and x.x situations
mapM_ (\ (a , _) -> unless (allUnique (mName a : mContext a)) $
throwError "Duplicates in captures") forall'
unless (allUnique $ map (mName . fst) forall') $
throwError "Duplicates in metas"
return forall'
---
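-- Illustration: allUnique is imported from elsewhere in this code base; a plausible
-- stand-in (an assumption, not necessarily the project's own definition) is:
allUniqueSketch :: Ord a => [a] -> Bool
allUniqueSketch xs = Set.size (Set.fromList xs) == length xs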
|
esengie/fpl-exploration-tool
|
src/specLang/SortCheck/Forall.hs
|
bsd-3-clause
| 1,684 | 0 | 17 | 351 | 429 | 235 | 194 | 33 | 4 |