code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
y = f(x1) + g(Foo.x2)
| mpickering/hlint-refactor | tests/examples/Bracket38.hs | bsd-3-clause | 22 | 1 | 8 | 5 | 27 | 12 | 15 | 1 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Vimus.Render (
Render
, runRender
, getWindowSize
, addstr
, addLine
, chgat
, withColor
-- * exported to silence warnings
, Environment (..)
-- * exported for testing
, fitToColumn
) where
import Control.Applicative
import Control.Monad.Reader
import UI.Curses hiding (wgetch, ungetch, mvaddstr, err, mvwchgat, addstr, wcolor_set)
import Data.Char.WCWidth
import Vimus.Widget.Type
import Vimus.WindowLayout
data Environment = Environment {
environmentWindow :: Window
, environmentOffsetY :: Int
, environmentOffsetX :: Int
, environmentSize :: WindowSize
}
newtype Render a = Render (ReaderT Environment IO a)
deriving (Functor, Monad, Applicative)
runRender :: Window -> Int -> Int -> WindowSize -> Render a -> IO a
runRender window y x ws (Render action) = runReaderT action (Environment window y x ws)
getWindowSize :: Render WindowSize
getWindowSize = Render (asks environmentSize)
-- | Translate given coordinates and run given action
--
-- The action is only run if the coordinates are within the drawing area.
withTranslated :: Int -> Int -> (Window -> Int -> Int -> Int -> IO a) -> Render ()
withTranslated y_ x_ action = Render $ do
r <- ask
case r of
Environment window offsetY offsetX (WindowSize sizeY sizeX)
| 0 <= x && x < (sizeX + offsetX)
&& 0 <= y && y < (sizeY + offsetY) -> liftIO $ void (action window y x n)
| otherwise -> return ()
where
x = x_ + offsetX
y = y_ + offsetY
n = sizeX - x
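-- A worked example (the offsets and sizes here are hypothetical, not taken
-- from this module): with (offsetY, offsetX) = (1, 2) and WindowSize 10 20,
-- @withTranslated 0 0 action@ computes y = 1, x = 2 and n = 18, and since
-- both coordinates fall inside the drawing area it runs @action window 1 2 18@.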
addstr :: Int -> Int -> String -> Render ()
addstr y_ x_ str = withTranslated y_ x_ $ \window y x n ->
mvwaddnwstr window y x str (fitToColumn str n)
-- |
-- Determine how many characters from a given string fit in a column of a given
-- width.
fitToColumn :: String -> Int -> Int
fitToColumn str maxWidth = go str 0 0
where
go [] _ n = n
go (x:xs) width n
| width_ <= maxWidth = go xs width_ (succ n)
| otherwise = n
where
width_ = width + wcwidth x
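-- For illustration (assuming wcwidth 'a' == 1 and a double-width character
-- such as '字' has wcwidth 2): @fitToColumn "ab字" 3 == 2@, because the wide
-- character would push the rendered width past 3 columns.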
addLine :: Int -> Int -> TextLine -> Render ()
addLine y_ x_ (TextLine xs) = go y_ x_ xs
where
go y x chunks = case chunks of
[] -> return ()
c:cs -> case c of
Plain s -> addstr y x s >> go y (x + length s) cs
Colored color s -> withColor color (addstr y x s) >> go y (x + length s) cs
chgat :: Int -> [Attribute] -> WindowColor -> Render ()
chgat y_ attr wc = withTranslated y_ 0 $ \window y x n ->
mvwchgat window y x n attr wc
withColor :: WindowColor -> Render a -> Render a
withColor color action = do
window <- Render $ asks environmentWindow
setColor window color *> action <* setColor window MainColor
where
setColor w c = Render . liftIO $ wcolor_set w c
| haasn/vimus | src/Vimus/Render.hs | mit | 2,872 | 0 | 20 | 785 | 990 | 509 | 481 | 64 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SQS.ReceiveMessage
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves one or more messages, with a maximum limit of 10 messages, from
-- the specified queue. Long poll support is enabled by using the 'WaitTimeSeconds'
-- parameter. For more information, see <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-long-polling.html Amazon SQS Long Poll> in the /Amazon SQS Developer Guide/.
--
-- Short poll is the default behavior where a weighted random set of machines
-- is sampled on a 'ReceiveMessage' call. This means only the messages on the
-- sampled machines are returned. If the number of messages in the queue is
-- small (less than 1000), it is likely you will get fewer messages than you
-- requested per 'ReceiveMessage' call. If the number of messages in the queue is
-- extremely small, you might not receive any messages in a particular 'ReceiveMessage' response; in which case you should repeat the request.
--
-- For each message returned, the response includes the following:
--
-- Message body
--
-- MD5 digest of the message body. For information about MD5, go to <http://www.faqs.org/rfcs/rfc1321.html http://www.faqs.org/rfcs/rfc1321.html>.
--
-- Message ID you received when you sent the message to the queue.
--
-- Receipt handle.
--
-- Message attributes.
--
-- MD5 digest of the message attributes.
--
-- The receipt handle is the identifier you must provide when deleting the
-- message. For more information, see <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ImportantIdentifiers.html Queue and Message Identifiers> in the /Amazon SQS Developer Guide/.
--
-- You can provide the 'VisibilityTimeout' parameter in your request, which will
-- be applied to the messages that Amazon SQS returns in the response. If you do
-- not include the parameter, the overall visibility timeout for the queue is
-- used for the returned messages. For more information, see <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/AboutVT.html Visibility Timeout>
-- in the /Amazon SQS Developer Guide/.
--
-- Going forward, new attributes might be added. If you are writing code that
-- calls this action, we recommend that you structure your code so that it can
-- handle new attributes gracefully.
--
--
--
-- <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_ReceiveMessage.html>
module Network.AWS.SQS.ReceiveMessage
(
-- * Request
ReceiveMessage
-- ** Request constructor
, receiveMessage
-- ** Request lenses
, rmAttributeNames
, rmMaxNumberOfMessages
, rmMessageAttributeNames
, rmQueueUrl
, rmVisibilityTimeout
, rmWaitTimeSeconds
-- * Response
, ReceiveMessageResponse
-- ** Response constructor
, receiveMessageResponse
-- ** Response lenses
, rmrMessages
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.SQS.Types
import qualified GHC.Exts
data ReceiveMessage = ReceiveMessage
{ _rmAttributeNames :: List "member" Text
, _rmMaxNumberOfMessages :: Maybe Int
, _rmMessageAttributeNames :: List "member" Text
, _rmQueueUrl :: Text
, _rmVisibilityTimeout :: Maybe Int
, _rmWaitTimeSeconds :: Maybe Int
} deriving (Eq, Read, Show)
-- | 'ReceiveMessage' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rmAttributeNames' @::@ ['Text']
--
-- * 'rmMaxNumberOfMessages' @::@ 'Maybe' 'Int'
--
-- * 'rmMessageAttributeNames' @::@ ['Text']
--
-- * 'rmQueueUrl' @::@ 'Text'
--
-- * 'rmVisibilityTimeout' @::@ 'Maybe' 'Int'
--
-- * 'rmWaitTimeSeconds' @::@ 'Maybe' 'Int'
--
receiveMessage :: Text -- ^ 'rmQueueUrl'
-> ReceiveMessage
receiveMessage p1 = ReceiveMessage
{ _rmQueueUrl = p1
, _rmAttributeNames = mempty
, _rmMessageAttributeNames = mempty
, _rmMaxNumberOfMessages = Nothing
, _rmVisibilityTimeout = Nothing
, _rmWaitTimeSeconds = Nothing
}
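-- An illustrative (not normative) usage sketch, assuming the lens operators
-- (&) and (?~) from a lens library are in scope; the queue URL is a made-up
-- placeholder:
--
-- > rq :: ReceiveMessage
-- > rq = receiveMessage "https://sqs.us-east-1.amazonaws.com/123456789012/my-queue"
-- >        & rmMaxNumberOfMessages ?~ 10
-- >        & rmWaitTimeSeconds     ?~ 20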
-- | A list of attributes that need to be returned along with each message.
--
-- The following lists the names and descriptions of the attributes that can
-- be returned:
--
-- 'All' - returns all values. 'ApproximateFirstReceiveTimestamp' - returns the
-- time when the message was first received from the queue (epoch time in
-- milliseconds). 'ApproximateReceiveCount' - returns the number of times a
-- message has been received from the queue but not deleted. 'SenderId' - returns
-- the AWS account number (or the IP address, if anonymous access is allowed) of
-- the sender. 'SentTimestamp' - returns the time when the message was sent to
-- the queue (epoch time in milliseconds).
rmAttributeNames :: Lens' ReceiveMessage [Text]
rmAttributeNames = lens _rmAttributeNames (\s a -> s { _rmAttributeNames = a }) . _List
-- | The maximum number of messages to return. Amazon SQS never returns more
-- messages than this value but may return fewer. Values can be from 1 to 10.
-- Default is 1.
--
-- Not all of the messages are necessarily returned.
rmMaxNumberOfMessages :: Lens' ReceiveMessage (Maybe Int)
rmMaxNumberOfMessages =
lens _rmMaxNumberOfMessages (\s a -> s { _rmMaxNumberOfMessages = a })
-- | The name of the message attribute, where /N/ is the index. The message
-- attribute name can contain the following characters: A-Z, a-z, 0-9,
-- underscore (_), hyphen (-), and period (.). The name must not start or end
-- with a period, and it should not have successive periods. The name is case
-- sensitive and must be unique among all attribute names for the message. The
-- name can be up to 256 characters long. The name cannot start with "AWS." or
-- "Amazon." (or any variations in casing), because these prefixes are reserved
-- for use by Amazon Web Services.
--
-- When using 'ReceiveMessage', you can send a list of attribute names to
-- receive, or you can return all of the attributes by specifying "All" or ".*"
-- in your request. You can also use "foo.*" to return all message attributes
-- starting with the "foo" prefix.
rmMessageAttributeNames :: Lens' ReceiveMessage [Text]
rmMessageAttributeNames =
lens _rmMessageAttributeNames (\s a -> s { _rmMessageAttributeNames = a })
. _List
-- | The URL of the Amazon SQS queue to take action on.
rmQueueUrl :: Lens' ReceiveMessage Text
rmQueueUrl = lens _rmQueueUrl (\s a -> s { _rmQueueUrl = a })
-- | The duration (in seconds) that the received messages are hidden from
-- subsequent retrieve requests after being retrieved by a 'ReceiveMessage'
-- request.
rmVisibilityTimeout :: Lens' ReceiveMessage (Maybe Int)
rmVisibilityTimeout =
lens _rmVisibilityTimeout (\s a -> s { _rmVisibilityTimeout = a })
-- | The duration (in seconds) for which the call will wait for a message to
-- arrive in the queue before returning. If a message is available, the call
-- will return sooner than WaitTimeSeconds.
rmWaitTimeSeconds :: Lens' ReceiveMessage (Maybe Int)
rmWaitTimeSeconds =
lens _rmWaitTimeSeconds (\s a -> s { _rmWaitTimeSeconds = a })
newtype ReceiveMessageResponse = ReceiveMessageResponse
{ _rmrMessages :: List "member" Message
} deriving (Eq, Read, Show, Monoid, Semigroup)
-- | 'ReceiveMessageResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rmrMessages' @::@ ['Message']
--
receiveMessageResponse :: ReceiveMessageResponse
receiveMessageResponse = ReceiveMessageResponse
{ _rmrMessages = mempty
}
-- | A list of messages.
rmrMessages :: Lens' ReceiveMessageResponse [Message]
rmrMessages = lens _rmrMessages (\s a -> s { _rmrMessages = a }) . _List
instance ToPath ReceiveMessage where
toPath = const "/"
instance ToQuery ReceiveMessage where
toQuery ReceiveMessage{..} = mconcat
[ toQuery _rmAttributeNames
, "MaxNumberOfMessages" =? _rmMaxNumberOfMessages
, toQuery _rmMessageAttributeNames
, "QueueUrl" =? _rmQueueUrl
, "VisibilityTimeout" =? _rmVisibilityTimeout
, "WaitTimeSeconds" =? _rmWaitTimeSeconds
]
instance ToHeaders ReceiveMessage
instance AWSRequest ReceiveMessage where
type Sv ReceiveMessage = SQS
type Rs ReceiveMessage = ReceiveMessageResponse
request = post "ReceiveMessage"
response = xmlResponse
instance FromXML ReceiveMessageResponse where
parseXML = withElement "ReceiveMessageResult" $ \x -> ReceiveMessageResponse
<$> parseXML x
| kim/amazonka | amazonka-sqs/gen/Network/AWS/SQS/ReceiveMessage.hs | mpl-2.0 | 9,521 | 0 | 10 | 1,885 | 855 | 538 | 317 | 88 | 1 |
{-# LANGUAGE CPP, RecordWildCards, GADTs #-}
module CmmLayoutStack (
cmmLayoutStack, setInfoTableStackMap
) where
import StgCmmUtils ( callerSaveVolatileRegs ) -- XXX layering violation
import StgCmmForeign ( saveThreadState, loadThreadState ) -- XXX layering violation
import BasicTypes
import Cmm
import CmmInfo
import BlockId
import CLabel
import CmmUtils
import MkGraph
import ForeignCall
import CmmLive
import CmmProcPoint
import SMRep
import Hoopl
import UniqSupply
import StgCmmUtils ( newTemp )
import Maybes
import UniqFM
import Util
import DynFlags
import FastString
import Outputable hiding ( isEmpty )
import qualified Data.Set as Set
import Control.Monad.Fix
import Data.Array as Array
import Data.Bits
import Data.List (nub)
import Control.Monad (liftM)
import Prelude hiding ((<*>))
#include "HsVersions.h"
{- Note [Stack Layout]
The job of this pass is to
- replace references to abstract stack Areas with fixed offsets from Sp.
- replace the CmmHighStackMark constant used in the stack check with
the maximum stack usage of the proc.
- save any variables that are live across a call, and reload them as
necessary.
Before stack allocation, local variables remain live across native
calls (CmmCall{ cmm_cont = Just _ }), and after stack allocation local
variables are clobbered by native calls.
We want to do stack allocation so that as far as possible
- stack use is minimized, and
- unnecessary stack saves and loads are avoided.
The algorithm we use is a variant of linear-scan register allocation,
where the stack is our register file.
- First, we do a liveness analysis, which annotates every block with
the variables live on entry to the block.
- We traverse blocks in reverse postorder DFS; that is, we visit at
least one predecessor of a block before the block itself. The
stack layout flowing from the predecessor of the block will
determine the stack layout on entry to the block.
- We maintain a data structure
Map Label StackMap
which describes the contents of the stack and the stack pointer on
entry to each block that is a successor of a block that we have
visited.
- For each block we visit:
- Look up the StackMap for this block.
- If this block is a proc point (or a call continuation, if we
aren't splitting proc points), emit instructions to reload all
the live variables from the stack, according to the StackMap.
- Walk forwards through the instructions:
- At an assignment x = Sp[loc]
- Record the fact that Sp[loc] contains x, so that we won't
need to save x if it ever needs to be spilled.
- At an assignment x = E
- If x was previously on the stack, it isn't any more
- At the last node, if it is a call or a jump to a proc point
- Lay out the stack frame for the call (see setupStackFrame)
- emit instructions to save all the live variables
- Remember the StackMaps for all the successors
- emit an instruction to adjust Sp
- If the last node is a branch, then the current StackMap is the
StackMap for the successors.
- Manifest Sp: replace references to stack areas in this block
with real Sp offsets. We cannot do this until we have laid out
the stack area for the successors above.
In this phase we also eliminate redundant stores to the stack;
see elimStackStores.
- There is one important gotcha: sometimes we'll encounter a control
transfer to a block that we've already processed (a join point),
and in that case we might need to rearrange the stack to match
what the block is expecting. (exactly the same as in linear-scan
register allocation, except here we have the luxury of an infinite
supply of temporary variables).
- Finally, we update the magic CmmHighStackMark constant with the
stack usage of the function, and eliminate the whole stack check
if there was no stack use. (in fact this is done as part of the
main traversal, by feeding the high-water-mark output back in as
an input. I hate cyclic programming, but it's just too convenient
sometimes.)
There are plenty of tricky details: update frames, proc points, return
addresses, foreign calls, and some ad-hoc optimisations that are
convenient to do here and effective in common cases. Comments in the
code below explain these.
-}
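{- A small worked illustration of the traversal above (labels and offsets are
   hypothetical, chosen only to make the steps concrete):

   Suppose the StackMap flowing into block L2 has sm_sp = 24 and records that
   x is already saved at stack offset 16.  Walking L2:

     - "y = Sp[16]"  records that offset 16 also holds y, so y will not need
                     a fresh save if it is live across the call below.
     - "y = y + 1"   removes y from the map again (its stack copy is stale).
     - "call f() returning to L3 with 8 bytes of args"
                     lays out L3's frame, emits saves for the remaining live
                     variables, adjusts Sp, and records L3's StackMap.

   The Sp high-water mark is then the maximum Sp usage seen over all blocks.
-}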
-- All stack locations are expressed as positive byte offsets from the
-- "base", which is defined to be the address above the return address
-- on the stack on entry to this CmmProc.
--
-- Lower addresses have higher StackLocs.
--
type StackLoc = ByteOff
{-
A StackMap describes the stack at any given point. At a continuation
it has a particular layout, like this:
| | <- base
|-------------|
| ret0 | <- base + 8
|-------------|
. upd frame . <- base + sm_ret_off
|-------------|
| |
. vars .
. (live/dead) .
| | <- base + sm_sp - sm_args
|-------------|
| ret1 |
. ret vals . <- base + sm_sp (<--- Sp points here)
|-------------|
Why do we include the final return address (ret0) in our stack map? I
have absolutely no idea, but it seems to be done that way consistently
in the rest of the code generator, so I played along here. --SDM
Note that we will be constructing an info table for the continuation
(ret1), which needs to describe the stack down to, but not including,
the update frame (or ret0, if there is no update frame).
-}
data StackMap = StackMap
{ sm_sp :: StackLoc
-- ^ the offset of Sp relative to the base on entry
-- to this block.
, sm_args :: ByteOff
-- ^ the number of bytes of arguments in the area for this block
-- Defn: the offset of young(L) relative to the base is given by
-- (sm_sp - sm_args) of the StackMap for block L.
, sm_ret_off :: ByteOff
-- ^ Number of words of stack that we do not describe with an info
-- table, because it contains an update frame.
, sm_regs :: UniqFM (LocalReg,StackLoc)
-- ^ regs on the stack
}
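-- A concrete reading of these fields (numbers are illustrative only): with
-- sm_sp = 40, sm_args = 16 and sm_ret_off = 8, Sp sits 40 bytes below the
-- base, the argument/return-value area for this block occupies offsets
-- 24..40 (i.e. young(L) starts at sm_sp - sm_args = 24), and the 8 bytes
-- nearest the base are update-frame material not described by the info table.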
instance Outputable StackMap where
ppr StackMap{..} =
text "Sp = " <> int sm_sp $$
text "sm_args = " <> int sm_args $$
text "sm_ret_off = " <> int sm_ret_off $$
text "sm_regs = " <> ppr (eltsUFM sm_regs)
cmmLayoutStack :: DynFlags -> ProcPointSet -> ByteOff -> CmmGraph
-> UniqSM (CmmGraph, BlockEnv StackMap)
cmmLayoutStack dflags procpoints entry_args
graph0@(CmmGraph { g_entry = entry })
= do
-- We need liveness info. Dead assignments are removed later
-- by the sinking pass.
let (graph, liveness) = (graph0, cmmLocalLiveness dflags graph0)
blocks = postorderDfs graph
(final_stackmaps, _final_high_sp, new_blocks) <-
mfix $ \ ~(rec_stackmaps, rec_high_sp, _new_blocks) ->
layout dflags procpoints liveness entry entry_args
rec_stackmaps rec_high_sp blocks
new_blocks' <- mapM (lowerSafeForeignCall dflags) new_blocks
return (ofBlockList entry new_blocks', final_stackmaps)
layout :: DynFlags
-> BlockSet -- proc points
-> BlockEnv CmmLocalLive -- liveness
-> BlockId -- entry
-> ByteOff -- stack args on entry
-> BlockEnv StackMap -- [final] stack maps
-> ByteOff -- [final] Sp high water mark
-> [CmmBlock] -- [in] blocks
-> UniqSM
( BlockEnv StackMap -- [out] stack maps
, ByteOff -- [out] Sp high water mark
, [CmmBlock] -- [out] new blocks
)
layout dflags procpoints liveness entry entry_args final_stackmaps final_sp_high blocks
= go blocks init_stackmap entry_args []
where
(updfr, cont_info) = collectContInfo blocks
init_stackmap = mapSingleton entry StackMap{ sm_sp = entry_args
, sm_args = entry_args
, sm_ret_off = updfr
, sm_regs = emptyUFM
}
go [] acc_stackmaps acc_hwm acc_blocks
= return (acc_stackmaps, acc_hwm, acc_blocks)
go (b0 : bs) acc_stackmaps acc_hwm acc_blocks
= do
let (entry0@(CmmEntry entry_lbl tscope), middle0, last0) = blockSplit b0
let stack0@StackMap { sm_sp = sp0 }
= mapFindWithDefault
(pprPanic "no stack map for" (ppr entry_lbl))
entry_lbl acc_stackmaps
-- (a) Update the stack map to include the effects of
-- assignments in this block
let stack1 = foldBlockNodesF (procMiddle acc_stackmaps) middle0 stack0
-- (b) Insert assignments to reload all the live variables if this
-- block is a proc point
let middle1 = if entry_lbl `setMember` procpoints
then foldr blockCons middle0 (insertReloads stack0)
else middle0
-- (c) Look at the last node and if we are making a call or
-- jumping to a proc point, we must save the live
-- variables, adjust Sp, and construct the StackMaps for
-- each of the successor blocks. See handleLastNode for
-- details.
(middle2, sp_off, last1, fixup_blocks, out)
<- handleLastNode dflags procpoints liveness cont_info
acc_stackmaps stack1 tscope middle0 last0
-- (d) Manifest Sp: run over the nodes in the block and replace
-- CmmStackSlot with CmmLoad from Sp with a concrete offset.
--
-- our block:
-- middle1 -- the original middle nodes
-- middle2 -- live variable saves from handleLastNode
-- Sp = Sp + sp_off -- Sp adjustment goes here
-- last1 -- the last node
--
let middle_pre = blockToList $ foldl blockSnoc middle1 middle2
final_blocks = manifestSp dflags final_stackmaps stack0 sp0 final_sp_high entry0
middle_pre sp_off last1 fixup_blocks
acc_stackmaps' = mapUnion acc_stackmaps out
-- If this block jumps to the GC, then we do not take its
-- stack usage into account for the high-water mark.
-- Otherwise, if the only stack usage is in the stack-check
-- failure block itself, we will do a redundant stack
-- check. The stack has a buffer designed to accommodate
-- the largest amount of stack needed for calling the GC.
--
this_sp_hwm | isGcJump last0 = 0
| otherwise = sp0 - sp_off
hwm' = maximum (acc_hwm : this_sp_hwm : map sm_sp (mapElems out))
go bs acc_stackmaps' hwm' (final_blocks ++ acc_blocks)
-- -----------------------------------------------------------------------------
-- Not foolproof, but GCFun is the culprit we most want to catch
isGcJump :: CmmNode O C -> Bool
isGcJump (CmmCall { cml_target = CmmReg (CmmGlobal l) })
= l == GCFun || l == GCEnter1
isGcJump _something_else = False
-- -----------------------------------------------------------------------------
-- This doesn't seem right somehow. We need to find out whether this
-- proc will push some update frame material at some point, so that we
-- can avoid using that area of the stack for spilling. The
-- updfr_space field of the CmmProc *should* tell us, but it doesn't
-- (I think maybe it gets filled in later when we do proc-point
-- splitting).
--
-- So we'll just take the max of all the cml_ret_offs. This could be
-- unnecessarily pessimistic, but probably not in the code we
-- generate.
collectContInfo :: [CmmBlock] -> (ByteOff, BlockEnv ByteOff)
collectContInfo blocks
= (maximum ret_offs, mapFromList (catMaybes mb_argss))
where
(mb_argss, ret_offs) = mapAndUnzip get_cont blocks
get_cont :: Block CmmNode x C -> (Maybe (Label, ByteOff), ByteOff)
get_cont b =
case lastNode b of
CmmCall { cml_cont = Just l, .. }
-> (Just (l, cml_ret_args), cml_ret_off)
CmmForeignCall { .. }
-> (Just (succ, ret_args), ret_off)
_other -> (Nothing, 0)
-- -----------------------------------------------------------------------------
-- Updating the StackMap from middle nodes
-- Look for loads from stack slots, and update the StackMap. This is
-- purely for optimisation reasons, so that we can avoid saving a
-- variable back to a different stack slot if it is already on the
-- stack.
--
-- This happens a lot: for example when function arguments are passed
-- on the stack and need to be immediately saved across a call, we
-- want to just leave them where they are on the stack.
--
procMiddle :: BlockEnv StackMap -> CmmNode e x -> StackMap -> StackMap
procMiddle stackmaps node sm
= case node of
CmmAssign (CmmLocal r) (CmmLoad (CmmStackSlot area off) _)
-> sm { sm_regs = addToUFM (sm_regs sm) r (r,loc) }
where loc = getStackLoc area off stackmaps
CmmAssign (CmmLocal r) _other
-> sm { sm_regs = delFromUFM (sm_regs sm) r }
_other
-> sm
getStackLoc :: Area -> ByteOff -> BlockEnv StackMap -> StackLoc
getStackLoc Old n _ = n
getStackLoc (Young l) n stackmaps =
case mapLookup l stackmaps of
Nothing -> pprPanic "getStackLoc" (ppr l)
Just sm -> sm_sp sm - sm_args sm + n
-- -----------------------------------------------------------------------------
-- Handling stack allocation for a last node
-- We take a single last node and turn it into:
--
-- C1 (some statements)
-- Sp = Sp + N
-- C2 (some more statements)
-- call f() -- the actual last node
--
-- plus possibly some more blocks (we may have to add some fixup code
-- between the last node and the continuation).
--
-- C1: is the code for saving the variables across this last node onto
-- the stack, if the continuation is a call or jumps to a proc point.
--
-- C2: if the last node is a safe foreign call, we have to inject some
-- extra code that goes *after* the Sp adjustment.
handleLastNode
:: DynFlags -> ProcPointSet -> BlockEnv CmmLocalLive -> BlockEnv ByteOff
-> BlockEnv StackMap -> StackMap -> CmmTickScope
-> Block CmmNode O O
-> CmmNode O C
-> UniqSM
( [CmmNode O O] -- nodes to go *before* the Sp adjustment
, ByteOff -- amount to adjust Sp
, CmmNode O C -- new last node
, [CmmBlock] -- new blocks
, BlockEnv StackMap -- stackmaps for the continuations
)
handleLastNode dflags procpoints liveness cont_info stackmaps
stack0@StackMap { sm_sp = sp0 } tscp middle last
= case last of
-- At each return / tail call,
-- adjust Sp to point to the last argument pushed, which
-- is cml_args, after popping any other junk from the stack.
CmmCall{ cml_cont = Nothing, .. } -> do
let sp_off = sp0 - cml_args
return ([], sp_off, last, [], mapEmpty)
-- At each CmmCall with a continuation:
CmmCall{ cml_cont = Just cont_lbl, .. } ->
return $ lastCall cont_lbl cml_args cml_ret_args cml_ret_off
CmmForeignCall{ succ = cont_lbl, .. } -> do
return $ lastCall cont_lbl (wORD_SIZE dflags) ret_args ret_off
-- one word of args: the return address
CmmBranch {} -> handleBranches
CmmCondBranch {} -> handleBranches
CmmSwitch {} -> handleBranches
where
-- Calls and ForeignCalls are handled the same way:
lastCall :: BlockId -> ByteOff -> ByteOff -> ByteOff
-> ( [CmmNode O O]
, ByteOff
, CmmNode O C
, [CmmBlock]
, BlockEnv StackMap
)
lastCall lbl cml_args cml_ret_args cml_ret_off
= ( assignments
, spOffsetForCall sp0 cont_stack cml_args
, last
, [] -- no new blocks
, mapSingleton lbl cont_stack )
where
(assignments, cont_stack) = prepareStack lbl cml_ret_args cml_ret_off
prepareStack lbl cml_ret_args cml_ret_off
| Just cont_stack <- mapLookup lbl stackmaps
-- If we have already seen this continuation before, then
-- we just have to make the stack look the same:
= (fixupStack stack0 cont_stack, cont_stack)
-- Otherwise, we have to allocate the stack frame
| otherwise
= (save_assignments, new_cont_stack)
where
(new_cont_stack, save_assignments)
= setupStackFrame dflags lbl liveness cml_ret_off cml_ret_args stack0
-- For other last nodes (branches), if any of the targets is a
-- proc point, we have to set up the stack to match what the proc
-- point is expecting.
--
handleBranches :: UniqSM ( [CmmNode O O]
, ByteOff
, CmmNode O C
, [CmmBlock]
, BlockEnv StackMap )
handleBranches
-- Note [diamond proc point]
| Just l <- futureContinuation middle
, (nub $ filter (`setMember` procpoints) $ successors last) == [l]
= do
let cont_args = mapFindWithDefault 0 l cont_info
(assigs, cont_stack) = prepareStack l cont_args (sm_ret_off stack0)
out = mapFromList [ (l', cont_stack)
| l' <- successors last ]
return ( assigs
, spOffsetForCall sp0 cont_stack (wORD_SIZE dflags)
, last
, []
, out)
| otherwise = do
pps <- mapM handleBranch (successors last)
let lbl_map :: LabelMap Label
lbl_map = mapFromList [ (l,tmp) | (l,tmp,_,_) <- pps ]
fix_lbl l = mapFindWithDefault l l lbl_map
return ( []
, 0
, mapSuccessors fix_lbl last
, concat [ blk | (_,_,_,blk) <- pps ]
, mapFromList [ (l, sm) | (l,_,sm,_) <- pps ] )
-- For each successor of this block
handleBranch :: BlockId -> UniqSM (BlockId, BlockId, StackMap, [CmmBlock])
handleBranch l
-- (a) if the successor already has a stackmap, we need to
-- shuffle the current stack to make it look the same.
-- We have to insert a new block to make this happen.
| Just stack2 <- mapLookup l stackmaps
= do
let assigs = fixupStack stack0 stack2
(tmp_lbl, block) <- makeFixupBlock dflags sp0 l stack2 tscp assigs
return (l, tmp_lbl, stack2, block)
-- (b) if the successor is a proc point, save everything
-- on the stack.
| l `setMember` procpoints
= do
let cont_args = mapFindWithDefault 0 l cont_info
(stack2, assigs) =
setupStackFrame dflags l liveness (sm_ret_off stack0)
cont_args stack0
(tmp_lbl, block) <- makeFixupBlock dflags sp0 l stack2 tscp assigs
return (l, tmp_lbl, stack2, block)
-- (c) otherwise, the current StackMap is the StackMap for
-- the continuation. But we must remember to remove any
-- variables from the StackMap that are *not* live at
-- the destination, because this StackMap might be used
-- by fixupStack if this is a join point.
| otherwise = return (l, l, stack1, [])
where live = mapFindWithDefault (panic "handleBranch") l liveness
stack1 = stack0 { sm_regs = filterUFM is_live (sm_regs stack0) }
is_live (r,_) = r `elemRegSet` live
makeFixupBlock :: DynFlags -> ByteOff -> Label -> StackMap
-> CmmTickScope -> [CmmNode O O]
-> UniqSM (Label, [CmmBlock])
makeFixupBlock dflags sp0 l stack tscope assigs
| null assigs && sp0 == sm_sp stack = return (l, [])
| otherwise = do
tmp_lbl <- liftM mkBlockId $ getUniqueM
let sp_off = sp0 - sm_sp stack
block = blockJoin (CmmEntry tmp_lbl tscope)
(maybeAddSpAdj dflags sp_off (blockFromList assigs))
(CmmBranch l)
return (tmp_lbl, [block])
-- Sp is currently pointing to current_sp,
-- we want it to point to
-- (sm_sp cont_stack - sm_args cont_stack + args)
-- so the difference is
-- sp0 - (sm_sp cont_stack - sm_args cont_stack + args)
spOffsetForCall :: ByteOff -> StackMap -> ByteOff -> ByteOff
spOffsetForCall current_sp cont_stack args
= current_sp - (sm_sp cont_stack - sm_args cont_stack + args)
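-- Example with made-up numbers: if sp0 = 48 and the continuation's StackMap
-- has sm_sp = 32 and sm_args = 16, then for a call pushing args = 8 bytes
-- spOffsetForCall 48 cont_stack 8 = 48 - (32 - 16 + 8) = 24, i.e. Sp must be
-- moved 24 bytes towards the base before the call.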
-- | create a sequence of assignments to establish the new StackMap,
-- given the old StackMap.
fixupStack :: StackMap -> StackMap -> [CmmNode O O]
fixupStack old_stack new_stack = concatMap move new_locs
where
old_map = sm_regs old_stack
new_locs = stackSlotRegs new_stack
move (r,n)
| Just (_,m) <- lookupUFM old_map r, n == m = []
| otherwise = [CmmStore (CmmStackSlot Old n)
(CmmReg (CmmLocal r))]
setupStackFrame
:: DynFlags
-> BlockId -- label of continuation
-> BlockEnv CmmLocalLive -- liveness
-> ByteOff -- updfr
-> ByteOff -- bytes of return values on stack
-> StackMap -- current StackMap
-> (StackMap, [CmmNode O O])
setupStackFrame dflags lbl liveness updfr_off ret_args stack0
= (cont_stack, assignments)
where
-- get the set of LocalRegs live in the continuation
live = mapFindWithDefault Set.empty lbl liveness
-- the stack from the base to updfr_off is off-limits.
-- our new stack frame contains:
-- * saved live variables
-- * the return address [young(C) + 8]
-- * the args for the call,
-- which are replaced by the return values at the return
-- point.
-- everything up to updfr_off is off-limits
-- stack1 contains updfr_off, plus everything we need to save
(stack1, assignments) = allocate dflags updfr_off live stack0
-- And the Sp at the continuation is:
-- sm_sp stack1 + ret_args
cont_stack = stack1{ sm_sp = sm_sp stack1 + ret_args
, sm_args = ret_args
, sm_ret_off = updfr_off
}
-- -----------------------------------------------------------------------------
-- Note [diamond proc point]
--
-- This special case looks for the pattern we get from a typical
-- tagged case expression:
--
-- Sp[young(L1)] = L1
-- if (R1 & 7) != 0 goto L1 else goto L2
-- L2:
-- call [R1] returns to L1
-- L1: live: {y}
-- x = R1
--
-- If we let the generic case handle this, we get
--
-- Sp[-16] = L1
-- if (R1 & 7) != 0 goto L1a else goto L2
-- L2:
-- Sp[-8] = y
-- Sp = Sp - 16
-- call [R1] returns to L1
-- L1a:
-- Sp[-8] = y
-- Sp = Sp - 16
-- goto L1
-- L1:
-- x = R1
--
-- The code for saving the live vars is duplicated in each branch, and
-- furthermore there is an extra jump in the fast path (assuming L1 is
-- a proc point, which it probably is if there is a heap check).
--
-- So to fix this we want to set up the stack frame before the
-- conditional jump. How do we know when to do this, and when it is
-- safe? The basic idea is, when we see the assignment
--
-- Sp[young(L)] = L
--
-- we know that
-- * we are definitely heading for L
-- * there can be no more reads from another stack area, because young(L)
-- overlaps with it.
--
-- We don't necessarily know that everything live at L is live now
-- (some might be assigned between here and the jump to L). So we
-- simplify and only do the optimisation when we see
--
-- (1) a block containing an assignment of a return address L
-- (2) ending in a branch where one (and only) continuation goes to L,
-- and no other continuations go to proc points.
--
-- then we allocate the stack frame for L at the end of the block,
-- before the branch.
--
-- We could generalise (2), but that would make it a bit more
-- complicated to handle, and this currently catches the common case.
futureContinuation :: Block CmmNode O O -> Maybe BlockId
futureContinuation middle = foldBlockNodesB f middle Nothing
where f :: CmmNode a b -> Maybe BlockId -> Maybe BlockId
f (CmmStore (CmmStackSlot (Young l) _) (CmmLit (CmmBlock _))) _
= Just l
f _ r = r
-- -----------------------------------------------------------------------------
-- Saving live registers
-- | Given a set of live registers and a StackMap, save all the registers
-- on the stack and return the new StackMap and the assignments to do
-- the saving.
--
allocate :: DynFlags -> ByteOff -> LocalRegSet -> StackMap
-> (StackMap, [CmmNode O O])
allocate dflags ret_off live stackmap@StackMap{ sm_sp = sp0
, sm_regs = regs0 }
=
-- we only have to save regs that are not already in a slot
let to_save = filter (not . (`elemUFM` regs0)) (Set.elems live)
regs1 = filterUFM (\(r,_) -> elemRegSet r live) regs0
in
-- make a map of the stack
let stack = reverse $ Array.elems $
accumArray (\_ x -> x) Empty (1, toWords dflags (max sp0 ret_off)) $
ret_words ++ live_words
where ret_words =
[ (x, Occupied)
| x <- [ 1 .. toWords dflags ret_off] ]
live_words =
[ (toWords dflags x, Occupied)
| (r,off) <- eltsUFM regs1,
let w = localRegBytes dflags r,
x <- [ off, off - wORD_SIZE dflags .. off - w + 1] ]
in
-- Pass over the stack: find slots to save all the new live variables,
-- choosing the oldest slots first (hence a foldr).
let
save slot ([], stack, n, assigs, regs) -- no more regs to save
= ([], slot:stack, plusW dflags n 1, assigs, regs)
save slot (to_save, stack, n, assigs, regs)
= case slot of
Occupied -> (to_save, Occupied:stack, plusW dflags n 1, assigs, regs)
Empty
| Just (stack', r, to_save') <-
select_save to_save (slot:stack)
-> let assig = CmmStore (CmmStackSlot Old n')
(CmmReg (CmmLocal r))
n' = plusW dflags n 1
in
(to_save', stack', n', assig : assigs, (r,(r,n')):regs)
| otherwise
-> (to_save, slot:stack, plusW dflags n 1, assigs, regs)
-- we should do better here: right now we'll fit the smallest first,
-- but it would make more sense to fit the biggest first.
select_save :: [LocalReg] -> [StackSlot]
-> Maybe ([StackSlot], LocalReg, [LocalReg])
select_save regs stack = go regs []
where go [] _no_fit = Nothing
go (r:rs) no_fit
| Just rest <- dropEmpty words stack
= Just (replicate words Occupied ++ rest, r, rs++no_fit)
| otherwise
= go rs (r:no_fit)
where words = localRegWords dflags r
-- fill in empty slots as much as possible
(still_to_save, save_stack, n, save_assigs, save_regs)
= foldr save (to_save, [], 0, [], []) stack
-- push any remaining live vars on the stack
(push_sp, push_assigs, push_regs)
= foldr push (n, [], []) still_to_save
where
push r (n, assigs, regs)
= (n', assig : assigs, (r,(r,n')) : regs)
where
n' = n + localRegBytes dflags r
assig = CmmStore (CmmStackSlot Old n')
(CmmReg (CmmLocal r))
trim_sp
| not (null push_regs) = push_sp
| otherwise
= plusW dflags n (- length (takeWhile isEmpty save_stack))
final_regs = regs1 `addListToUFM` push_regs
`addListToUFM` save_regs
in
-- XXX should be an assert
if ( n /= max sp0 ret_off ) then pprPanic "allocate" (ppr n <+> ppr sp0 <+> ppr ret_off) else
if (trim_sp .&. (wORD_SIZE dflags - 1)) /= 0 then pprPanic "allocate2" (ppr trim_sp <+> ppr final_regs <+> ppr push_sp) else
( stackmap { sm_regs = final_regs , sm_sp = trim_sp }
, push_assigs ++ save_assigs )
-- -----------------------------------------------------------------------------
-- Manifesting Sp
-- | Manifest Sp: turn all the CmmStackSlots into CmmLoads from Sp. The
-- block looks like this:
--
-- middle_pre -- the middle nodes
-- Sp = Sp + sp_off -- Sp adjustment goes here
-- last -- the last node
--
-- And we have some extra blocks too (that don't contain Sp adjustments)
--
-- The adjustment for middle_pre will be different from that for
-- middle_post, because the Sp adjustment intervenes.
--
manifestSp
:: DynFlags
-> BlockEnv StackMap -- StackMaps for other blocks
-> StackMap -- StackMap for this block
-> ByteOff -- Sp on entry to the block
-> ByteOff -- SpHigh
-> CmmNode C O -- first node
-> [CmmNode O O] -- middle
-> ByteOff -- sp_off
-> CmmNode O C -- last node
-> [CmmBlock] -- new blocks
-> [CmmBlock] -- final blocks with Sp manifest
manifestSp dflags stackmaps stack0 sp0 sp_high
first middle_pre sp_off last fixup_blocks
= final_block : fixup_blocks'
where
area_off = getAreaOff stackmaps
adj_pre_sp, adj_post_sp :: CmmNode e x -> CmmNode e x
adj_pre_sp = mapExpDeep (areaToSp dflags sp0 sp_high area_off)
adj_post_sp = mapExpDeep (areaToSp dflags (sp0 - sp_off) sp_high area_off)
-- Add unwind pseudo-instructions to document Sp level for debugging
add_unwind_info block
| debugLevel dflags > 0 = CmmUnwind Sp sp_unwind : block
| otherwise = block
sp_unwind = CmmRegOff (CmmGlobal Sp) (sp0 - wORD_SIZE dflags)
final_middle = maybeAddSpAdj dflags sp_off $
blockFromList $
add_unwind_info $
map adj_pre_sp $
elimStackStores stack0 stackmaps area_off $
middle_pre
final_last = optStackCheck (adj_post_sp last)
final_block = blockJoin first final_middle final_last
fixup_blocks' = map (mapBlock3' (id, adj_post_sp, id)) fixup_blocks
getAreaOff :: BlockEnv StackMap -> (Area -> StackLoc)
getAreaOff _ Old = 0
getAreaOff stackmaps (Young l) =
case mapLookup l stackmaps of
Just sm -> sm_sp sm - sm_args sm
Nothing -> pprPanic "getAreaOff" (ppr l)
maybeAddSpAdj :: DynFlags -> ByteOff -> Block CmmNode O O -> Block CmmNode O O
maybeAddSpAdj _ 0 block = block
maybeAddSpAdj dflags sp_off block
= block `blockSnoc` CmmAssign spReg (cmmOffset dflags (CmmReg spReg) sp_off)
{-
Sp(L) is the Sp offset on entry to block L relative to the base of the
OLD area.
SpArgs(L) is the size of the young area for L, i.e. the number of
arguments.
- in block L, each reference to [old + N] turns into
[Sp + Sp(L) - N]
- in block L, each reference to [young(L') + N] turns into
[Sp + Sp(L) - Sp(L') + SpArgs(L') - N]
- be careful with the last node of each block: Sp has already been adjusted
to be Sp + Sp(L) - Sp(L')
-}
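-- Worked example of the two rules above (offsets are hypothetical): in a
-- block L with Sp(L) = 24, the slot [old + 8] becomes [Sp + 16]; and with
-- Sp(L') = 40 and SpArgs(L') = 16, the slot [young(L') + 8] becomes
-- [Sp + 24 - 40 + 16 - 8] = [Sp - 8].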
areaToSp :: DynFlags -> ByteOff -> ByteOff -> (Area -> StackLoc) -> CmmExpr -> CmmExpr
areaToSp dflags sp_old _sp_hwm area_off (CmmStackSlot area n)
= cmmOffset dflags (CmmReg spReg) (sp_old - area_off area - n)
-- Replace (CmmStackSlot area n) with an offset from Sp
areaToSp dflags _ sp_hwm _ (CmmLit CmmHighStackMark)
= mkIntExpr dflags sp_hwm
-- Replace CmmHighStackMark with the number of bytes of stack used,
-- the sp_hwm. See Note [Stack usage] in StgCmmHeap
areaToSp dflags _ _ _ (CmmMachOp (MO_U_Lt _) args)
| falseStackCheck args
= zeroExpr dflags
areaToSp dflags _ _ _ (CmmMachOp (MO_U_Ge _) args)
| falseStackCheck args
= mkIntExpr dflags 1
-- Replace a stack-overflow test that cannot fail with a no-op
-- See Note [Always false stack check]
areaToSp _ _ _ _ other = other
-- | Determine whether a stack check cannot fail.
falseStackCheck :: [CmmExpr] -> Bool
falseStackCheck [ CmmMachOp (MO_Sub _)
[ CmmRegOff (CmmGlobal Sp) x_off
, CmmLit (CmmInt y_lit _)]
, CmmReg (CmmGlobal SpLim)]
= fromIntegral x_off >= y_lit
falseStackCheck _ = False
-- Note [Always false stack check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- We can optimise stack checks of the form
--
-- if ((Sp + x) - y < SpLim) then .. else ..
--
-- where x and y are non-negative integer byte offsets.  Since we know that
-- SpLim <= Sp (remember the stack grows downwards), this test must
-- yield False if (x >= y), so we can rewrite the comparison to False.
-- A subsequent sinking pass will later drop the dead code.
-- Optimising this away depends on knowing that SpLim <= Sp, so it is
-- really the job of the stack layout algorithm, hence we do it now.
--
-- The control flow optimiser may negate a conditional to increase
-- the likelihood of a fallthrough if the branch is not taken. But
-- not every conditional is inverted as the control flow optimiser
-- places some requirements on the predecessors of both branch targets.
-- So we better look for the inverted comparison too.
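-- For instance, a check of the form ((Sp + 24) - 16 < SpLim) has x = 24 and
-- y = 16; since 24 >= 16, falseStackCheck accepts it, areaToSp rewrites the
-- comparison to the literal 0, and optStackCheck below then replaces the
-- conditional branch with a direct jump to its false successor.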
optStackCheck :: CmmNode O C -> CmmNode O C
optStackCheck n = -- Note [Always false stack check]
case n of
CmmCondBranch (CmmLit (CmmInt 0 _)) _true false _ -> CmmBranch false
CmmCondBranch (CmmLit (CmmInt _ _)) true _false _ -> CmmBranch true
other -> other
-- -----------------------------------------------------------------------------
-- | Eliminate stores of the form
--
-- Sp[area+n] = r
--
-- when we know that r is already in the same slot as Sp[area+n]. We
-- could do this in a later optimisation pass, but that would involve
-- a separate analysis and we already have the information to hand
-- here. It helps clean up some extra stack stores in common cases.
--
-- Note that we may have to modify the StackMap as we walk through the
-- code using procMiddle, since an assignment to a variable in the
-- StackMap will invalidate its mapping there.
--
elimStackStores :: StackMap
-> BlockEnv StackMap
-> (Area -> ByteOff)
-> [CmmNode O O]
-> [CmmNode O O]
elimStackStores stackmap stackmaps area_off nodes
= go stackmap nodes
where
go _stackmap [] = []
go stackmap (n:ns)
= case n of
CmmStore (CmmStackSlot area m) (CmmReg (CmmLocal r))
| Just (_,off) <- lookupUFM (sm_regs stackmap) r
, area_off area + m == off
-> go stackmap ns
_otherwise
-> n : go (procMiddle stackmaps n stackmap) ns
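-- Example: if the incoming StackMap records r at byte offset 24 and the next
-- node is "Sp[Old + 24] = r" (so area_off Old + 24 == 24), the store is
-- dropped; any other node is kept and threaded through procMiddle so later
-- stores still see an up-to-date StackMap.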
-- -----------------------------------------------------------------------------
-- Update info tables to include stack liveness
setInfoTableStackMap :: DynFlags -> BlockEnv StackMap -> CmmDecl -> CmmDecl
setInfoTableStackMap dflags stackmaps (CmmProc top_info@TopInfo{..} l v g)
= CmmProc top_info{ info_tbls = mapMapWithKey fix_info info_tbls } l v g
where
fix_info lbl info_tbl@CmmInfoTable{ cit_rep = StackRep _ } =
info_tbl { cit_rep = StackRep (get_liveness lbl) }
fix_info _ other = other
get_liveness :: BlockId -> Liveness
get_liveness lbl
= case mapLookup lbl stackmaps of
Nothing -> pprPanic "setInfoTableStackMap" (ppr lbl <+> ppr info_tbls)
Just sm -> stackMapToLiveness dflags sm
setInfoTableStackMap _ _ d = d
stackMapToLiveness :: DynFlags -> StackMap -> Liveness
stackMapToLiveness dflags StackMap{..} =
reverse $ Array.elems $
accumArray (\_ x -> x) True (toWords dflags sm_ret_off + 1,
toWords dflags (sm_sp - sm_args)) live_words
where
live_words = [ (toWords dflags off, False)
| (r,off) <- eltsUFM sm_regs, isGcPtrType (localRegType r) ]
-- -----------------------------------------------------------------------------
-- Lowering safe foreign calls
{-
Note [Lower safe foreign calls]
We start with
Sp[young(L1)] = L1
,-----------------------
| r1 = foo(x,y,z) returns to L1
'-----------------------
L1:
R1 = r1 -- copyIn, inserted by mkSafeCall
...
the stack layout algorithm will arrange to save and reload everything
live across the call. Our job now is to expand the call so we get
Sp[young(L1)] = L1
,-----------------------
| SAVE_THREAD_STATE()
| token = suspendThread(BaseReg, interruptible)
| r = foo(x,y,z)
| BaseReg = resumeThread(token)
| LOAD_THREAD_STATE()
| R1 = r -- copyOut
| jump Sp[0]
'-----------------------
L1:
r = R1 -- copyIn, inserted by mkSafeCall
...
Note the copyOut, which saves the results in the places that L1 is
-- expecting them (see Note [safe foreign call convention]). Note also
-- that the safe foreign call is replaced by an unsafe one in the Cmm graph.
-}
lowerSafeForeignCall :: DynFlags -> CmmBlock -> UniqSM CmmBlock
lowerSafeForeignCall dflags block
| (entry@(CmmEntry _ tscp), middle, CmmForeignCall { .. }) <- blockSplit block
= do
-- Both 'id' and 'new_base' are KindNonPtr because they're
-- RTS-only objects and are not subject to garbage collection
id <- newTemp (bWord dflags)
new_base <- newTemp (cmmRegType dflags (CmmGlobal BaseReg))
let (caller_save, caller_load) = callerSaveVolatileRegs dflags
save_state_code <- saveThreadState dflags
load_state_code <- loadThreadState dflags
let suspend = save_state_code <*>
caller_save <*>
mkMiddle (callSuspendThread dflags id intrbl)
midCall = mkUnsafeCall tgt res args
resume = mkMiddle (callResumeThread new_base id) <*>
-- Assign the result to BaseReg: we
-- might now have a different Capability!
mkAssign (CmmGlobal BaseReg) (CmmReg (CmmLocal new_base)) <*>
caller_load <*>
load_state_code
(_, regs, copyout) =
copyOutOflow dflags NativeReturn Jump (Young succ)
(map (CmmReg . CmmLocal) res)
ret_off []
-- NB. after resumeThread returns, the top-of-stack probably contains
-- the stack frame for succ, but it might not: if the current thread
-- received an exception during the call, then the stack might be
-- different. Hence we continue by jumping to the top stack frame,
-- not by jumping to succ.
jump = CmmCall { cml_target = entryCode dflags $
CmmLoad (CmmReg spReg) (bWord dflags)
, cml_cont = Just succ
, cml_args_regs = regs
, cml_args = widthInBytes (wordWidth dflags)
, cml_ret_args = ret_args
, cml_ret_off = ret_off }
graph' <- lgraphOfAGraph ( suspend <*>
midCall <*>
resume <*>
copyout <*>
mkLast jump, tscp)
case toBlockList graph' of
[one] -> let (_, middle', last) = blockSplit one
in return (blockJoin entry (middle `blockAppend` middle') last)
_ -> panic "lowerSafeForeignCall0"
-- Block doesn't end in a safe foreign call:
| otherwise = return block
foreignLbl :: FastString -> CmmExpr
foreignLbl name = CmmLit (CmmLabel (mkForeignLabel name Nothing ForeignLabelInExternalPackage IsFunction))
callSuspendThread :: DynFlags -> LocalReg -> Bool -> CmmNode O O
callSuspendThread dflags id intrbl =
CmmUnsafeForeignCall
(ForeignTarget (foreignLbl (fsLit "suspendThread"))
(ForeignConvention CCallConv [AddrHint, NoHint] [AddrHint] CmmMayReturn))
[id] [CmmReg (CmmGlobal BaseReg), mkIntExpr dflags (fromEnum intrbl)]
callResumeThread :: LocalReg -> LocalReg -> CmmNode O O
callResumeThread new_base id =
CmmUnsafeForeignCall
(ForeignTarget (foreignLbl (fsLit "resumeThread"))
(ForeignConvention CCallConv [AddrHint] [AddrHint] CmmMayReturn))
[new_base] [CmmReg (CmmLocal id)]
-- -----------------------------------------------------------------------------
plusW :: DynFlags -> ByteOff -> WordOff -> ByteOff
plusW dflags b w = b + w * wORD_SIZE dflags
data StackSlot = Occupied | Empty
-- Occupied: a return address or part of an update frame
instance Outputable StackSlot where
ppr Occupied = text "XXX"
ppr Empty = text "---"
dropEmpty :: WordOff -> [StackSlot] -> Maybe [StackSlot]
dropEmpty 0 ss = Just ss
dropEmpty n (Empty : ss) = dropEmpty (n-1) ss
dropEmpty _ _ = Nothing
isEmpty :: StackSlot -> Bool
isEmpty Empty = True
isEmpty _ = False
localRegBytes :: DynFlags -> LocalReg -> ByteOff
localRegBytes dflags r
= roundUpToWords dflags (widthInBytes (typeWidth (localRegType r)))
localRegWords :: DynFlags -> LocalReg -> WordOff
localRegWords dflags = toWords dflags . localRegBytes dflags
toWords :: DynFlags -> ByteOff -> WordOff
toWords dflags x = x `quot` wORD_SIZE dflags
insertReloads :: StackMap -> [CmmNode O O]
insertReloads stackmap =
[ CmmAssign (CmmLocal r) (CmmLoad (CmmStackSlot Old sp)
(localRegType r))
| (r,sp) <- stackSlotRegs stackmap
]
stackSlotRegs :: StackMap -> [(LocalReg, StackLoc)]
stackSlotRegs sm = eltsUFM (sm_regs sm)
| tjakway/ghcjvm | compiler/cmm/CmmLayoutStack.hs | bsd-3-clause | 42,312 | 1 | 25 | 12,462 | 7,335 | 3,924 | 3,411 | 510 | 6 |
module Main (main) where
main :: IO ()
main = return ()
| sonyandy/wart | tools/wartc.hs | bsd-3-clause | 57 | 0 | 6 | 13 | 29 | 16 | 13 | 3 | 1 |
{-
(c) The University of Glasgow, 2004-2006
Module
~~~~~~~~~~
Simply the name of a module, represented as a FastString.
These are Uniquable, hence we can build Maps with Modules as
the keys.
-}
{-# LANGUAGE DeriveDataTypeable #-}
module Module
(
-- * The ModuleName type
ModuleName,
pprModuleName,
moduleNameFS,
moduleNameString,
moduleNameSlashes, moduleNameColons,
mkModuleName,
mkModuleNameFS,
stableModuleNameCmp,
-- * The PackageKey type
PackageKey,
fsToPackageKey,
packageKeyFS,
stringToPackageKey,
packageKeyString,
stablePackageKeyCmp,
-- * Wired-in PackageKeys
-- $wired_in_packages
primPackageKey,
integerPackageKey,
basePackageKey,
rtsPackageKey,
thPackageKey,
dphSeqPackageKey,
dphParPackageKey,
mainPackageKey,
thisGhcPackageKey,
interactivePackageKey, isInteractiveModule,
wiredInPackageKeys,
-- * The Module type
Module(Module),
modulePackageKey, moduleName,
pprModule,
mkModule,
stableModuleCmp,
HasModule(..),
ContainsModule(..),
-- * The ModuleLocation type
ModLocation(..),
addBootSuffix, addBootSuffix_maybe, addBootSuffixLocn,
-- * Module mappings
ModuleEnv,
elemModuleEnv, extendModuleEnv, extendModuleEnvList,
extendModuleEnvList_C, plusModuleEnv_C,
delModuleEnvList, delModuleEnv, plusModuleEnv, lookupModuleEnv,
lookupWithDefaultModuleEnv, mapModuleEnv, mkModuleEnv, emptyModuleEnv,
moduleEnvKeys, moduleEnvElts, moduleEnvToList,
unitModuleEnv, isEmptyModuleEnv,
foldModuleEnv, extendModuleEnvWith, filterModuleEnv,
-- * ModuleName mappings
ModuleNameEnv,
-- * Sets of Modules
ModuleSet,
emptyModuleSet, mkModuleSet, moduleSetElts, extendModuleSet, elemModuleSet
) where
import Config
import Outputable
import Unique
import UniqFM
import FastString
import Binary
import Util
import {-# SOURCE #-} Packages
import GHC.PackageDb (BinaryStringRep(..))
import Data.Data
import Data.Map (Map)
import qualified Data.Map as Map
import qualified FiniteMap as Map
import System.FilePath
{-
************************************************************************
* *
\subsection{Module locations}
* *
************************************************************************
-}
-- | Where a module lives on the file system: the actual locations
-- of the .hs, .hi and .o files, if we have them
data ModLocation
= ModLocation {
ml_hs_file :: Maybe FilePath,
-- The source file, if we have one. Package modules
-- probably don't have source files.
ml_hi_file :: FilePath,
-- Where the .hi file is, whether or not it exists
-- yet. Always of form foo.hi, even if there is an
-- hi-boot file (we add the -boot suffix later)
ml_obj_file :: FilePath
-- Where the .o file is, whether or not it exists yet.
-- (might not exist either because the module hasn't
-- been compiled yet, or because it is part of a
-- package with a .a file)
} deriving Show
instance Outputable ModLocation where
ppr = text . show
{-
For a module in another package, the hs_file and obj_file
components of ModLocation are undefined.
The locations specified by a ModLocation may or may not
correspond to actual files yet: for example, even if the object
file doesn't exist, the ModLocation still contains the path to
where the object file will reside if/when it is created.
-}
addBootSuffix :: FilePath -> FilePath
-- ^ Add the @-boot@ suffix to .hs, .hi and .o files
addBootSuffix path = path ++ "-boot"
addBootSuffix_maybe :: Bool -> FilePath -> FilePath
-- ^ Add the @-boot@ suffix if the @Bool@ argument is @True@
addBootSuffix_maybe is_boot path
| is_boot = addBootSuffix path
| otherwise = path
addBootSuffixLocn :: ModLocation -> ModLocation
-- ^ Add the @-boot@ suffix to all file paths associated with the module
addBootSuffixLocn locn
= locn { ml_hs_file = fmap addBootSuffix (ml_hs_file locn)
, ml_hi_file = addBootSuffix (ml_hi_file locn)
, ml_obj_file = addBootSuffix (ml_obj_file locn) }
{-
************************************************************************
* *
\subsection{The name of a module}
* *
************************************************************************
-}
-- | A ModuleName is essentially a simple string, e.g. @Data.List@.
newtype ModuleName = ModuleName FastString
deriving Typeable
instance Uniquable ModuleName where
getUnique (ModuleName nm) = getUnique nm
instance Eq ModuleName where
nm1 == nm2 = getUnique nm1 == getUnique nm2
-- Warning: gives an ordering relation based on the uniques of the
-- FastStrings which are the (encoded) module names. This is _not_
-- a lexicographical ordering.
instance Ord ModuleName where
nm1 `compare` nm2 = getUnique nm1 `compare` getUnique nm2
instance Outputable ModuleName where
ppr = pprModuleName
instance Binary ModuleName where
put_ bh (ModuleName fs) = put_ bh fs
get bh = do fs <- get bh; return (ModuleName fs)
instance BinaryStringRep ModuleName where
fromStringRep = mkModuleNameFS . mkFastStringByteString
toStringRep = fastStringToByteString . moduleNameFS
instance Data ModuleName where
-- don't traverse?
toConstr _ = abstractConstr "ModuleName"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "ModuleName"
stableModuleNameCmp :: ModuleName -> ModuleName -> Ordering
-- ^ Compares module names lexically, rather than by their 'Unique's
stableModuleNameCmp n1 n2 = moduleNameFS n1 `compare` moduleNameFS n2
pprModuleName :: ModuleName -> SDoc
pprModuleName (ModuleName nm) =
getPprStyle $ \ sty ->
if codeStyle sty
then ztext (zEncodeFS nm)
else ftext nm
moduleNameFS :: ModuleName -> FastString
moduleNameFS (ModuleName mod) = mod
moduleNameString :: ModuleName -> String
moduleNameString (ModuleName mod) = unpackFS mod
mkModuleName :: String -> ModuleName
mkModuleName s = ModuleName (mkFastString s)
mkModuleNameFS :: FastString -> ModuleName
mkModuleNameFS s = ModuleName s
-- |Returns the string version of the module name, with dots replaced by slashes.
--
moduleNameSlashes :: ModuleName -> String
moduleNameSlashes = dots_to_slashes . moduleNameString
where dots_to_slashes = map (\c -> if c == '.' then pathSeparator else c)
-- |Returns the string version of the module name, with dots replaced by colons.
--
moduleNameColons :: ModuleName -> String
moduleNameColons = dots_to_colons . moduleNameString
where dots_to_colons = map (\c -> if c == '.' then ':' else c)
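-- For example: @moduleNameSlashes (mkModuleName "Data.Map.Strict")@ is
-- "Data/Map/Strict" on platforms where 'pathSeparator' is '/', and
-- @moduleNameColons (mkModuleName "Data.Map.Strict")@ is "Data:Map:Strict".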
{-
************************************************************************
* *
\subsection{A fully qualified module}
* *
************************************************************************
-}
-- | A Module is a pair of a 'PackageKey' and a 'ModuleName'.
data Module = Module {
modulePackageKey :: !PackageKey, -- pkg-1.0
moduleName :: !ModuleName -- A.B.C
}
deriving (Eq, Ord, Typeable)
instance Uniquable Module where
getUnique (Module p n) = getUnique (packageKeyFS p `appendFS` moduleNameFS n)
instance Outputable Module where
ppr = pprModule
instance Binary Module where
put_ bh (Module p n) = put_ bh p >> put_ bh n
get bh = do p <- get bh; n <- get bh; return (Module p n)
instance Data Module where
-- don't traverse?
toConstr _ = abstractConstr "Module"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "Module"
-- | This gives a stable ordering, as opposed to the Ord instance which
-- gives an ordering based on the 'Unique's of the components, which may
-- not be stable from run to run of the compiler.
stableModuleCmp :: Module -> Module -> Ordering
stableModuleCmp (Module p1 n1) (Module p2 n2)
= (p1 `stablePackageKeyCmp` p2) `thenCmp`
(n1 `stableModuleNameCmp` n2)
mkModule :: PackageKey -> ModuleName -> Module
mkModule = Module
pprModule :: Module -> SDoc
pprModule mod@(Module p n) =
pprPackagePrefix p mod <> pprModuleName n
pprPackagePrefix :: PackageKey -> Module -> SDoc
pprPackagePrefix p mod = getPprStyle doc
where
doc sty
| codeStyle sty =
if p == mainPackageKey
then empty -- never qualify the main package in code
else ztext (zEncodeFS (packageKeyFS p)) <> char '_'
| qualModule sty mod = ppr (modulePackageKey mod) <> char ':'
-- the PrintUnqualified tells us which modules have to
-- be qualified with package names
| otherwise = empty
class ContainsModule t where
extractModule :: t -> Module
class HasModule m where
getModule :: m Module
{-
************************************************************************
* *
\subsection{PackageKey}
* *
************************************************************************
-}
-- | A string which uniquely identifies a package. For wired-in packages,
-- it is just the package name, but for user compiled packages, it is a hash.
-- ToDo: when the key is a hash, we can do more clever things than store
-- the hex representation and hash-cons those strings.
newtype PackageKey = PId FastString deriving( Eq, Typeable )
-- here to avoid module loops with PackageConfig
instance Uniquable PackageKey where
getUnique pid = getUnique (packageKeyFS pid)
-- Note: *not* a stable lexicographic ordering, a faster unique-based
-- ordering.
instance Ord PackageKey where
nm1 `compare` nm2 = getUnique nm1 `compare` getUnique nm2
instance Data PackageKey where
-- don't traverse?
toConstr _ = abstractConstr "PackageKey"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "PackageKey"
stablePackageKeyCmp :: PackageKey -> PackageKey -> Ordering
-- ^ Compares package ids lexically, rather than by their 'Unique's
stablePackageKeyCmp p1 p2 = packageKeyFS p1 `compare` packageKeyFS p2
instance Outputable PackageKey where
ppr pk = getPprStyle $ \sty -> sdocWithDynFlags $ \dflags ->
text (packageKeyPackageIdString dflags pk)
-- Don't bother qualifying if it's wired in!
<> (if qualPackage sty pk && not (pk `elem` wiredInPackageKeys)
then char '@' <> ftext (packageKeyFS pk)
else empty)
instance Binary PackageKey where
put_ bh pid = put_ bh (packageKeyFS pid)
get bh = do { fs <- get bh; return (fsToPackageKey fs) }
instance BinaryStringRep PackageKey where
fromStringRep = fsToPackageKey . mkFastStringByteString
toStringRep = fastStringToByteString . packageKeyFS
fsToPackageKey :: FastString -> PackageKey
fsToPackageKey = PId
packageKeyFS :: PackageKey -> FastString
packageKeyFS (PId fs) = fs
stringToPackageKey :: String -> PackageKey
stringToPackageKey = fsToPackageKey . mkFastString
packageKeyString :: PackageKey -> String
packageKeyString = unpackFS . packageKeyFS
-- -----------------------------------------------------------------------------
-- $wired_in_packages
-- Certain packages are known to the compiler, in that we know about certain
-- entities that reside in these packages, and the compiler needs to
-- declare static Modules and Names that refer to these packages. Hence
-- the wired-in packages can't include version numbers, since we don't want
-- to bake the version numbers of these packages into GHC.
--
-- So here's the plan. Wired-in packages are still versioned as
-- normal in the packages database, and you can still have multiple
-- versions of them installed. However, for each invocation of GHC,
-- only a single instance of each wired-in package will be recognised
-- (the desired one is selected via @-package@\/@-hide-package@), and GHC
-- will use the unversioned 'PackageKey' below when referring to it,
-- including in .hi files and object file symbols. Unselected
-- versions of wired-in packages will be ignored, as will any other
-- package that depends directly or indirectly on it (much as if you
-- had used @-ignore-package@).
-- Make sure you change 'Packages.findWiredInPackages' if you add an entry here
integerPackageKey, primPackageKey,
basePackageKey, rtsPackageKey,
thPackageKey, dphSeqPackageKey, dphParPackageKey,
mainPackageKey, thisGhcPackageKey, interactivePackageKey :: PackageKey
primPackageKey = fsToPackageKey (fsLit "ghc-prim")
integerPackageKey = fsToPackageKey (fsLit n)
where
n = case cIntegerLibraryType of
IntegerGMP -> "integer-gmp"
IntegerGMP2 -> "integer-gmp"
IntegerSimple -> "integer-simple"
basePackageKey = fsToPackageKey (fsLit "base")
rtsPackageKey = fsToPackageKey (fsLit "rts")
thPackageKey = fsToPackageKey (fsLit "template-haskell")
dphSeqPackageKey = fsToPackageKey (fsLit "dph-seq")
dphParPackageKey = fsToPackageKey (fsLit "dph-par")
thisGhcPackageKey = fsToPackageKey (fsLit "ghc")
interactivePackageKey = fsToPackageKey (fsLit "interactive")
-- | This is the package Id for the current program. It is the default
-- package Id if you don't specify a package name. We don't add this prefix
-- to symbol names, since there can be only one main package per program.
mainPackageKey = fsToPackageKey (fsLit "main")
isInteractiveModule :: Module -> Bool
isInteractiveModule mod = modulePackageKey mod == interactivePackageKey
wiredInPackageKeys :: [PackageKey]
wiredInPackageKeys = [ primPackageKey,
integerPackageKey,
basePackageKey,
rtsPackageKey,
thPackageKey,
thisGhcPackageKey,
dphSeqPackageKey,
dphParPackageKey ]
{-
************************************************************************
* *
\subsection{@ModuleEnv@s}
* *
************************************************************************
-}
-- | A map keyed off of 'Module's
newtype ModuleEnv elt = ModuleEnv (Map Module elt)
filterModuleEnv :: (Module -> a -> Bool) -> ModuleEnv a -> ModuleEnv a
filterModuleEnv f (ModuleEnv e) = ModuleEnv (Map.filterWithKey f e)
elemModuleEnv :: Module -> ModuleEnv a -> Bool
elemModuleEnv m (ModuleEnv e) = Map.member m e
extendModuleEnv :: ModuleEnv a -> Module -> a -> ModuleEnv a
extendModuleEnv (ModuleEnv e) m x = ModuleEnv (Map.insert m x e)
extendModuleEnvWith :: (a -> a -> a) -> ModuleEnv a -> Module -> a -> ModuleEnv a
extendModuleEnvWith f (ModuleEnv e) m x = ModuleEnv (Map.insertWith f m x e)
extendModuleEnvList :: ModuleEnv a -> [(Module, a)] -> ModuleEnv a
extendModuleEnvList (ModuleEnv e) xs = ModuleEnv (Map.insertList xs e)
extendModuleEnvList_C :: (a -> a -> a) -> ModuleEnv a -> [(Module, a)]
-> ModuleEnv a
extendModuleEnvList_C f (ModuleEnv e) xs = ModuleEnv (Map.insertListWith f xs e)
plusModuleEnv_C :: (a -> a -> a) -> ModuleEnv a -> ModuleEnv a -> ModuleEnv a
plusModuleEnv_C f (ModuleEnv e1) (ModuleEnv e2) = ModuleEnv (Map.unionWith f e1 e2)
delModuleEnvList :: ModuleEnv a -> [Module] -> ModuleEnv a
delModuleEnvList (ModuleEnv e) ms = ModuleEnv (Map.deleteList ms e)
delModuleEnv :: ModuleEnv a -> Module -> ModuleEnv a
delModuleEnv (ModuleEnv e) m = ModuleEnv (Map.delete m e)
plusModuleEnv :: ModuleEnv a -> ModuleEnv a -> ModuleEnv a
plusModuleEnv (ModuleEnv e1) (ModuleEnv e2) = ModuleEnv (Map.union e1 e2)
lookupModuleEnv :: ModuleEnv a -> Module -> Maybe a
lookupModuleEnv (ModuleEnv e) m = Map.lookup m e
lookupWithDefaultModuleEnv :: ModuleEnv a -> a -> Module -> a
lookupWithDefaultModuleEnv (ModuleEnv e) x m = Map.findWithDefault x m e
mapModuleEnv :: (a -> b) -> ModuleEnv a -> ModuleEnv b
mapModuleEnv f (ModuleEnv e) = ModuleEnv (Map.mapWithKey (\_ v -> f v) e)
mkModuleEnv :: [(Module, a)] -> ModuleEnv a
mkModuleEnv xs = ModuleEnv (Map.fromList xs)
emptyModuleEnv :: ModuleEnv a
emptyModuleEnv = ModuleEnv Map.empty
moduleEnvKeys :: ModuleEnv a -> [Module]
moduleEnvKeys (ModuleEnv e) = Map.keys e
moduleEnvElts :: ModuleEnv a -> [a]
moduleEnvElts (ModuleEnv e) = Map.elems e
moduleEnvToList :: ModuleEnv a -> [(Module, a)]
moduleEnvToList (ModuleEnv e) = Map.toList e
unitModuleEnv :: Module -> a -> ModuleEnv a
unitModuleEnv m x = ModuleEnv (Map.singleton m x)
isEmptyModuleEnv :: ModuleEnv a -> Bool
isEmptyModuleEnv (ModuleEnv e) = Map.null e
foldModuleEnv :: (a -> b -> b) -> b -> ModuleEnv a -> b
foldModuleEnv f x (ModuleEnv e) = Map.foldRightWithKey (\_ v -> f v) x e
-- | A set of 'Module's
type ModuleSet = Map Module ()
mkModuleSet :: [Module] -> ModuleSet
extendModuleSet :: ModuleSet -> Module -> ModuleSet
emptyModuleSet :: ModuleSet
moduleSetElts :: ModuleSet -> [Module]
elemModuleSet :: Module -> ModuleSet -> Bool
emptyModuleSet = Map.empty
mkModuleSet ms = Map.fromList [(m,()) | m <- ms ]
extendModuleSet s m = Map.insert m () s
moduleSetElts = Map.keys
elemModuleSet = Map.member
{-
A ModuleName has a Unique, so we can build mappings of these using
UniqFM.
-}
-- | A map keyed off of 'ModuleName's (actually, their 'Unique's)
type ModuleNameEnv elt = UniqFM elt
| green-haskell/ghc | compiler/basicTypes/Module.hs | bsd-3-clause | 18,016 | 0 | 16 | 4,292 | 3,472 | 1,865 | 1,607 | 283 | 3 |
{-# LANGUAGE TemplateHaskell, TypeFamilies, DeriveDataTypeable, NamedFieldPuns #-}
module Distribution.Server.Features.TarIndexCache.State (
TarIndexCache(..)
, initialTarIndexCache
, GetTarIndexCache(GetTarIndexCache)
, ReplaceTarIndexCache(ReplaceTarIndexCache)
, FindTarIndex(FindTarIndex)
, SetTarIndex(SetTarIndex)
) where
-- TODO: use strict map? (Can we rely on containers >= 0.5?)
import Data.Typeable (Typeable)
import Control.Monad.Reader (ask, asks)
import Control.Monad.State (put, modify)
import Data.Map (Map)
import qualified Data.Map as Map
import Control.Applicative ((<$>))
import Data.Acid (Query, Update, makeAcidic)
import Data.SafeCopy (base, deriveSafeCopy)
import Distribution.Server.Framework.BlobStorage
import Distribution.Server.Framework.MemSize
data TarIndexCache = TarIndexCache {
tarIndexCacheMap :: Map BlobId BlobId
}
deriving (Eq, Show, Typeable)
$(deriveSafeCopy 0 'base ''TarIndexCache)
instance MemSize TarIndexCache where
memSize st = 2 + memSize (tarIndexCacheMap st)
initialTarIndexCache :: TarIndexCache
initialTarIndexCache = TarIndexCache (Map.empty)
getTarIndexCache :: Query TarIndexCache TarIndexCache
getTarIndexCache = ask
replaceTarIndexCache :: TarIndexCache -> Update TarIndexCache ()
replaceTarIndexCache = put
getTarIndexCacheMap :: Query TarIndexCache (Map BlobId BlobId)
getTarIndexCacheMap = asks tarIndexCacheMap
modifyTarIndexCacheMap :: (Map BlobId BlobId -> Map BlobId BlobId)
-> Update TarIndexCache ()
modifyTarIndexCacheMap f = modify $ \st@TarIndexCache{tarIndexCacheMap} ->
st { tarIndexCacheMap = f tarIndexCacheMap }
findTarIndex :: BlobId -> Query TarIndexCache (Maybe BlobId)
findTarIndex blobId = Map.lookup blobId <$> getTarIndexCacheMap
setTarIndex :: BlobId -> BlobId -> Update TarIndexCache ()
setTarIndex tar index = modifyTarIndexCacheMap (Map.insert tar index)
makeAcidic ''TarIndexCache [
'getTarIndexCache
, 'replaceTarIndexCache
, 'findTarIndex
, 'setTarIndex
]
| ocharles/hackage-server | Distribution/Server/Features/TarIndexCache/State.hs | bsd-3-clause | 2,048 | 0 | 9 | 312 | 499 | 283 | 216 | 54 | 1 |
{-# LANGUAGE QuasiQuotes, TypeFamilies, TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ViewPatterns #-}
module Yesod.Auth.Routes where
import Yesod.Core
import Data.Text (Text)
data Auth = Auth
mkYesodSubData "Auth" [parseRoutes|
/check CheckR GET
/login LoginR GET
/logout LogoutR GET POST
/page/#Text/*Texts PluginR
|]
| pikajude/yesod | yesod-auth/Yesod/Auth/Routes.hs | mit | 582 | 0 | 5 | 130 | 49 | 33 | 16 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module System.Logging.Facade.Journald.Internal where
import Data.HashMap.Strict
import Data.Monoid
import Data.String
import qualified Data.Text.Encoding as Text
import System.Logging.Facade.Types
import Systemd.Journal
logRecordToJournalFields :: LogRecord -> JournalFields
logRecordToJournalFields record =
locationFields <>
priority (logLevelToPriority (logRecordLevel record)) <>
message (fromString (logRecordMessage record))
where
locationFields =
fromList $ maybe [] toLocationFields (logRecordLocation record)
toLocationFields loc =
("CODE_FILE", encodeUtf8 (locationFile loc)) :
("CODE_LINE", fromString (show (locationLine loc))) :
[]
encodeUtf8 = Text.encodeUtf8 . fromString
logLevelToPriority :: LogLevel -> Priority
logLevelToPriority l = case l of
TRACE -> Debug
DEBUG -> Debug
INFO -> Info
WARN -> Warning
ERROR -> Error
| zalora/logging-facade-journald | src/System/Logging/Facade/Journald/Internal.hs | mit | 977 | 0 | 14 | 202 | 244 | 133 | 111 | 27 | 5 |
{-# htermination (^^) :: (Ratio Int) -> Int -> (Ratio Int) #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_CARETCARET_3.hs | mit | 63 | 0 | 2 | 12 | 3 | 2 | 1 | 1 | 0 |
module RAE
where
import Blaze
import Tree
import Text.Printf
import Data.Number.LogFloat (logFloat,fromLogFloat)
import Data.List
-- Pretty-print expressions
showE :: State -> String
showE (Node (SNode _ (DoubleDatum d)) [] _) = printf "%0.3f" d
showE (Node (SNode _ (StringDatum s)) [] _) = s
showE (Node (SNode _ (StringDatum s)) cs _)
| (s == "and" ) = printf "(and %s)" (intercalate " " scs)
| (s == "list") = printf "(list %s)" (intercalate " " scs)
where scs = map showE cs
showE (Node (SNode _ (StringDatum s)) [c1] _)
| (s == "eval" ) = printf "(eval %s)" sc1
| (s == "thunk" ) = printf "(thunk %s)" sc1
| (s == "unthunk") = printf "(unthunk %s)" sc1
| (s == "abs" ) = printf "abs(%s)" sc1
| (s == "neg" ) = printf "-(%s)" sc1
| (s == "flip" ) = printf "flip(%s)" sc1
| (s == "sampint") = printf "sampint(%s)" sc1
| (s == "draw" ) = printf "draw(%s)" sc1
| (s == "num" ) = sc1
| (s == "var" ) = sc1
| otherwise = error $ printf "showE: invalid unary op %s" s
where sc1 = showE c1
showE (Node (SNode _ (StringDatum s)) [c1,c2] _)
| (s == "lambda") = printf "(lambda %s %s)" sc1 sc2
| (s == "app" ) = printf "(%s %s)" sc1 sc2
| (s == "+" ) = printf "(%s + %s)" sc1 sc2
| (s == "-" ) = printf "(%s - %s)" sc1 sc2
| (s == "*" ) = printf "(%s * %s)" sc1 sc2
| (s == "exp" ) = printf "(%s ^ %s)" sc1 sc2
| (s == "max" ) = printf "max(%s, %s)" sc1 sc2
| otherwise = error $ printf "showE: invalid binary op %s" s
where [sc1,sc2] = map showE [c1,c2]
showE (Node (SNode _ (StringDatum s)) [c1,c2,c3] _)
| (s == "let" ) = printf "(let (%s = %s) in %s)" sc1 sc2 sc3
| (s == "if" ) = printf "(if %s then %s else %s)" sc1 sc2 sc3
| otherwise = error $ printf "showE: invalid ternary op %s" s
where [sc1,sc2,sc3] = map showE [c1,c2,c3]
showE (Node (SNode _ (StringDatum s)) _ _) =
  error $ printf "showE: invalid multi op %s" s
-- Helper functions for building expressions.
nullaryOp :: String -> State
nullaryOp s = mkStringData s
unaryOp :: String -> State -> State
unaryOp s e1 = mkStringData s `addChild` e1
binaryOp :: String -> State -> State -> State
binaryOp s e1 e2 = mkStringData s `collectStates` [e2,e1]
ternaryOp :: String -> State -> State -> State -> State
ternaryOp s e1 e2 e3 = mkStringData s `collectStates` [e3,e2,e1]
multiOp :: String -> [State] -> State
multiOp s es = mkStringData s `collectStates` (reverse es)
-- In the absence of a parser, I use these functions to build an AST
thunkE = unaryOp "thunk"
unthunkE = unaryOp "unthunk"
lambdaE = binaryOp "lambda"
appE = binaryOp "app"
evalE = unaryOp "eval"
letE = ternaryOp "let"
absE = unaryOp "abs"
negE = unaryOp "neg"
addE = binaryOp "+"
subE = binaryOp "-"
mulE = binaryOp "*"
expE = binaryOp "exp"
maxE = binaryOp "max"
trueE = nullaryOp "true"
falseE = nullaryOp "false"
andE = multiOp "and"
ifE = ternaryOp "if"
flipE = unaryOp "flip"
sampintE = unaryOp "sampint"
drawE = unaryOp "draw"
listE = multiOp "list"
numE = mkDoubleData
varE = mkStringData
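-- Example (illustrative): addE (numE 1) (numE 2) builds the AST that showE
-- pretty-prints as "(1.000 + 2.000)".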
-- Building an evaluator for testing; this will eventually be worked
-- into a Kernel and Density.
getCode :: State -> State
getCode = getTA ["code"]
getSteps :: State -> State
getSteps = getTA ["steps"]
getProbs :: State -> State
getProbs = getTA ["probs"]
getEnv :: State -> State
getEnv = getTA ["env"]
showEnv :: State -> String
showEnv s | (null senvcs) = "(empty env)"
| otherwise = intercalate ", " $ map se senvcs
where senvcs = children $ getEnv s
se e = printf "%s => (%s, %s)" (tagged e)
(showE $ getTA ["bstate"] s) (showEnv $ getTA ["benv"] s)
showNode :: State -> String
showNode s = printf "Code: %s\nEnv: %s\nProb: %s" (showE . getCode $ s)
(showEnv s)
(show . map doubleVal . children . getProbs $ s)
isValue :: State -> Bool
isValue (Node (SNode _ (DoubleDatum _ )) _ _) = True
isValue (Node (SNode _ (StringDatum "list")) cs _) = all isValue cs
isValue (Node (SNode _ (StringDatum ctag )) cs _) =
ctag `elem` ["thunk","lambda","true","false"]
envlookup :: String -> State -> Maybe State
envlookup ltag s | (ltag `elem` map tagged cs) = Just (getTagged ltag cs)
| otherwise = Nothing
where cs = children . getEnv $ s
evalhelper :: State -> ([State],[Double],[State])
evalhelper sl = ([(Node (SNode [] newctag) newvals "")], [1.0], [])
where (Node (SNode [] (StringDatum "list")) (newhead:newvals) _) = sl
(Node (SNode [] newctag ) _ _) = newhead
ifhelper :: State -> State -> State -> ([State],[Double],[State])
ifhelper (Node (SNode [] (StringDatum "true" )) [] _) st sf = ([st], [1.0], [])
ifhelper (Node (SNode [] (StringDatum "false")) [] _) st sf = ([sf], [1.0], [])
ifhelper _ _ _ =
error "ifhelper: conditional must be a boolean"
apphelper :: State -> State -> ([State],[Double],[State])
apphelper op operand = ([ope], [1.0], [operand `tag` (stringVal opv)])
where (Node (SNode [] (StringDatum "lambda")) [opv,ope] _) = op
varhelper :: State -> State -> ([State],[Double],[State])
varhelper s v = ([s'], [1.0], [])
where s' = maybe v id $ envlookup (stringVal v) s
andhelper :: [State] -> ([State],[Double],[State])
andhelper cs
| (not allbool) = error "andhelper: expects only boolean values"
| otherwise = (if alltrue then [trueE] else [falseE], [1.0], [])
where allbool = all (flip elem ["true","false"] . stringVal) cs
alltrue = all ((== "true") . stringVal) cs
fliphelper :: State -> ([State],[Double],[State])
fliphelper c = ([trueE, falseE], [doubleVal c, 1.0 - doubleVal c], [])
sampinthelper :: State -> ([State],[Double],[State])
sampinthelper c = (vals, probs, [])
where vals = map numE [0..(doubleVal c)]
nvals = length vals
probs = replicate nvals (1 / fromIntegral nvals)
drawhelper :: State -> ([State],[Double],[State])
drawhelper c = (vals, probs, [])
where vals = children c
nvals = length vals
probs = replicate nvals (1 / fromIntegral nvals)
holehelper :: State -> String -> [State] -> ([State],[Double],[State])
holehelper s ct cs = (map mkStep e', p', env')
where (vs,(e:es)) = break (not . isValue) cs
(e',p',env') = step' s e
cs' x = vs ++ [x] ++ es
mkStep x = (Node (SNode [] (StringDatum ct)) (cs' x) "")
notFinal :: State -> State -> Bool
notFinal s v@(Node (SNode [] (StringDatum _ )) [] _) = v /= v'
where ([v'],_,_) = varhelper s v
notFinal s v@(Node (SNode [] (StringDatum "list")) cs _) = any (notFinal s) cs
notFinal _ v = not . isValue $ v
-- Step takes a state and produces a list of possible single-step
-- reductions, a list of associated probabilities, and a set of new
-- variable bindings to add to the environment.
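-- For example, stepping a node whose code is (flipE (numE 0.5)) yields the
-- reductions [trueE, falseE] with probabilities [0.5, 0.5] and no new
-- bindings (see fliphelper).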
step :: State -> ([State],[Double],[State])
step s = step' s (getCode s)
step' :: State -> State -> ([State],[Double],[State])
step' s (Node (SNode [] (StringDatum ctag )) cs@[c] _)
| (ctag == "eval" ) = evalhelper c
| (notFinal s c ) = holehelper s ctag cs
| (ctag == "sampint") = sampinthelper c
| (ctag == "unthunk") = ([head . children $ c], [1.0], [])
| (ctag == "flip" ) = fliphelper c
| (ctag == "draw" ) = drawhelper c
| (ctag == "and" ) = andhelper cs
| (ctag == "abs" ) = ([numE (abs $ doubleVal c)], [1.0], [])
| (ctag == "neg" ) = ([numE (negate $ doubleVal c)], [1.0], [])
step' s (Node (SNode [] (StringDatum ctag )) cs@[c1,c2] _)
| (notFinal s c1) = holehelper s ctag cs
| (ctag == "app" ) = apphelper c1 c2
| (notFinal s c2 ) = holehelper s ctag cs
| (ctag == "+" ) = ([numE (doubleVal c1 + doubleVal c2)], [1.0], [])
| (ctag == "-" ) = ([numE (doubleVal c1 - doubleVal c2)], [1.0], [])
| (ctag == "*" ) = ([numE (doubleVal c1 * doubleVal c2)], [1.0], [])
| (ctag == "exp" ) = ([numE (doubleVal c1 ** doubleVal c2)], [1.0], [])
| (ctag == "max" ) = ([numE (max (doubleVal c1) (doubleVal c2))], [1.0], [])
| (ctag == "and" ) = andhelper cs
step' s (Node (SNode [] (StringDatum "let")) cs@[c1,c2,c3] _)
| (notFinal s c2) = holehelper s "let" cs
| otherwise = ([c3], [1.0], [c2 `tag` (stringVal c1)])
step' s (Node (SNode [] (StringDatum "if" )) cs@[c1,c2,c3] _)
| (notFinal s c1) = holehelper s "if" cs
| otherwise = ifhelper c1 c2 c3
step' s s'@(Node (SNode [] (StringDatum ctag )) cs _)
| (any (notFinal s) cs) = holehelper s ctag cs
| (ctag == "and" ) = andhelper cs
| (null cs ) = varhelper s s'
-- Code except the program to be executed and the query.
sdefs :: State
sdefs = (andE [(flipE (numE 0.5)), (flipE (numE 0.2)), (flipE (numE 0.1))])
{-
(letE (varE "noisy=")
(lambdaE (varE "x")
(lambdaE (varE "y")
(flipE (expE (numE 0.1)
(absE (subE (varE "x") (varE "y")))))))
(letE (varE "rae")
(thunkE sprogram)
(letE (varE "proc-from-expr")
(lambdaE (varE "expr")
(evalE (listE [(varE "lambda"), (varE "x"), (varE "expr")])))
(letE (varE "my-expr")
(unthunkE (varE "rae"))
(letE (varE "my-proc")
(appE (varE "proc-from-expr") (varE "my-expr"))
squery)))))
-}
-- Program that generates random arithmetic expressions.
sprogram :: State
sprogram =
(ifE (flipE (numE 0.8))
(ifE (flipE (numE 0.5)) (varE "x") (sampintE (numE 10)))
(listE [(drawE
(listE [(varE "+"), (varE "-"), (varE "*"), (varE "max")])),
(unthunkE (varE "rae")),
(unthunkE (varE "rae"))]))
-- Query conditioned on the random arithmetic expression.
squery :: State
squery =
(andE [(appE (appE (varE "noisy=")
(appE (varE "my-proc") (numE (-2)))) (numE 5)),
(appE (appE (varE "noisy=")
(appE (varE "my-proc") (numE 0))) (numE 1)),
(appE (appE (varE "noisy=")
(appE (varE "my-proc") (numE 1))) (numE 2)),
(appE (appE (varE "noisy=")
(appE (varE "my-proc") (numE 2))) (numE 5)),
(appE (appE (varE "noisy=")
(appE (varE "my-proc") (numE 3))) (numE 10))])
buildTree :: State -> State
buildTree s
| (isValue code) = build [code,dummyState,dummyState,env]
| otherwise = build [code,steps, probs, env]
where code = getCode s
env = getEnv s
(steps', probs', env') = step s
env'' = dummyState { children = (env' ++ children env) }
mkHead x = dummyState `collectStates`
[x `tag` "code", env'' `tag` "env"]
steps'' = map mkHead steps'
steps = dummyState `collectStates` map buildTree steps''
probs = dummyState `collectStates` map mkDoubleData probs'
build cs = dummyState `collectStates`
zipWith tag cs ["code","steps","probs","env"]
sr :: State
sr = buildTree (dummyState `collectStates` [tcode,tenv])
`collectStates` [tterm,trace]
where tcode = sdefs `tag` "code"
tenv = dummyState `tag` "env"
tterm = flip tag "term" $
dummyState `collectStates` [trueE `tag` "t", falseE `tag` "f"]
trace = flip tag "trace" $
buildTree $
dummyState `collectStates`
[falseE `tag` "code", dummyState `tag` "env"]
sd :: Likelihood
sd s _ = logFloat $ doubleVal $ getTA ["prob"] s
kr :: Kernel
kr = mkURWKernel [["term","t"],["term","f"]]
traceChurch :: TA -> ReportAct
traceChurch ta machines = do
let tr m = printf "%s: %.3g" (showE . getTA ta $ ms m)
(fromLogFloat . topDensity $ m :: Double)
mapM_ (putStrLn . tr) machines
return machines
traceChurchEnv :: TA -> String -> ReportAct
traceChurchEnv ta v machines = do
let tr m = printf "%s: %.3g" (maybe "" showE . envlookup v . getTA ta $ ms m)
(fromLogFloat . topDensity $ m :: Double)
mapM_ (putStrLn . tr) machines
return machines
buildMachine :: Entropy -> Machine
buildMachine e = Machine sr dummyDensity kr e
main :: IO ()
main = foldl (>>=) (run buildMachine)
[ stopAfter 100, traceChurch ["trace","code"]] >>
putStrLn "Run complete!"
| othercriteria/blaze | RAE.hs | mit | 12,894 | 0 | 17 | 3,767 | 5,240 | 2,757 | 2,483 | 251 | 2 |
module HTML where
import Text.Blaze.Html5 hiding (map)
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes
import Data.List
import DataTypes
import Data.Monoid
proof2html :: BinTree DecoratedSequent -> Html
proof2html (Leaf lab s) =
table $ do
tr $ toHtml ""
tr $ do
td $ hr
td $ lab2html lab
tr $ td $ decoratedSeq2html s
proof2html (Unary lab s t) =
table $ do
tr $ td $ proof2html t
tr $ do
td $ hr
td $ lab2html lab
tr $ td $ decoratedSeq2html s
proof2html (Branch lab l s r) =
table $ do
tr $ do
td $ proof2html l
td $ proof2html r
tr $ do
(td $ hr) ! (colspan $ toValue "2")
td $ lab2html lab
tr $ (td $ decoratedSeq2html s) ! (colspan $ toValue "2")
lab2html :: Label -> Html
lab2html Id = toHtml "id"
lab2html ImpL = preEscapedToHtml "→L"
lab2html ImpR = preEscapedToHtml "→R"
lab2html MonL = preEscapedToHtml "◊L"
lab2html MonR = preEscapedToHtml "◊R"
lambda2html :: LambdaTerm -> Html
lambda2html (C c) = em $ toHtml c
lambda2html (V n) | n < length sanevars && n >= 0 =
toHtml $ sanevars !! n
| otherwise = toHtml $ "v" ++ show n
lambda2html (Lambda x b) = do
preEscapedToHtml "λ"
lambda2html x
toHtml "."
lambda2html b
lambda2html (Eta f) = do
preEscapedToHtml "η("
lambda2html f
toHtml ")"
lambda2html (App f@(Lambda _ _) a) = do
toHtml "("
lambda2html f
toHtml ")("
lambda2html a
toHtml ")"
lambda2html (App f@(_ :*: _) a) = do
toHtml "("
lambda2html f
toHtml ")("
lambda2html a
toHtml ")"
lambda2html (App f a) = do
lambda2html f
toHtml "("
lambda2html a
toHtml ")"
lambda2html (m :*: k) = do
lambda2html m
preEscapedToHtml " ∗ "
lambda2html k
decoratedSeq2html :: DecoratedSequent -> Html
decoratedSeq2html (gamma,c) = mconcat left >> toHtml " => " >> f c where
left = intersperse (toHtml ", ") $ map f gamma
f (DF _ lt f) = lambda2html lt >> toHtml " : " >> formula2html f
-- |Texifies a formula (now with smart parentheses!)
formula2html :: Formula -> Html
formula2html (Atom a _) = toHtml a
formula2html (Var x _) = toHtml x
formula2html (M (Atom a _) _) = preEscapedToHtml "◊" >> toHtml a
formula2html (M (Var x _) _) = preEscapedToHtml "◊" >> toHtml x
formula2html (M f _) = preEscapedToHtml "◊(" >> formula2html f >> toHtml ")"
formula2html (I (Atom a _) f _) = toHtml a >> preEscapedToHtml " → " >> formula2html f
formula2html (I (Var a _) f _) = toHtml a >> preEscapedToHtml " → " >> formula2html f
formula2html (I d@(M _ _) f _) = formula2html d >> preEscapedToHtml " → " >> formula2html f
formula2html (I f g _) = do
toHtml "("
formula2html f
preEscapedToHtml ") → "
formula2html g
| gianlucagiorgolo/glue-tp | HTML.hs | mit | 2,746 | 8 | 14 | 619 | 1,168 | 540 | 628 | 90 | 1 |
{-|
Module : BreadU.Pages.Markup.Common.Utils
Description : HTML markup utils.
Stability : experimental
Portability : POSIX
HTML markup utils.
-}
module BreadU.Pages.Markup.Common.Utils where
import Prelude hiding ( span, div )
import Data.Monoid ( (<>) )
import Data.Text ( Text )
import Text.Blaze.Html5
import qualified Text.Blaze.Html5.Attributes as A
-- | Row for Bootstrap grid. Partially-applied function,
-- it has to be applied to an 'Html'-expression.
row_ :: Html -> Html
row_ = div ! A.class_ "row"
-- | Columns for Bootstrap grid. Partially-applied function,
-- it has to be applied to an 'Html'-expression.
col_1
, col_2
, col_3
, col_4
, col_5
, col_6
, col_7
, col_8
, col_9
, col_10
, col_11
, col_12 :: Html -> Html
col_1 = col_ 1
col_2 = col_ 2
col_3 = col_ 3
col_4 = col_ 4
col_5 = col_ 5
col_6 = col_ 6
col_7 = col_ 7
col_8 = col_ 8
col_9 = col_ 9
col_10 = col_ 10
col_11 = col_ 11
col_12 = col_ 12
-- | Column for Bootstrap grid. Partially-applied function,
-- it has to be applied to an 'Html'-expression.
col_ :: Int -> Html -> Html
col_ width = div ! A.class_ (toValue $ "col-" <> show width)
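-- A small usage sketch (illustrative only): a row with two equal columns.
--
-- > row_ $ col_6 (toHtml ("left" :: Text)) <> col_6 (toHtml ("right" :: Text))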
-- | Font Awesome icon.
fa :: Text -> Html
fa iconName = i ! A.class_ (toValue $ "fa " <> iconName)
! customAttribute "aria-hidden" "true" $ mempty
| denisshevchenko/breadu.info | src/lib/BreadU/Pages/Markup/Common/Utils.hs | mit | 1,433 | 0 | 11 | 397 | 311 | 183 | 128 | 37 | 1 |
module LMH_Interpreter where
import LMH_Lex
import LMH_Parse
import LMH_ExpType
import LMH_TypeInference
import LMH_Evaluator
toString :: Type -> String
toString (TypeVar str) = str
toString (TypeConst str) = str
toString (Arrow (TypeVar str, t)) =
str ++ " -> " ++ (toString t)
toString (Arrow (TypeConst str, t)) =
str ++ " -> " ++ (toString t)
toString (Arrow (t1, t2)) =
"(" ++ (toString t1) ++ ") -> " ++ (toString t2)
-- runLMH "file.hs"
--
-- lexes and parses the MH program text in file.hs and
-- performs static analysis including type inference.
-- Then it enters an interpreter loop: the user inputs an
-- MH expression, which the computer parses and evaluates,
-- outputting the type and resulting value.
-- The loop is exited by inputting ":q"
runLMH filename = do
progtext <- readFile filename
let lexed = alexScanTokens progtext
termDecls = lmh_parseProg lexed
declVars = checkVars termDecls
env = (\x -> case lookup x termDecls of
Just exp -> exp
-- The case below should never occur due to static analysis
Nothing -> error ("Lookup error - undefined variable: " ++ x))
in if -- this test implements condition 3 in Note 4
all (\x -> (all (\y -> elem y declVars) (freevars (env x)))) declVars
then let tenv = initialiseTEnv declVars
tenv' = inferProg declVars tenv env
in do _ <- putStrLn ""
_ <- printTypes tenv'
runIn tenv' env
else putStrLn "Out-of-scope variables in program."
checkVars [] = []
checkVars ((x,_):trds) =
let xs = checkVars trds
in if notElem x xs then (x:xs)
else error ("Duplicate declaration for variable " ++ x)
inferProg [] tenv _ = tenv
inferProg (x:xs) tenv env =
let (s',t') = inferType tenv (env x)
tenv' = typeSubstTEnv tenv s'
(Just t) = (lookup x tenv')
s = mgu t t'
in inferProg xs (typeSubstTEnv tenv' s) env
printTypes [] = putStrLn ""
printTypes ((x,t):tenv) =
do _ <- putStrLn (x ++ " :: " ++ (toString t))
printTypes tenv
runIn tenv env = do
_ <- putStr "LMH> "
textuser <- getLine
if textuser == ":q" then putStrLn "LMH goodbye!"
else let lexeduser = alexScanTokens textuser
exp = lmh_parseExp lexeduser
(_, t) = inferType tenv exp
in do _ <- putStr "Type: "
_ <- putStrLn (toString t)
_ <- putStr "Value: "
_ <- print (evaluate env exp)
runIn tenv env
| jaanos/TPJ-2015-16 | lmh/LMH_Interpreter.hs | mit | 2,531 | 7 | 18 | 725 | 795 | 402 | 393 | 59 | 3 |
{-# LANGUAGE DeriveGeneric #-}
module Bce.RestTypes where
import Bce.BlockChain
import Bce.Hash
import Bce.BlockChainSerialization
import Bce.BlockChainHash
import GHC.Generics
import qualified Data.Set as Set
import Data.Aeson hiding (json)
data WalletBalance = WalletBalance { outputs :: Set.Set TxOutputRef
, unconfirmed :: Set.Set TxOutputRef } deriving (Show, Eq, Generic)
instance ToJSON WalletBalance
instance FromJSON WalletBalance
data RestTransaction = RestTransaction {
tx :: Transaction
, txId :: TransactionId
} deriving (Generic, Eq, Show)
instance ToJSON RestTransaction
instance FromJSON RestTransaction
data RestBlock = RestBlock {
blockHeader :: BlockHeader
, transactions :: [RestTransaction]
} deriving (Generic, Eq, Show)
instance ToJSON RestBlock
instance FromJSON RestBlock
blockToRestBlock :: Block -> RestBlock
blockToRestBlock blk =
let hdr = Bce.BlockChain.blockHeader blk
txs = map (\tx -> RestTransaction tx (transactionId blk tx)) $ Set.toList (blockTransactions blk)
in RestBlock hdr txs
data Head = Head {
headLength :: Int
, headBlockId :: BlockId
} deriving (Generic, Eq, Show)
instance ToJSON Head
instance FromJSON Head
| dehun/bce | src/Bce/RestTypes.hs | mit | 1,320 | 0 | 15 | 315 | 347 | 188 | 159 | 36 | 1 |
module PythagorianTriple54 where
pythagoreanTriple :: Int -> [(Int, Int, Int)]
pythagoreanTriple x = if x <= 0 then [] else do
a <- [1..x]
b <- [1..x]
c <- [1..x]
  let a' = a^2
let b' = b^2
let c' = c^2
if a < b && (a' + b' == c') then "_" else []
return (a,b,c)
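-- For example, pythagoreanTriple 5 evaluates to [(3,4,5)].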
main' :: IO ()
main' = do
putStrLn "What is your name?"
putStr "Name: "
name <- getLine
case name of
[] -> main'
_ -> putStrLn $ "Hi, " ++ name
| raventid/coursera_learning | haskell/stepik/5.4pythagorian_triple.hs | mit | 441 | 6 | 17 | 125 | 234 | 118 | 116 | 19 | 3 |
{-# LANGUAGE TemplateHaskell #-}
module Oczor.Parser.ParserState where
import Oczor.Syntax.Operators
import Control.Lens
import Text.Megaparsec.Expr
import Control.Monad.State
import ClassyPrelude as C hiding (try)
import Oczor.Syntax.Syntax
import qualified Text.Megaparsec.String as Megaparsec
type Parser = StateT ParserState Megaparsec.Parser
data ParserState = ParserState {
_count :: Int,
_asName :: Maybe String,
_ops :: OperatorGroups,
_opTable :: [[Operator Parser Expr]]
}
makeLenses ''ParserState
emptyState = ParserState {
_count = 0,
_asName = Nothing,
_ops = [],
_opTable = []
}
cleanAsName :: Parser ()
cleanAsName = asName .= Nothing
asNameOrFresh :: Parser String
asNameOrFresh = use asName >>= maybe freshName return
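-- Infinite supply of short names used by 'freshName': "a" .. "z", "aa", "ab", ...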
letters :: [String]
letters = [1..] >>= flip C.replicateM ['a'..'z']
freshName :: Parser String
freshName = do
c <- use count
count += 1
return $ sysPrefix ++ unsafeIndex letters c
| ptol/oczor | src/Oczor/Parser/ParserState.hs | mit | 946 | 0 | 11 | 156 | 282 | 161 | 121 | 32 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.MessageChannel
(js_newMessageChannel, newMessageChannel, js_getPort1, getPort1,
js_getPort2, getPort2, MessageChannel, castToMessageChannel,
gTypeMessageChannel)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "new window[\"MessageChannel\"]()"
js_newMessageChannel :: IO (JSRef MessageChannel)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel Mozilla MessageChannel documentation>
newMessageChannel :: (MonadIO m) => m MessageChannel
newMessageChannel
= liftIO (js_newMessageChannel >>= fromJSRefUnchecked)
foreign import javascript unsafe "$1[\"port1\"]" js_getPort1 ::
JSRef MessageChannel -> IO (JSRef MessagePort)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel.port1 Mozilla MessageChannel.port1 documentation>
getPort1 :: (MonadIO m) => MessageChannel -> m (Maybe MessagePort)
getPort1 self
= liftIO ((js_getPort1 (unMessageChannel self)) >>= fromJSRef)
foreign import javascript unsafe "$1[\"port2\"]" js_getPort2 ::
JSRef MessageChannel -> IO (JSRef MessagePort)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel.port2 Mozilla MessageChannel.port2 documentation>
getPort2 :: (MonadIO m) => MessageChannel -> m (Maybe MessagePort)
getPort2 self
= liftIO ((js_getPort2 (unMessageChannel self)) >>= fromJSRef) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/MessageChannel.hs | mit | 2,189 | 16 | 11 | 264 | 535 | 321 | 214 | 34 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Language.PureScript.Binding.TH where
import Control.Applicative
import Language.Haskell.TH as TH
import Language.PureScript.Binding.Class
import Language.PureScript.Binding.Dependency
import Data.Monoid
import Data.Text.Lazy.Builder hiding (fromString)
import qualified Data.String as S
import Data.List
import Data.Proxy
import Data.Aeson.TH
--------------------------------------------------------------------------------
tyVarBndrName :: TyVarBndr -> Name
tyVarBndrName (PlainTV n ) = n
tyVarBndrName (KindedTV n _) = n
bUnlines :: [Builder] -> Builder
bUnlines = mconcat . map (<> singleton '\n')
bUnwords :: [Builder] -> Builder
bUnwords = mconcat . intersperse (singleton ' ')
fromString :: String -> Builder
fromString = fromText . S.fromString
{-# INLINE fromString #-}
typeToPsTypeE :: Type -> ExpQ
typeToPsTypeE = \case
(AppT ListT a) -> [|singleton '[' <> $(typeToPsTypeE a) <> singleton ']'|]
(AppT a b) -> [|singleton '(' <> $(typeToPsTypeE a) <> singleton ' ' <> $(typeToPsTypeE b) <> singleton ')'|]
(VarT v) -> [|fromString $(stringE $ nameBase v) |]
TupleT n | n == 2 -> [|fromString $(stringE "Data.Tuple.Tuple")|]
| otherwise -> fail $ "2-tuple only."
c -> do
b <- recover (return False) (isInstance ''PureScriptType [c])
if b
then [|fromText $ toPureScriptType (Proxy :: Proxy $(return c))|]
else fail $ show c ++ " is not instance of PureScriptType."
depTypes :: Type -> Q [Type]
depTypes = \case
(AppT a b) -> (++) <$> depTypes a <*> depTypes b
c -> do
b <- recover (return False) (isInstance ''HasPureScript [c])
return $ if b then [c] else []
conName :: Con -> Name
conName (NormalC n _) = n
conName (RecC n _) = n
conName (InfixC _ n _) = n
conName (ForallC _ _ c) = conName c
--------------------------------------------------------------------------------
-- | type instance Deps Name = '[Name']
depsD :: Name -> [Con] -> DecQ
depsD name cons = do
let ts = concatMap fn cons
deps <- concat <$> mapM depTypes ts
let r = foldr (\i b -> (PromotedT '(:-) `AppT` i `AppT` b)) (PromotedT 'TNil) deps
tySynInstD ''Deps $ tySynEqn [conT name] (return r)
where
fn (NormalC _ ts) = map snd ts
fn (RecC _ ts) = map (\(_,_,t) -> t) ts
fn (InfixC a _ b) = snd a : snd b : []
fn (ForallC _ _ c) = fn c
--------------------------------------------------------------------------------
-- | instance PureScriptType Name where
-- toPureScriptType _ = "Name"
toPureScriptTypeD :: Name -> DecQ
toPureScriptTypeD n = funD 'toPureScriptType
[clause [wildP] (normalB [|S.fromString $(stringE $ nameBase n)|]) []]
pureScriptTypeInstanceD :: Name -> DecQ
pureScriptTypeInstanceD n = instanceD (return []) (conT ''PureScriptType `appT` conT n)
[toPureScriptTypeD n]
--------------------------------------------------------------------------------
-- | dataDecl _ = "data A = .."
dataDeclD :: Name -> [TyVarBndr] -> [Con] -> DecQ
dataDeclD name vars cons = funD 'dataDecl
[clause [wildP] (normalB $ dataDeclE name vars cons) []]
dataDeclE :: Name -> [TyVarBndr] -> [Con] -> ExpQ
dataDeclE name vars cons =
let cls = zipWith dataDeclConStringE (" = " : repeat " | ") cons
fl = unwords
("data" : nameBase name : map (nameBase . tyVarBndrName) vars)
in [|bUnlines $ fromString $(stringE fl) : $(listE cls)|]
dataDeclConStringE :: String -> Con -> ExpQ
dataDeclConStringE p (NormalC n ts) =
let e = listE $ map (typeToPsTypeE . snd) ts
in [|fromString $(stringE $ p ++ nameBase n ++ " ") <> bUnwords $e |]
dataDeclConStringE p (RecC n ts) =
let e = listE $ map (\(c,_,t) -> [|fromString $(stringE $ nameBase c ++ " :: ") <> $(typeToPsTypeE t)|]) ts
in [| fromString $(stringE $ p ++ nameBase n ++ " {") <> mconcat (intersperse (fromString ", ") $e) <> singleton '}' |]
dataDeclConStringE _ InfixC{} = fail "cannot use infix data constructor."
dataDeclConStringE _ ForallC{} = fail "cannot use existential quantification."
--------------------------------------------------------------------------------
-- | FromJSON autoNameFromJSON :: FromJSON Name where
-- parseJSON
foreignDeclD :: Options -> Name -> [TyVarBndr] -> [Con] -> DecQ
foreignDeclD opts name vars cons = funD 'foreignDecl
[clause [wildP] (normalB $ foreignDeclE opts name vars cons) []]
foreignDeclE :: Options -> Name -> [TyVarBndr] -> [Con] -> ExpQ
foreignDeclE opts name vars cons = do
let hdr = concat
[ "instance auto", nameBase name, "FromJSON :: "
, foreignDeclCxt vars
, "Data.JSON.FromJSON ", foreignDeclName name vars, "where"
]
se = stringE $ intercalate "\n"
[ hdr
, foreignDeclFun opts cons
]
[|fromString $se|]
foreignDeclName :: Name -> [TyVarBndr] -> String
foreignDeclName n [] = nameBase n ++ " "
foreignDeclName n vars = '(' : nameBase n ++ ' ': intercalate " " (map (nameBase . tyVarBndrName) vars) ++ ") "
foreignDeclCxt :: [TyVarBndr] -> String
foreignDeclCxt [] = ""
foreignDeclCxt vars =
'(' : intercalate ", " (map (\v -> "Data.JSON.FromJSON " ++ nameBase (tyVarBndrName v)) vars) ++ ") => "
sp :: Int -> String
sp i = replicate (4 * i) ' '
foreignDeclSingleFun :: Options -> Int -> Con -> String
foreignDeclSingleFun _ i (NormalC n t) =
let vs = map (('v':) . show) $ take (length t) [ 0 :: Int .. ]
cse = sp i ++ "case input of"
rit = sp (i + 1) ++ "Data.JSON.JArray [" ++ intercalate "," vs ++ "] -> do"
pf v = sp (i + 2) ++ v ++ "' <- Data.JSON.parseJSON " ++ v
ret = sp (i + 2) ++ "return (" ++ nameBase n ++ ' ': intercalate "' " vs ++ "')"
lft = sp (i + 1) ++ "_ -> Data.JSON.fail \"cannot parse.\""
in intercalate "\n" $ cse : rit : map pf vs ++ [ret, lft]
foreignDeclSingleFun Options{..} i (RecC n t) =
let cse = sp i ++ "case input of"
rit = sp (i + 1) ++ "Data.JSON.JObject object -> do"
pf v = sp (i + 2) ++ v ++ " <- Data.JSON.(.:) object \"" ++ fieldLabelModifier v ++ "\""
ret = sp (i + 2) ++ "return (" ++ nameBase n ++ " {" ++ intercalate ", "
(map (\(c,_,_) -> nameBase c ++ ": " ++ nameBase c) t) ++ "})"
lft = sp (i + 1) ++ "_ -> Data.JSON.fail \"cannot parse.\""
in intercalate "\n" $ cse : rit : map (\(c,_,_) -> pf $ nameBase c) t ++ [ret, lft]
foreignDeclSingleFun _ _ InfixC{} = error "cannot use infix data constructor."
foreignDeclSingleFun _ _ ForallC{} = error "cannot use existential quantification."
foreignDeclFun :: Options -> [Con] -> String
foreignDeclFun opts [con] = unlines [sp 1 ++ "parseJSON input =", foreignDeclSingleFun opts 2 con]
foreignDeclFun opt@Options{sumEncoding = TaggedObject {..}, .. } rs =
let fl = sp 1 ++ "parseJSON (Data.JSON.JObject obj) = case Data.JSON.(.:) obj \"" ++ tagFieldName ++ "\" of"
ca c@(NormalC n _) = unlines
[ sp 2 ++ "Data.Either.Right \"" ++ constructorTagModifier (nameBase n) ++
"\" -> case Data.JSON.(.:) obj \"" ++ contentsFieldName ++ "\" of"
, sp 3 ++ "Data.Either.Right input ->"
, foreignDeclSingleFun opt 4 c
, sp 3 ++ "_ -> Data.JSON.fail \"cannot parse.\""
]
ca c@(RecC n _) = unlines
[ sp 2 ++ "Data.Either.Right \"" ++ constructorTagModifier (nameBase n) ++
"\" -> let input = Data.JSON.JObject obj in"
, foreignDeclSingleFun opt 3 c
]
ca InfixC{} = error "cannot use infix data constructor."
ca ForallC{} = error "cannot use existential quantification."
fil = sp 2 ++ "_ -> Data.JSON.fail \"cannot parse.\""
fbk = sp 1 ++ "parseJSON _ = Data.JSON.fail \"cannot parse.\""
in unlines $ fl : map ca rs ++ [fil, fbk]
foreignDeclFun opt@Options{sumEncoding = TwoElemArray, .. } rs =
let fl = sp 1 ++ "parseJSON array = case Data.JSON.parseJSON array of"
ca c = unlines
[ sp 2 ++ "Data.Either.Right (Data.Tuple.Tuple \"" ++
constructorTagModifier (nameBase $ conName c) ++ "\" input) ->"
, foreignDeclSingleFun opt 3 c
]
fil = sp 2 ++ "_ -> Data.JSON.fail \"cannot parse.\""
in unlines $ fl : map ca rs ++ [fil]
foreignDeclFun opt@Options{sumEncoding = ObjectWithSingleField, .. } rs =
let fl = sp 1 ++ "parseJSON (Data.JSON.JObject obj) = case Data.Map.toList obj of"
ca c = unlines
[ sp 2 ++ "[Data.Tuple.Tuple \"" ++
constructorTagModifier (nameBase $ conName c) ++ "\" input] ->"
, foreignDeclSingleFun opt 3 c
]
fil = sp 2 ++ "_ -> Data.JSON.fail \"cannot parse.\""
fbk = sp 1 ++ "parseJSON _ = Data.JSON.fail \"cannot parse.\""
in unlines $ fl : map ca rs ++ [fil, fbk]
--------------------------------------------------------------------------------
hasPureScriptD :: Options -> Name -> [TyVarBndr] -> [Con] -> DecQ
hasPureScriptD opts name vars cons = instanceD (return []) (conT ''HasPureScript `appT` conT name)
[dataDeclD name vars cons, foreignDeclD opts name vars cons]
declaration :: Options -> Dec -> DecsQ
declaration opts (DataD _ name vars cons _) = do
deps <- depsD name cons
typ <- pureScriptTypeInstanceD name
has <- hasPureScriptD opts name vars cons
return [deps, typ, has]
declaration opts (NewtypeD cnxt name vars con der) = declaration opts (DataD cnxt name vars [con] der)
declaration _ a = fail $ "cannot convert Dec: " ++ show a
-- | derive PureScriptType and HasPureScript instances with aeson Options.
derivePureScript' :: Options -> Name -> DecsQ
derivePureScript' opt name = reify name >>= \case
TyConI d -> declaration opt d
i -> fail $ "cannot convert Info: " ++ show i
-- | derive PureScriptType, HasPureScript and ToJSON instances.
--
derivePureScript :: Name -> DecsQ
derivePureScript name = reify name >>= \case
TyConI d -> do
aeson <- deriveToJSON defaultOptions name
psc <- declaration defaultOptions d
return $ aeson ++ psc
i -> fail $ "cannot convert Info: " ++ show i
| philopon/haskell-purescript-binding | Language/PureScript/Binding/TH.hs | mit | 10,545 | 0 | 18 | 2,573 | 3,180 | 1,646 | 1,534 | 185 | 6 |
data Type = T | S | O Type Type
deriving Show
splits :: [a] -> [([a],[a])]
splits ts = zip (inits ts) (tails ts)
inits :: [a] -> [[a]]
inits [x] = []
inits (x:xs) = map (x:) ([]:inits xs)
tails :: [a] -> [[a]]
tails [x] = []
tails (x:xs) = xs : tails xs
alltypes :: [Type] -> [Type]
alltypes [t] = [t]
alltypes ts = [O l r | (ls,rs) <- splits ts, l <- alltypes ls, r <- alltypes rs]
| craynafinal/cs557_functional_languages | practice/week4/practice.hs | mit | 388 | 1 | 8 | 90 | 296 | 155 | 141 | 13 | 1 |
module Ch22.PodTypes
where
import Data.Text (Text)
data Podcast =
Podcast { castId :: Integer
, castURL :: Text
}
deriving (Eq, Show, Read)
data Episode =
Episode { epId :: Integer
, epCast :: Podcast
, epURL :: Text
, epDone :: Bool
}
deriving (Eq, Show, Read)
| futtetennista/IntroductionToFunctionalProgramming | RWH/src/Ch22/PodTypes.hs | mit | 336 | 0 | 8 | 122 | 100 | 60 | 40 | 12 | 0 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Web.HBrowser.Scripting where
import qualified Graphics.UI.Gtk.WebKit.WebView as Web
import Web.HBrowser.WebMonad
import Web.HBrowser.ViewContainer
import Control.Monad.Reader
import Control.Monad.Trans
import Control.Exception
import System.IO
import System.IO.Error
runScript = withCurrentViewIO . flip Web.webViewExecuteScript
runScriptFromFile scriptName = do
web <- ask
jsDir <- asks $ jsScriptDir . config
view <- currentView
let scriptFile = jsDir ++ "/" ++ scriptName
liftIO . putStrLn $ "running script: " ++ scriptFile
liftIO $ catchJust isFileError
(do
withFile scriptFile ReadMode $ \handle -> do
script <- hGetContents handle
Web.webViewExecuteScript view script
)
(\e -> print (e::IOException))
where isFileError e | isDoesNotExistError e = Just e
| isPermissionError e = Just e
| otherwise = Nothing
| Philonous/hbrowser | src/Web/HBrowser/Scripting.hs | mit | 975 | 0 | 17 | 220 | 259 | 133 | 126 | 26 | 1 |
module Model
( module Model.DB
, module Model.Open
, module Database.Persist
) where
import Database.Persist
import Model.DB
import Model.Open
| flipstone/glados | src/Model.hs | mit | 153 | 0 | 5 | 29 | 39 | 25 | 14 | 7 | 0 |
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Instances.Static where
import GHC.Generics
import Test.QuickCheck.Arbitrary.Generic
import Test.QuickCheck.Instances()
import Web.Facebook.Messenger
------------
-- STATIC --
------------
deriving instance Generic PSID
instance Arbitrary PSID where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic PageID
instance Arbitrary PageID where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic AppId
instance Arbitrary AppId where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic SenderActionType
instance Arbitrary SenderActionType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic MessagingType
instance Arbitrary MessagingType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic NotificationType
instance Arbitrary NotificationType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic WebviewHeightRatioType
instance Arbitrary WebviewHeightRatioType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic AttachmentType
instance Arbitrary AttachmentType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic AirlineUpdateType
instance Arbitrary AirlineUpdateType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic ReferralSource
instance Arbitrary ReferralSource where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic ListStyle
instance Arbitrary ListStyle where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic PaymentType
instance Arbitrary PaymentType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic RequestedUserInfoType
instance Arbitrary RequestedUserInfoType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic MessageTag
instance Arbitrary MessageTag where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic AppRole
instance Arbitrary AppRole where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic AudienceType
instance Arbitrary AudienceType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic ImageAspectRatioType
instance Arbitrary ImageAspectRatioType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic WebviewShareType
instance Arbitrary WebviewShareType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic PriorMessageType
instance Arbitrary PriorMessageType where
arbitrary = genericArbitrary
shrink = genericShrink
deriving instance Generic FBLocale
instance Arbitrary FBLocale where
arbitrary = genericArbitrary
shrink = genericShrink
| Vlix/facebookmessenger | test/Instances/Static.hs | mit | 3,015 | 0 | 5 | 414 | 559 | 308 | 251 | 88 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.Quorums (quorums_tests) where
import Hetcons.Hetcons_Exception ( Hetcons_Exception )
import Hetcons.Instances_Proof_of_Consensus ()
import Hetcons.Signed_Message
( Encodable
,encode
,Recursive_1a(recursive_1a_filled_in)
,Recursive(non_recursive)
,Monad_Verify(verify)
,Verified
,sign
,original )
import Test.Util ()
import Charlotte_Consts ( sUPPORTED_SIGNED_HASH_TYPE_DESCRIPTOR )
import Charlotte_Types
( Participant_ID(participant_ID_crypto_id, participant_ID_address)
,default_Participant_ID
,Slot_Value(slot_Value_slot, slot_Value_value_payload)
,default_Slot_Value
,Observers(Observers
,observers_observer_graph
,observers_observer_quorums)
,default_Observers
,Proposal_1a(proposal_1a_timestamp, proposal_1a_value
,proposal_1a_observers)
,default_Proposal_1a
,Public_Crypto_Key(public_Crypto_Key_public_crypto_key_x509)
,default_Public_Crypto_Key
,Crypto_ID(crypto_ID_public_crypto_key)
,default_Crypto_ID
,Signed_Message
,Host_Address(host_Address_dns_name)
,default_Host_Address
,Observer_Trust_Constraint(observer_Trust_Constraint_live
,observer_Trust_Constraint_safe
,observer_Trust_Constraint_observer_2
,observer_Trust_Constraint_observer_1)
,default_Observer_Trust_Constraint
,Address(address_port_number, address_host_address)
,default_Address )
import Crypto.Random ( getSystemDRG, DRG, withDRG )
import qualified Data.ByteString.Lazy as ByteString
( singleton, readFile )
import Data.ByteString.Lazy ( ByteString )
import Data.Either.Combinators ( isRight )
import Data.Either.Combinators ( mapRight )
import qualified Data.HashMap.Lazy as HashMap ( toList )
import Data.HashMap.Lazy ()
import Data.HashSet ( fromList, toList )
import Data.List ( sort, elemIndex )
import Test.HUnit
( Test(TestList, TestLabel, TestCase), assertEqual, assertBool )
import Data.HashMap.Strict ( singleton )
import Data.Text.Lazy ( pack )
fill_in_observers :: Observers -> IO Observers
fill_in_observers observers =
do { cert <- ByteString.readFile "test/cert.pem"
; let sample = ((sample_1a cert) {proposal_1a_observers = Just observers})
; signed <- sample_sign $ sample
; let verified = mapRight ((mapRight ((non_recursive :: (Recursive_1a Slot_Value) -> Proposal_1a).original)).verify) signed
; assertEqual "failed to verify a signed proposal_1a" (Right $ Right sample) verified
; let answer = do { s <- signed
; (v_r1a :: Verified (Recursive_1a Slot_Value)) <- verify s
; return $ proposal_1a_observers $ recursive_1a_filled_in $ original v_r1a}
; assertBool "Exception while parsing signed Proposal_1a" $ isRight answer
; return ((\(Right (Just x)) -> x) answer)}
doubleGen :: (DRG g) => g -> (g,g)
doubleGen g = withDRG g (return g)
listGen :: (DRG g) => g -> [g]
listGen g = g:(listGen (snd (withDRG g (return ()))))
sample_sign :: (Encodable a) => a -> IO (Either Hetcons_Exception Signed_Message)
sample_sign payload =
do { gen <- getSystemDRG
; cert <- ByteString.readFile "test/cert.pem"
; private <- ByteString.readFile "test/key.pem"
; let crypto_id = default_Crypto_ID {crypto_ID_public_crypto_key =
Just (default_Public_Crypto_Key {
public_Crypto_Key_public_crypto_key_x509 = Just cert})}
; return $ sign crypto_id private sUPPORTED_SIGNED_HASH_TYPE_DESCRIPTOR gen payload}
sample_id :: ByteString -> Participant_ID
sample_id cert =
default_Participant_ID {
participant_ID_address =
default_Address {
address_host_address =
default_Host_Address {
host_Address_dns_name = Just $ pack "localhost"}
,address_port_number = 8976}
,participant_ID_crypto_id =
default_Crypto_ID {
crypto_ID_public_crypto_key =
Just (default_Public_Crypto_Key {
public_Crypto_Key_public_crypto_key_x509 = Just cert})}}
-- sample_1a :: Proposal_1a
sample_1a cert = default_Proposal_1a {
proposal_1a_value = encode default_Slot_Value {
slot_Value_value_payload = ByteString.singleton 42
,slot_Value_slot = 6}
,proposal_1a_timestamp = 1111111
,proposal_1a_observers = Just default_Observers {
observers_observer_quorums = Just $ singleton (sample_id cert) (fromList [ fromList [sample_id cert]])}}
-- | Try the quorum creation algorithms by inputting a graph.
-- | For ease of use, we have our own little language here for observer graphs,
-- | using Ints 0..9 to stand for ids: input constraints of the form (observer, observer, [safe], [live])
-- | and a correct graph of the form [(observer, [quorum of participants :: [id]])],
-- | and this will run an end-to-end test of quorum creation and see if it comes out correctly.
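-- | For instance, the first test case below feeds the single constraint
-- | (1,1,[1],[1]) and expects observer 1 to end up with a single quorum
-- | containing just participant 1.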
test_quorum_creation :: [(Int, Int, [Int], [Int])] -> [(Int, [[Int]])] -> IO ()
test_quorum_creation constraints correct_quorums =
do { cert <- ByteString.readFile "test/cert.pem"
; certs' <- mapM (\i -> ByteString.readFile $ "test/cert" ++ (show i) ++ ".pem") [1..9]
; let certs = cert:certs'
; let ids = map sample_id certs
; let observers = default_Observers {observers_observer_graph = Just $ fromList $ map
(\(id1, id2, safe, live) ->
default_Observer_Trust_Constraint {
observer_Trust_Constraint_observer_1 = ids!!id1
,observer_Trust_Constraint_observer_2 = ids!!id2
,observer_Trust_Constraint_safe = fromList $ map (ids!!) safe
,observer_Trust_Constraint_live = fromList $ map (ids!!) live})
constraints}
; observers' <- fill_in_observers observers
-- prettier when printed:
; let observers_list = sort $ map (\(oid, qs) -> ((\(Just x) -> x) $
elemIndex oid ids, sort $ map (\q -> sort $ map (\x -> (\(Just y) -> y) $ elemIndex x ids) $
toList q) $ toList qs)) $ HashMap.toList $ (\(Observers {observers_observer_quorums = Just x}) -> x) observers'
; assertEqual "incorrectly filled in quorums"
(sort $ map (\(x,y) -> (x, sort $ map (sort . (map fromIntegral)) y)) correct_quorums)
observers_list
; return ()}
quorums_tests = TestList [
TestLabel "single observer, single participant" (
TestCase ( test_quorum_creation [(1,1,[1],[1])] [(1,[[1]])] ))
,TestLabel "two observer, four participant" (
TestCase ( test_quorum_creation [ (1,2,[1,2,3 ],[1,2,3 ])
,(1,2,[1,2, 4],[1,2, 4])
,(1,2,[1, 3,4],[1, 3,4])
,(1,2,[ 2,3,4],[ 2,3,4])
]
[ (1,[[1,2,3 ]
,[1,2 ,4]
,[1 ,3,4]
,[ 2,3,4]
])
,(2,[[1,2,3 ]
,[1,2 ,4]
,[1 ,3,4]
,[ 2,3,4]
])
] ))
,TestLabel "two observer, three participant" (
TestCase ( test_quorum_creation [ (1,2,[1,2,3],[1,2 ])
,(1,2,[1,2,3],[1, 3])
,(1,2,[1,2,3],[ 2,3])
]
[ (1,[[1,2 ]
,[1 ,3]
,[ 2,3]
])
,(2,[[1,2 ]
,[1 ,3]
,[ 2,3]
])
] ))
-- This passes, but is crazy slow
,TestLabel "two observer, nine participant (3 groups)" (
TestCase ( test_quorum_creation [ (1,2,[1,2,3,4,5,6,7 ],[1,2,3,4,5,6 ])
,(1,2,[1,2,3,4,5,6, 8 ],[1,2,3,4,5,6 ])
,(1,2,[1,2,3,4,5,6, 9],[1,2,3,4,5,6 ])
,(1,2,[1,2,3,4, 7,8,9],[1,2,3, 7,8,9])
,(1,2,[1,2,3, 5, 7,8,9],[1,2,3, 7,8,9])
,(1,2,[1,2,3, 6,7,8,9],[1,2,3, 7,8,9])
,(1,2,[1, 4,5,6,7,8,9],[ 4,5,6,7,8,9])
,(1,2,[ 2, 4,5,6,7,8,9],[ 4,5,6,7,8,9])
,(1,2,[ 3,4,5,6,7,8,9],[ 4,5,6,7,8,9])
]
[ (1,[[1,2,3,4,5,6 ]
,[1,2,3, 7,8,9]
,[ 4,5,6,7,8,9]
])
,(2,[[1,2,3,4,5,6 ]
,[1,2,3, 7,8,9]
,[ 4,5,6,7,8,9]
])
] ))
,TestLabel "three observer, three participant" (
TestCase ( test_quorum_creation [ (1,2,[1,2,3],[1,2 ])
,(1,2,[1,2,3],[1, 3])
,(1,2,[1,2,3],[ 2,3])
,(1,3,[1,2,3],[1,2 ])
,(1,3,[1,2,3],[1, 3])
,(1,3,[1,2,3],[ 2,3])
]
[ (1,[[1,2 ]
,[1 ,3]
,[ 2,3]
])
,(2,[[1,2 ]
,[1 ,3]
,[ 2,3]
])
,(3,[[1,2 ]
,[1 ,3]
,[ 2,3]
])
] ))
,TestLabel "three observer, three participant asymmetric" (
TestCase ( test_quorum_creation [ (1,2,[1,2,3],[1,2 ])
,(1,2,[1,2,3],[ 2,3])
,(1,3,[1,2,3],[1, 3])
,(1,3,[1,2,3],[ 2,3])
]
[ (1,[[1,2 ]
,[1 ,3]
,[ 2,3]
])
,(2,[[1,2 ]
,[ 2,3]
])
,(3,[[1 ,3]
,[ 2,3]
])
] ))
]
| isheff/hetcons | test/Test/Quorums.hs | mit | 11,892 | 0 | 31 | 5,285 | 3,266 | 2,004 | 1,262 | 194 | 1 |
-- | Framebuffers. You can render on them.
--
-- If you come from the OpenGL world, for simplicity, we have combined the
-- concept of draw buffers and color attachments. The Nth color attachment is
-- bound exactly to the Nth draw buffer. Caramia only talks about draw buffers.
--
-- <https://www.opengl.org/wiki/Framebuffer_Object>
--
-- Either OpenGL 3.0 or @ GL_ARB_framebuffer_object @ is required for this
-- module.
--
{-# LANGUAGE NoImplicitPrelude, ViewPatterns, DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
module Graphics.Caramia.Framebuffer
(
-- * Creating framebuffers
newFramebuffer
, Framebuffer()
-- * Specifying texture targets
, frontTextureTarget
, mipmapTextureTarget
, layerTextureTarget
, TextureTarget()
, Attachment(..)
-- * Size query
, getDimensions
-- * Clearing framebuffers
, clear
, Clearing(..)
, clearing
-- * Special framebuffers
, screenFramebuffer
-- * Hardware limits
, getMaximumDrawBuffers
-- * Views
, viewTargets )
where
import Control.Monad.Catch
import Control.Monad.IO.Class
import Data.Bits
import qualified Data.IntSet as IS
import Data.List ( nub )
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
import Foreign.Storable
import GHC.Float
import Graphics.Caramia.Color
import Graphics.Caramia.Context
import Graphics.Caramia.Framebuffer.Internal
import Graphics.Caramia.ImageFormats
import Graphics.Caramia.Internal.Exception
import Graphics.Caramia.Internal.OpenGLCApi
import Graphics.Caramia.Prelude
import Graphics.Caramia.Resource
import Graphics.Caramia.Texture
import qualified Graphics.Caramia.Texture.Internal as Tex
import Graphics.GL.Ext.ARB.FramebufferObject
-- | Returns the screen framebuffer.
--
-- Note that all `screenFramebuffer`s are equal to each other with `Eq`, even
-- those in unrelated Caramia contexts.
--
-- This makes it easy to check if any framebuffer happens to be the screen
-- framebuffer.
screenFramebuffer :: Framebuffer
screenFramebuffer = ScreenFramebuffer
-- | Make a texture target that is the \"front\" of the given texture.
--
-- This is the most common use case. \"front\" means the first texture in a
-- texture array and the base layer mipmap level.
frontTextureTarget :: Tex.Texture -> TextureTarget
frontTextureTarget tex = TextureTarget {
attacher = \attachment ->
withResource (Tex.resource tex) $ \(Tex.Texture_ texname) ->
glFramebufferTexture
GL_DRAW_FRAMEBUFFER
attachment
texname
0
, texture = tex }
-- | Map a specific mipmlayer from a texture.
mipmapTextureTarget :: Tex.Texture
-> Int -- ^ Which mipmap layer?
-> TextureTarget
mipmapTextureTarget tex mipmap_layer = TextureTarget {
attacher = \attachment ->
withResource (Tex.resource tex) $ \(Tex.Texture_ texname) ->
glFramebufferTexture
GL_DRAW_FRAMEBUFFER
attachment
texname
(safeFromIntegral mipmap_layer)
, texture = tex }
-- | Map a specific mipmap layer of a specific layer in a 3D or array texture.
layerTextureTarget :: Tex.Texture
-> Int -- ^ Which mipmap layer?
-> Int -- ^ Which topological layer?
-> TextureTarget
layerTextureTarget tex mipmap_layer topo_layer = TextureTarget {
attacher = \attachment ->
withResource (Tex.resource tex) $ \(Tex.Texture_ texname) ->
glFramebufferTextureLayer
GL_DRAW_FRAMEBUFFER
attachment
texname
(safeFromIntegral mipmap_layer)
(safeFromIntegral topo_layer)
, texture = tex }
toConstantA :: Attachment -> GLenum
toConstantA (ColorAttachment x) = GL_COLOR_ATTACHMENT0 + fromIntegral x
toConstantA DepthAttachment = GL_DEPTH_ATTACHMENT
toConstantA StencilAttachment = GL_STENCIL_ATTACHMENT
-- | Creates a new framebuffer.
newFramebuffer :: MonadIO m
=> [(Attachment, TextureTarget)]
-> m Framebuffer
newFramebuffer targets
| null targets =
error "newFramebuffer: no texture targets specified."
| nub (fmap fst targets) /= fmap fst targets =
error "newFramebuffer: there are duplicate attachments."
| otherwise = liftIO $ mask_ $
checkOpenGLOrExtensionM (OpenGLVersion 3 0)
"GL_ARB_framebuffer_object"
gl_ARB_framebuffer_object $ do
max_bufs <- getMaximumDrawBuffers
targetsSanityCheck max_bufs
res <- newResource (creator max_bufs)
deleter
(return ())
index <- newUnique
return Framebuffer { resource = res
, ordIndex = index
, viewTargets = targets
, dimensions = calculatedDimensions
, binder = withThisFramebuffer res
, setter = setThisFramebuffer res }
where
calculatedDimensions@(fw, fh) =
foldl' (\(lowest_w, lowest_h) (w, h) ->
(min lowest_w w, min lowest_h h))
(maxBound, maxBound)
(fmap (\(snd -> tex) ->
(viewWidth $ texture tex, viewHeight $ texture tex))
targets)
creator max_bufs =
bracketOnError mglGenFramebuffer
mglDeleteFramebuffer $ \fbuf_name -> do
withBoundDrawFramebuffer fbuf_name $ do
forM_ targets $ \(index, tex) ->
attacher tex (toConstantA index)
allocaArray max_bufs $ \buf_ptr -> do
forM_ [0..max_bufs-1] $ \bufnum ->
pokeElemOff buf_ptr bufnum $
if IS.member bufnum color_attachments
then GL_COLOR_ATTACHMENT0 +
fromIntegral bufnum
else GL_NONE
glDrawBuffers (fromIntegral max_bufs) buf_ptr
return $ Framebuffer_ fbuf_name
color_attachments :: IS.IntSet
color_attachments =
foldl' folder IS.empty (fmap fst targets)
where
folder :: IS.IntSet -> Attachment -> IS.IntSet
folder accum (ColorAttachment x) = IS.insert x accum
folder accum _ = accum
deleter (Framebuffer_ fbuf_name) =
mglDeleteFramebuffer fbuf_name
targetsSanityCheck max_bufs = forM_ targets $ \(attachment, target) -> do
let format = Tex.imageFormat $ Tex.viewSpecification $ texture target
unless (isRenderTargettable format) $
error $ "newFramebuffer: cannot render to " <> show format
case attachment of
ColorAttachment x | x < 0 || x >= max_bufs ->
error $ "newFramebuffer: color attachment " <> show x <>
" is out of range. Valid range is [0.." <>
show (max_bufs-1) <> "]."
ColorAttachment _ | not (isColorFormat format) ->
error $ "newFramebuffer: " <> show format <> " is not a " <>
"color format but was attempted to be attached to " <>
"attachment " <> show attachment <> "."
DepthAttachment | not (hasDepthComponent format) ->
error $ "newFramebuffer: " <> show format <> " has no " <>
"depth component but was attempted to be attached " <>
"to depth attachment."
StencilAttachment | not (hasStencilComponent format) ->
error $ "newFramebuffer: " <> show format <> " has no " <>
"stencil component but was attempted to be " <>
"attached to stencil attachment."
_ -> return ()
setThisFramebuffer res = do
withResource res $ \(Framebuffer_ fbuf_name) ->
glBindFramebuffer GL_FRAMEBUFFER fbuf_name
glViewport 0 0 (fromIntegral fw) (fromIntegral fh)
withThisFramebuffer res action = mask $ \restore -> do
old_draw_framebuffer <- gi GL_DRAW_FRAMEBUFFER_BINDING
old_read_framebuffer <- gi GL_READ_FRAMEBUFFER_BINDING
(x, y, w, h) <- liftIO $ allocaArray 4 $ \viewport_ptr -> do
glGetIntegerv GL_VIEWPORT viewport_ptr
x <- peekElemOff viewport_ptr 0
y <- peekElemOff viewport_ptr 1
w <- peekElemOff viewport_ptr 2
h <- peekElemOff viewport_ptr 3
return (x, y, w, h)
withResource res $ \(Framebuffer_ fbuf_name) -> do
glBindFramebuffer GL_FRAMEBUFFER fbuf_name
glViewport 0 0 (fromIntegral fw) (fromIntegral fh)
finally (restore action) $ do
glViewport x y w h
glBindFramebuffer GL_DRAW_FRAMEBUFFER old_draw_framebuffer
glBindFramebuffer GL_READ_FRAMEBUFFER old_read_framebuffer
-- | Returns the maximum number of draw buffers in the current context.
--
-- Almost all GPUs in the last few years have at least 8.
getMaximumDrawBuffers :: MonadIO m => m Int
getMaximumDrawBuffers = do
_ <- currentContextID
-- number of draw buffers
num_drawbuffers <- gi GL_MAX_DRAW_BUFFERS
-- number of attachments
num_attachments <- gi GL_MAX_COLOR_ATTACHMENTS
return (fromIntegral $ min num_drawbuffers num_attachments)
-- | Specifies what to clear in a `clear` invocation.
--
-- Use `clearing` smart constructor instead for forward-compatibility.
--
-- Each member of this data type is a `Maybe` value; if any value is `Just`
-- then that value is cleared, otherwise it is not touched.
data Clearing = Clearing
{ clearDepth :: !(Maybe Float)
-- ^ Clear depth buffer to this value.
, clearStencil :: !(Maybe Int32)
-- ^ Clear stencil buffer to this value.
, clearColor :: !(Maybe Color)
-- ^ Clear (all) color buffers to some color.
}
deriving ( Eq, Ord, Show, Read, Typeable )
-- TODO: selective clearing for different color buffers.
-- | Smart constructor for `Clearing`. All members are `Nothing`.
clearing :: Clearing
clearing = Clearing { clearDepth = Nothing
, clearStencil = Nothing
, clearColor = Nothing }
-- | Clears values in a framebuffer.
clear :: MonadIO m => Clearing -> Framebuffer -> m ()
clear clearing fbuf = liftIO $ withBinding fbuf $ mask_ $
recColor (clearColor clearing)
where
bits = maybe 0 (const GL_COLOR_BUFFER_BIT) (clearColor clearing) .|.
maybe 0 (const GL_DEPTH_BUFFER_BIT) (clearDepth clearing) .|.
maybe 0 (const GL_STENCIL_BUFFER_BIT) (clearStencil clearing)
recColor Nothing = recDepth (clearDepth clearing)
recColor (Just (viewRgba -> (r, g, b, a))) =
allocaArray 4 $ \ptr -> do
glGetFloatv GL_COLOR_CLEAR_VALUE ptr
glClearColor r g b a
recDepth (clearDepth clearing)
nr <- peekElemOff ptr 0
ng <- peekElemOff ptr 1
nb <- peekElemOff ptr 2
na <- peekElemOff ptr 3
glClearColor nr ng nb na
recDepth Nothing = recStencil (clearStencil clearing)
recDepth (Just depth) = do
old_depth <- alloca $ \ptr ->
glGetDoublev GL_DEPTH_CLEAR_VALUE ptr *> peek ptr
glClearDepth $ float2Double depth
recStencil (clearStencil clearing)
glClearDepth old_depth
recStencil Nothing = glClear bits
recStencil (Just stencil) = do
old_stencil <- alloca $ \ptr ->
glGetIntegerv GL_STENCIL_CLEAR_VALUE ptr *> peek ptr
glClearStencil (safeFromIntegral stencil)
glClear bits
glClearStencil old_stencil
| Noeda/caramia | src/Graphics/Caramia/Framebuffer.hs | mit | 11,943 | 0 | 23 | 3,676 | 2,371 | 1,218 | 1,153 | 234 | 7 |
module GiveYouAHead.Help
(
helpInfo
) where
import System.Environment(getProgName)
import GiveYouAHead.Version(gyahver)
helpInfo :: IO()
helpInfo = getProgName >>= (putStrLn.unlines.helpAll)
where
helpAll pN = [
"\n",
"GiveYouAHead\t\t\t version "++gyahver,
"\tUsage :",
"The details of usage are in the documents.And you can find more information at this repo's README.md . The links are at the bottom.\n",
"\tTo create a new file",
"\t\t"++pN++" new (optional){-t [template] -d [directory]} [id/name] [the list of import]",
"\n",
"\tTo initialize a new \"project\"",
"\t\t"++pN++" init (optional){-d [directory]}",
"\n",
"\tTo build something",
"\t\t"++pN++" build (optional){-t [template] -d [directory]} [the list of id/name]",
"\n",
"\tTo clean the temporary files",
"\t\t"++pN++" clean (optional){-t [template]}",
"\n",
"\tTo configure",
"\t\tTo list all the CommandMap in the .gyah",
"\t\t"++pN++" config listcm",
"\t\tTo add CommandMaps to a cm-file",
"\t\t"++pN++" config addcm [flie-name] [cm-lists]",
"\t\tTo delete (a) CommandMap(s) ",
"\t\t"++pN++" config delcm [the ids' list of the cm]",
"\t\tTo change CommandMap's status",
"\t\t"++pN++" config turncm [file-name] [the list of ids] ",
"\t\tTo change a cm's text",
"\t\t"++pN++" config chcm [file-name] [id] [text]",
"\n\n",
"\tGiveYouAHead's repo : https://github.com/Qinka/GiveYouAHead",
"\tGiveYouAHead had upload to Havkage: http://hackage.haskell.org",
"\tBug report: https://github.com/Qinka/GiveYouAHead/issues",
"\tLICENSE? BSD3",
""
]
| Qinka/GiveYouAHead | lib/GiveYouAHead/Help.hs | mit | 1,978 | 0 | 9 | 653 | 242 | 142 | 100 | 41 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module CoinApi.Types.Period where
import CoinApi.Types.Internal
data Period = Period { period_id :: !Text
, length_seconds :: !Int
, length_months :: !Int
, unit_count :: !Int
, unit_name :: !Text
, display_name :: !Text }
deriving (Show, Eq)
instance FromJSON Period where
parseJSON = withObject "Period" $ \o -> Period
<$> o .: "period_id"
<*> o .: "length_seconds"
<*> o .: "length_months"
<*> o .: "unit_count"
<*> o .: "unit_name"
<*> o .: "display_name"
| coinapi/coinapi-sdk | data-api/haskell-rest/CoinApi/Types/Period.hs | mit | 877 | 0 | 19 | 459 | 149 | 82 | 67 | 30 | 0 |
module App where
import Network.Wai
import Network.Wai.Handler.Warp
import Servant.Server
-- import qualified Data.ByteString as B
import Network.Wai.Middleware.RequestLogger
import Greet
import Handlers
haskapi :: Application
haskapi = serve haskApi handlers
-- Run the server.
--
-- 'run' comes from Network.Wai.Handler.Warp
runTestServer :: Port -> IO ()
runTestServer port = run port $ logStdout haskapi | mooreniemi/haskapi | app.hs | mit | 413 | 0 | 7 | 58 | 85 | 50 | 35 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module FileCacheSpec (main, spec) where
import Test.Hspec
import Prelude as P
import Control.Monad.IO.Class
import Data.Map.Lazy as Map
import System.IO.Temp
import Data.Conduit
import qualified Data.Conduit.List as DCL
-- import Data.Conduit.Binary
import System.IO
import Control.Monad.IO.Class
import Data.Serialize as DS
import Data.ByteString as BS
import Control.Monad
import System.FilePath
import Network.BitSmuggler.FileCache as FC
main :: IO ()
main = hspec spec
tempDir = "testDir"
store cache items = forM items (\(k, c) -> FC.put cache k (DCL.sourceList [c]))
checkContains cache items
= forM items $ \((k, c), destination) -> do
path <- FC.lookup cache k
path `shouldBe` (Just destination)
storedContent <- BS.readFile destination
storedContent `shouldBe` c
spec :: Spec
spec = do
describe "load" $ do
it "loads from empty unconfigured dir" $ do
withSystemTempDirectory tempDir $ \root -> do
cache <- (load root :: IO (FileCache String))
nothin <- FC.lookup cache "nonexistant"
nothin `shouldBe` Nothing
return ()
describe "put" $ do
it "contains what was stored" $ do
withSystemTempDirectory tempDir $ \root -> do
cache <- (load root :: IO (FileCache String))
let items = [("k", "bytestring"), ("k2", "bullshit")]
destinations <- store cache items
checkContains cache (P.zip items destinations)
return ()
describe "close" $ do
it "loads correctly after closing" $ do
withSystemTempDirectory tempDir $ \root -> do
cache <- (load root :: IO (FileCache String))
let items = [("k", "bytestring")]
destinations <- store cache items
close cache
reloaded <- (load root :: IO (FileCache String))
checkContains reloaded (P.zip items destinations)
return ()
return ()
| danoctavian/bit-smuggler | BitSmuggler/test/unit/FileCacheSpec.hs | gpl-2.0 | 1,913 | 0 | 21 | 451 | 640 | 329 | 311 | 54 | 1 |
module Main where
import Bio.Core.Sequence
import Bio.Sequence.Fasta
import Control.Monad(unless)
import qualified Data.ByteString.Lazy.Char8 as B
import Data.Foldable (forM_)
import Data.Monoid
import Options.Applicative
import System.FilePath.Posix (joinPath, takeBaseName)
import System.Directory
import System.IO
import Text.Printf (printf)
-- # --------------------------------------------------
data Options = Options {
inputFile :: String,
outDir :: String,
kmerSize :: Integer
} deriving (Show)
-- # --------------------------------------------------
kmerize :: BioSeq a => Integer -> a -> (String, Integer, [B.ByteString])
kmerize n seqRead = (readId, numKmers, kmers)
where readId = toString . seqid $ seqRead
(numKmers, kmers) = findKmers n . unSD . seqdata $ seqRead
-- # --------------------------------------------------
findKmers :: Integer -> B.ByteString -> (Integer, [B.ByteString])
findKmers k xs = (n, findKmers' n k xs)
where n = toInteger (B.length xs) - k + 1
findKmers' n' k' xs'
| n' > 0 = B.take (fromIntegral k') xs'
: findKmers' (n' - 1) k' (B.tail xs')
| otherwise = []
-- # --------------------------------------------------
runWithOptions :: Options -> IO ()
runWithOptions opts = do
let inFile = inputFile opts
let outputDir = outDir opts
let baseName = takeBaseName inFile
let outFileKmers = joinPath [outputDir, baseName ++ ".kmers"]
let outFileLoc = joinPath [outputDir, baseName ++ ".loc"]
outdirExists <- doesDirectoryExist outputDir
unless outdirExists (createDirectory outputDir)
input <- readFasta inFile
let kmers = map (kmerize (kmerSize opts)) input
locFh <- openFile outFileLoc WriteMode
kmerFh <- openFile outFileKmers WriteMode
forM_ kmers $ \(readId, numKmers, readKmers) -> do
hPutStrLn locFh $ printf "%s\t%d" readId numKmers
mapM_ (B.hPutStrLn kmerFh) readKmers
hClose locFh
hClose kmerFh
putStrLn $ printf "Done, wrote files to '%s'" outputDir
-- # --------------------------------------------------
main :: IO ()
main = execParser opts >>= runWithOptions
where
parser = Options <$> strOption
( long "input"
<> short 'i'
<> metavar "INPUT" )
<*> strOption
( long "outdir"
<> short 'o'
<> value "."
<> metavar "OUTDIR" )
<*> option auto
( long "kmer"
<> short 'k'
<> value 20
<> metavar "KMER_SIZE" )
opts = info parser mempty
| hurwitzlab/kmerizer | Main.hs | gpl-2.0 | 2,747 | 0 | 14 | 789 | 774 | 393 | 381 | 64 | 1 |
{-# LANGUAGE Rank2Types, MultiWayIf, CPP #-}
module Text.Pandoc.CrossRef.References.Blocks
( replaceAll
) where
import Text.Pandoc.Definition
import Text.Pandoc.Builder (text, toList)
import Text.Pandoc.Shared (stringify, normalizeSpaces)
import Control.Monad.State hiding (get, modify)
import Data.List
import Data.Maybe
import Data.Monoid
import qualified Data.Map as M
import Data.Accessor
import Data.Accessor.Monad.Trans.State
import Text.Pandoc.CrossRef.References.Types
import Text.Pandoc.CrossRef.Util.Util
import Text.Pandoc.CrossRef.Util.Options
import Text.Pandoc.CrossRef.Util.Template
import Control.Applicative
import Prelude
import Data.Default
replaceAll :: Data a => Options -> a -> WS a
replaceAll opts =
everywhereMBut' (mkQ False isSubfig `extQ` isSubfig') (mkM (replaceBlocks opts) `extM` replaceInlines opts)
. everywhere' (mkT divBlocks `extT` spanInlines)
where
isSubfig (Div (label,cls,_) _)
| "fig:" `isPrefixOf` label = True
| "crossref-stop" `elem` cls = True
isSubfig _ = False
isSubfig' (Span (_,cls,_) _)
| "crossref-stop" `elem` cls = True
isSubfig' _ = False
replaceBlocks :: Options -> Block -> WS Block
replaceBlocks opts (Header n (label, cls, attrs) text')
= do
let label' = if autoSectionLabels opts && not ("sec:" `isPrefixOf` label)
then "sec:"++label
else label
unless ("unnumbered" `elem` cls) $ do
modify curChap $ \cc ->
let ln = length cc
cl = lookup "label" attrs
inc l = init l ++ [(fst (last l) + 1, cl)]
cc' | ln > n = inc $ take n cc
| ln == n = inc cc
| otherwise = cc ++ take (n-ln-1) (zip [1,1..] $ repeat Nothing) ++ [(1,cl)]
in cc'
when ("sec:" `isPrefixOf` label') $ replaceAttrSec label' text' secRefs
return $ Header n (label', cls, attrs) text'
-- subfigures
replaceBlocks opts (Div (label,cls,attrs) images)
| "fig:" `isPrefixOf` label
, Para caption <- last images
= do
idxStr <- replaceAttr opts label (lookup "label" attrs) caption imgRefs
let (cont, st) = runState (replaceAll opts' $ init images) (subFig ^= True $ def)
collectedCaptions =
intercalate (ccsDelim opts)
$ map snd
$ M.toList
$ M.map collectCaps
$ imgRefs_ st
collectCaps v =
applyTemplate
(chapPrefix (chapDelim opts) (refIndex v))
(refTitle v)
(ccsTemplate opts)
vars = M.fromDistinctAscList
[ ("ccs", collectedCaptions)
, ("i", idxStr)
, ("t", caption)
]
capt = applyTemplate' vars $ subfigureTemplate opts
lastRef <- fromJust . M.lookup label <$> get imgRefs
modify imgRefs $ \old ->
M.union
old
(M.map (\v -> v{refIndex = refIndex lastRef, refSubfigure = Just $ refIndex v})
$ imgRefs_ st)
case outFormat opts of
f | isFormat "latex" f ->
return $ Div stopAttr $
[ RawBlock (Format "tex") "\\begin{figure}" ]
++ cont ++
[ Para [RawInline (Format "tex") "\\caption"
, Span stopAttr caption]
, RawBlock (Format "tex") $ mkLaTeXLabel label
, RawBlock (Format "tex") "\\end{figure}"]
_ -> return $ Div (label, "subfigures":cls, attrs) $ cont ++ [Para capt]
where
opts' = opts
{ figureTemplate = subfigureChildTemplate opts
, customLabel = \r i -> customLabel opts ("sub"++r) i
}
replaceBlocks opts (Div (label,_,attrs) [Table title align widths header cells])
| not $ null title
, "tbl:" `isPrefixOf` label
= do
idxStr <- replaceAttr opts label (lookup "label" attrs) title tblRefs
let title' =
case outFormat opts of
f | isFormat "latex" f ->
RawInline (Format "tex") (mkLaTeXLabel label) : title
_ -> applyTemplate idxStr title $ tableTemplate opts
return $ Table title' align widths header cells
replaceBlocks opts cb@(CodeBlock (label, classes, attrs) code)
| not $ null label
, "lst:" `isPrefixOf` label
, Just caption <- lookup "caption" attrs
= case outFormat opts of
f
--if used with listings package,nothing shoud be done
| isFormat "latex" f, listings opts -> return cb
--if not using listings, however, wrap it in a codelisting environment
| isFormat "latex" f ->
return $ Div stopAttr [
RawBlock (Format "tex")
$ "\\begin{codelisting}\n\\caption{"++caption++"}"
, cb
, RawBlock (Format "tex") "\\end{codelisting}"
]
_ -> do
let cap = toList $ text caption
idxStr <- replaceAttr opts label (lookup "label" attrs) cap lstRefs
let caption' = applyTemplate idxStr cap $ listingTemplate opts
return $ Div (label, "listing":classes, []) [
Para caption'
, CodeBlock ([], classes, attrs \\ [("caption", caption)]) code
]
replaceBlocks opts
(Div (label,"listing":_, [])
[Para caption, CodeBlock ([],classes,attrs) code])
| not $ null label
, "lst:" `isPrefixOf` label
= case outFormat opts of
f
--if used with listings package, return code block with caption
| isFormat "latex" f, listings opts ->
return $ CodeBlock (label,classes,("caption",stringify caption):attrs) code
--if not using listings, however, wrap it in a codelisting environment
| isFormat "latex" f ->
return $ Div stopAttr [
RawBlock (Format "tex") "\\begin{codelisting}"
, Para [
RawInline (Format "tex") "\\caption"
, Span stopAttr caption
]
, CodeBlock (label,classes,attrs) code
, RawBlock (Format "tex") "\\end{codelisting}"
]
_ -> do
idxStr <- replaceAttr opts label (lookup "label" attrs) caption lstRefs
let caption' = applyTemplate idxStr caption $ listingTemplate opts
return $ Div (label, "listing":classes, []) [
Para caption'
, CodeBlock ([], classes, attrs) code
]
replaceBlocks opts (Para [Span (label, _, attrs) [Math DisplayMath eq]])
| not $ isFormat "latex" (outFormat opts)
, tableEqns opts
= do
idxStr <- replaceAttr opts label (lookup "label" attrs) [] eqnRefs
return $ Table [] [AlignCenter, AlignRight] [0.9, 0.1] [] [[[Plain [Math DisplayMath eq]], [Plain [Math DisplayMath $ "(" ++ stringify idxStr ++ ")"]]]]
replaceBlocks _ x = return x
replaceInlines :: Options -> Inline -> WS Inline
replaceInlines opts (Span (label,_,attrs) [Math DisplayMath eq])
| "eq:" `isPrefixOf` label
= case outFormat opts of
f | isFormat "latex" f ->
let eqn = "\\begin{equation}"++eq++mkLaTeXLabel label++"\\end{equation}"
in return $ RawInline (Format "tex") eqn
_ -> do
idxStr <- replaceAttr opts label (lookup "label" attrs) [] eqnRefs
let eq' = eq++"\\qquad("++stringify idxStr++")"
return $ Math DisplayMath eq'
replaceInlines opts x@(Image attr@(label,cls,attrs) alt img@(src, tit))
| "fig:" `isPrefixOf` snd img
= do
sf <- get subFig
if | sf -> do
let label' | "fig:" `isPrefixOf` label = label
| otherwise = "fig:" ++ label
idxStr <- replaceAttr opts label' (lookup "label" attrs) alt imgRefs
case outFormat opts of
f | isFormat "latex" f ->
return $ latexSubFigure x label
_ ->
let alt' = applyTemplate idxStr alt $ figureTemplate opts
tit' | "nocaption" `elem` cls = fromMaybe tit $ stripPrefix "fig:" tit
| otherwise = tit
in return $ Image (label, cls, attrs) alt' (src, tit')
| "fig:" `isPrefixOf` label -> do
idxStr <- replaceAttr opts label (lookup "label" attrs) alt imgRefs
let alt' = case outFormat opts of
f | isFormat "latex" f ->
#if MIN_VERSION_pandoc(1,17,0)
alt
#else
RawInline (Format "tex") (mkLaTeXLabel label) : alt
#endif
_ -> applyTemplate idxStr alt $ figureTemplate opts
return $ Image attr alt' img
| otherwise ->
return x
replaceInlines _ x = return x
divBlocks :: Block -> Block
divBlocks (Table title align widths header cells)
| not $ null title
, Just label <- getRefLabel "tbl" [last title]
= Div (label,[],[]) [Table (init title) align widths header cells]
divBlocks x = x
spanInlines :: [Inline] -> [Inline]
spanInlines (math@(Math DisplayMath _eq):ils)
| c:ils' <- dropWhile (==Space) ils
, Just label <- getRefLabel "eq" [c]
= Span (label,[],[]) [math]:ils'
spanInlines x = x
getRefLabel :: String -> [Inline] -> Maybe String
getRefLabel _ [] = Nothing
getRefLabel tag ils
| Str attr <- last ils
, all (==Space) (init ils)
, "}" `isSuffixOf` attr
, ("{#"++tag++":") `isPrefixOf` attr
= init `fmap` stripPrefix "{#" attr
getRefLabel _ _ = Nothing
replaceAttr :: Options -> String -> Maybe String -> [Inline] -> Accessor References RefMap -> WS [Inline]
replaceAttr o label refLabel title prop
= do
chap <- take (chaptersDepth o) `fmap` get curChap
i <- (1+) `fmap` (M.size . M.filter (ap ((&&) . (chap ==) . init . refIndex) (isNothing . refSubfigure)) <$> get prop)
let index = chap ++ [(i, refLabel <> customLabel o label i)]
modify prop $ M.insert label RefRec {
refIndex= index
, refTitle=normalizeSpaces title
, refSubfigure = Nothing
}
return $ chapPrefix (chapDelim o) index
replaceAttrSec :: String -> [Inline] -> Accessor References RefMap -> WS ()
replaceAttrSec label title prop
= do
index <- get curChap
modify prop $ M.insert label RefRec {
refIndex=index
, refTitle=normalizeSpaces title
, refSubfigure = Nothing
}
return ()
latexSubFigure :: Inline -> String -> Inline
latexSubFigure (Image (_, cls, attrs) alt (src, title)) label =
let
title' = fromMaybe title $ stripPrefix "fig:" title
texlabel | null label = []
| otherwise = mkLaTeXLabel label
texalt | "nocaption" `elem` cls = []
| otherwise =
[ RawInline (Format "tex") "["] ++ alt ++ [ RawInline (Format "tex") "]"]
img = Image (label, cls, attrs) alt (src, title')
in Span stopAttr $
[ RawInline (Format "tex") "\\subfloat" ] ++ texalt ++
[ RawInline (Format "tex") "{" ] ++
[img] ++
[ RawInline (Format "tex") $ texlabel ++ "}"]
latexSubFigure x _ = x
stopAttr :: Attr
stopAttr = ([], ["crossref-stop"], [])
| infotroph/pandoc-crossref | lib/Text/Pandoc/CrossRef/References/Blocks.hs | gpl-2.0 | 10,803 | 0 | 23 | 3,091 | 3,866 | 1,959 | 1,907 | 246 | 6 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import System.Process
import Control.Monad.Identity
#if MIN_VERSION_optparse_applicative(0,13,0)
import Data.Monoid ((<>))
#endif
import Data.Yaml hiding (Parser)
import Data.Yaml.Config
import qualified Data.Map as M
import Data.Map (Map)
import Data.Aeson.Types (typeMismatch)
import Options.Applicative
import Data.List (intercalate)
import System.Directory (createDirectoryIfMissing, canonicalizePath)
import Prelude hiding (log)
parens :: [Char] -> [Char]
parens x = "(" ++ x ++ ")"
-----------------------------------------
-- OPTIONS
data Command = Configure | Cabal [String] | Clean | Command :<> Command
withInfo :: Parser a -> String -> ParserInfo a
withInfo opts desc = info (helper <*> opts) $ progDesc desc
parseExec :: Parser Command
parseExec = (\rest -> Cabal (["v2-exec","--"] ++ rest)) <$> some (argument str (metavar "COMMAND"))
parseRepl :: Parser Command
parseRepl = (\target -> Cabal (["v2-repl"] ++ maybe [] (:[]) target)) <$> optional (argument str (metavar "TARGET"))
parseCabal :: Parser Command
parseCabal = Cabal <$> some (argument str (metavar "COMMAND"))
parseCommand :: Parser Command
parseCommand = subparser $
command "configure" (pure Configure `withInfo` "Re-configure the project on the basis of the styx.yaml file") <>
command "clean" (pure Clean `withInfo` "Remove all styx working files") <>
command "build" (pure (Cabal ["v2-build","all"]) `withInfo` "build all the packages") <>
command "repl" (parseRepl `withInfo` "Start a repl in the nix-shell'ed 1st component of the cabal project") <>
command "exec" (parseExec `withInfo` "Exec a command in the nix-shell'ed cabal project") <>
command "cabal" (parseCabal `withInfo` "Execute an arbitrary cabal command in the nix-shell")
main :: IO ()
main = run =<< execParser (parseCommand `withInfo` "Wrapper around nix-shell, cabal2nix and cabal")
--------------------------------
-- Configuration
data Repo = Repo {repoLocation :: String,
repoRevision :: Maybe String,
repoCabal2NixFlags :: Maybe String}
data Config =
Config {cfgNixpkgsVersion :: Maybe SourceVersion
,cfgLocalPackages :: Map String Repo -- list of local packages (must be on the local filesystem)
,cfgExternalSourceDeps :: Map String Repo -- mapping of package names to locations as understood by cabal2nix
,cfgNixHsDeps :: [String]
-- ^ haskell deps to fetch directly from nix (usually empty for a cabal project, as the cabal file will specifiy deps)
,cfgNixOtherDeps :: [String]
-- ^ Other nix dependencies (non haskell packages)
,cfgDefCompil :: Maybe String
}
data ShellConfig = ShellConfig {}
instance FromJSON Config where
parseJSON (Object v) = Config <$>
v .:? "nixpkgs" <*>
v .:? "local-packages" .!= M.empty <*>
v .:? "source-deps" .!= M.empty <*>
v .:? "nix-deps" .!= [] <*>
v .:? "non-haskell-deps" .!= [] <*>
v .:? "default-compiler"
parseJSON invalid = typeMismatch "Config" invalid
instance FromJSON Repo where
parseJSON (Object v) = Repo <$>
v .: "location" <*> -- location of the repo (in cabal2nix format)
v .:? "revision" <*>
v .:? "cabal2nix"
parseJSON invalid = typeMismatch "Location" invalid
data SourceVersion = GitVersion {gitOwner :: String, gitCommit :: String, gitSha :: String}
| TarballVersion {tarballURL :: String}
instance FromJSON SourceVersion where
parseJSON (Object v) = (GitVersion <$>
v .:? "owner" .!= "NixOS" <*>
v .: "commit" <*>
v .: "sha256")
<|> (TarballVersion <$> v .: "url")
parseJSON invalid = typeMismatch "Git version" invalid
-----------------------------------------
-- Program
locToNix :: String -> Repo -> IO ()
locToNix p (Repo {..}) = do
cmd $ intercalate " " ["cabal2nix",
maybe "" ("--revision=" ++) repoRevision,
maybe "" id repoCabal2NixFlags,
repoLocation, "> .styx/" ++ p ++ ".nix"]
canonicalizeLocalPath :: Repo -> IO Repo
canonicalizeLocalPath (Repo {repoLocation = d,..}) = do
repoLocation <- canonicalizePath d
return (Repo {..})
run :: Command -> IO ()
run c = case c of
a :<> b -> run a >> run b
Configure -> configure
Cabal args -> do
_ <- cmd ("nix-shell .styx/shell.nix --pure --run " ++ show (intercalate " " ("cabal":args)))
return ()
Clean -> cmd "rm -rf .styx"
cmd :: String -> IO ()
cmd x = do
putStrLn x
callCommand x
log :: String -> IO ()
log msg = putStrLn $ "Styx: " ++ msg
configure :: IO ()
configure = do
Config{..} <- loadYamlSettings ["styx.yaml"] [] ignoreEnv
createDirectoryIfMissing False ".styx"
log "Initializing cabal.project"
writeFile "cabal.project" $ unlines $ ("packages:" : [" " ++ repoLocation ++ "/" ++ projectName ++ ".cabal"
| (projectName,Repo {..}) <- M.assocs cfgLocalPackages ] )
log "Running cabal2nix for all local and external packages"
forM_ (M.assocs cfgLocalPackages) $ \(p,r) -> locToNix p =<< (canonicalizeLocalPath r)
forM_ (M.assocs cfgExternalSourceDeps) (uncurry locToNix)
log "Creating shell.nix file"
writeFile ".styx/shell.nix" $ unlines $
["{ nixpkgs ? import <nixpkgs> {}"
, maybe "" ((", compiler ? " ++) . show) cfgDefCompil
," }:"]
++ case cfgNixpkgsVersion of
Nothing -> ["let nixpkgs' = nixpkgs;"]
Just source -> ["let nixpkgs_source ="] ++ case source of
GitVersion {..} -> [" nixpkgs.fetchFromGitHub {"
," owner = " ++ show gitOwner ++ ";"
," repo = \"nixpkgs\";"
," rev = " ++ show gitCommit ++ ";"
," sha256 = " ++ show gitSha ++ ";"
," };"
]
TarballVersion {..} -> ["fetchTarball " ++ show tarballURL ++ ";"]
++ [" nixpkgs' = (import nixpkgs_source){};"]
++ ["in with nixpkgs'.pkgs;"
,"let hp = " ++ maybe "haskellPackages" (const "haskell.packages.${compiler}") cfgDefCompil ++ ".override{"
," overrides = self: super: {"
]
++ [" " ++ n ++ " = self.callPackage ./" ++ n ++ ".nix {};"
| n <- (M.keys cfgExternalSourceDeps ++ M.keys cfgLocalPackages)]
++ [" };};"
," getHaskellDeps = ps: path:"
," let f = import path;"
," gatherDeps = { " ++ concat [d ++ " ? [], " | d <- depKinds] ++ "...}:"
," " ++ intercalate " ++ " depKinds ++ ";"
," x = f (builtins.intersectAttrs (builtins.functionArgs f)"
," (ps // ",
" nixpkgs'.pkgs) # can also depend on non-haskell packages",
" // {lib = lib; mkDerivation = gatherDeps;});"
," in x;"
,"ghc = hp.ghcWithPackages (ps: with ps; lib.lists.subtractLists"
, "[" ++ intercalate " " (M.keys cfgLocalPackages) ++ "]" -- Here we remove the packages that we provide locally in the sandbox
, "([ cabal-install "
, intercalate " " (M.keys cfgExternalSourceDeps ++ cfgNixHsDeps)
," ] " ++ concat [" ++ getHaskellDeps ps ./" ++ n ++ ".nix"| n <- M.keys cfgLocalPackages] ++ "));"
,"in"
,"pkgs.stdenv.mkDerivation {"
," name = \"my-haskell-env-0\";"
," buildInputs = [ glibcLocales ghc " ++ intercalate " " (map parens cfgNixOtherDeps) ++ "];" -- todo system build inputs here
," shellHook = ''"
," export LANG=en_US.UTF-8"
," eval $(egrep ^export ${ghc}/bin/ghc)"
,"'';"
,"}"]
run (Cabal ["v2-configure"]) -- this will fail unless the sandbox dependencies are built first.
depKinds :: [String]
depKinds = ["buildDepends", "libraryHaskellDepends", "executableHaskellDepends", "libraryToolDepends", "executableToolDepends"]
-- Local Variables:
-- dante-methods: (impure-nix)
-- End:
| jyp/styx | styx/Main.hs | gpl-2.0 | 8,489 | 71 | 22 | 2,421 | 2,055 | 1,102 | 953 | 154 | 4 |
{-# LANGUAGE TemplateHaskell, FlexibleInstances, FlexibleContexts, ViewPatterns, RecordWildCards, NamedFieldPuns, ScopedTypeVariables, TypeSynonymInstances, NoMonomorphismRestriction, TupleSections, StandaloneDeriving, GeneralizedNewtypeDeriving #-}
module Tetrahedron.Tests where
import Tetrahedron
import Test.QuickCheck
import Test.QuickCheck.All
import QuickCheckUtil
import Element
import Data.List as L
import Data.Proxy
import HomogenousTuples
import Control.Monad
import Tetrahedron.NormalDisc
import Data.Ix
import Simplicial.DeltaSet3
import Data.AscTuples
import QuadHalf
qc_Tetrahedron = $quickCheckAll
-- * Vertex
prop_viewUnviewVertex :: Vertex -> Property
prop_viewUnviewVertex v = v .=. unviewVertex (viewVertex v)
prop_toFromWord8 :: Vertex -> Property
prop_toFromWord8 v = v .=. vertexFromWord8 (vertexToWord8 v)
prop_otherVertices :: Vertex -> Bool
prop_otherVertices v =
asList (otherVertices v) == sort (filter (/= v) allVertices)
-- * Edge
prop_OrderableFace_Edge :: Property
prop_OrderableFace_Edge = polyprop_OrderableFace (undefined :: Proxy Edge)
prop_EnumEdge :: Property
prop_EnumEdge = forAll (elements [0..5]) (\n -> fromEnum (toEnum n :: Edge) .=. n)
prop_OppositeEdge_Order2 :: Edge -> Property
prop_OppositeEdge_Order2 e = let e' = oppositeEdge e in (e' /= e) .&. (oppositeEdge e' == e)
prop_OppositeEdge_disjoint :: Edge -> Property
prop_OppositeEdge_disjoint e = L.intersect (edgeVertexList e) (edgeVertexList (oppositeEdge e)) .=. []
where
edgeVertexList = asList . edgeVertices
prop_EnumOEdge :: Property
prop_EnumOEdge = forAll (elements [0..11]) (\n -> fromEnum (toEnum n :: OEdge) .=. n)
prop_OrderableFace_IEdge :: Property
prop_OrderableFace_IEdge = polyprop_OrderableFace (undefined :: Proxy IEdge)
prop_joinIVertexAndEdge :: ITriangle -> Property
prop_joinIVertexAndEdge t =
forAllElements (vertexList t)
(\v ->
t .=. joinIVertexAndEdge v (edgeByOppositeVertexAndTriangle (unI v) (unI t)))
prop_verticesOfTriangle :: Triangle -> Property
prop_verticesOfTriangle t =
asList (verticesOfTriangle t)
.=.
sort (filter (`isVertexOfTriangle` t) allVertices)
prop_MakeTriangle_VVV :: Vertex -> Property
prop_MakeTriangle_VVV v0 =
forAll (elements vs') $
\v1 -> forAll (elements (vs' \\ [v1])) $
\v2 ->
let v012 = (v0,v1,v2) in asList v012 `setEq` (asList . verticesOfTriangle) (triangle v012)
where
vs' = allVertices \\ [v0]
prop_MakeTriangle_EE :: Edge -> Edge -> Property
prop_MakeTriangle_EE e1 e2 = (e1 /= e2 && e1 /= oppositeEdge e2) ==>
let
t = triangle (e1,e2)
in
(e1 `isEdgeOfTriangle` t) .&. (e2 `isEdgeOfTriangle` t)
prop_OrderableFace_ITriangle :: Property
prop_OrderableFace_ITriangle = polyprop_OrderableFace (undefined :: Proxy ITriangle)
prop_OrderableFace_Triangle :: Property
prop_OrderableFace_Triangle = polyprop_OrderableFace (undefined :: Proxy Triangle)
prop_trianglesContainingVertex :: Vertex -> Property
prop_trianglesContainingVertex v =
setEq
(asList (trianglesContainingVertex v))
(filter (isVertexOfTriangle v) allTriangles)
prop_IsSubface_transitive :: Vertex -> Edge -> Triangle -> Property
prop_IsSubface_transitive v e f = (isSubface v e && isSubface e f) ==> isSubface v e
prop_IsSubface_count_VE :: Vertex -> Bool
prop_IsSubface_count_VE v = length ( filter6 (v `isSubface`) allEdges' ) == 3
prop_IsSubface_count_VF :: Vertex -> Bool
prop_IsSubface_count_VF v = length ( filter4 (v `isSubface`) allTriangles' ) == 3
prop_IsSubface_count_EF :: Edge -> Bool
prop_IsSubface_count_EF e = length ( filter4 (e `isSubface`) allTriangles' ) == 2
prop_edgeByOppositeVertexAndTriangle :: Vertex -> Triangle -> Property
prop_edgeByOppositeVertexAndTriangle v t | isSubface v t = (isSubface e t .&. not (isSubface v e))
| otherwise = expectFailure (seq e True)
where
e = edgeByOppositeVertexAndTriangle v t
prop_edgesContainingVertex :: Vertex -> Bool
prop_edgesContainingVertex v = all3 (isSubface v) (star v (OneSkeleton AbsTet))
prop_starVertexInTwoSkel :: Vertex -> Bool
prop_starVertexInTwoSkel v = all3 (isSubface v) (star v (TwoSkeleton AbsTet))
prop_VerticesToOTriangle :: Vertex -> Property
prop_VerticesToOTriangle v0 =
forAll (arbitrary `suchThat` (/= v0)) $ \v1 ->
forAll (arbitrary `suchThat` (liftM2 (&&) (/= v0) (/= v1))) $ \v2 ->
let vs = (v0,v1,v2) in vs .=. vertices (oTriangleByVertices vs)
prop_VerticesToOEdge :: Vertex -> Property
prop_VerticesToOEdge v0 =
forAll (arbitrary `suchThat` (/= v0)) $ \v1 ->
let vs = (v0,v1) in vs .=. vertices (verticesToOEdge vs)
prop_MakeEdge :: (Vertex,Vertex) -> Property
prop_MakeEdge vs@(v0,v1) = v0 < v1 ==> (vertices (edge vs) == vs)
prop_Triangle_NormalArcs_correct :: Triangle -> Bool
prop_Triangle_NormalArcs_correct t = all3 (`isSubface` t) (normalArcs t)
prop_Triangle_NormalArcs_complete :: NormalArc -> Triangle -> Property
prop_Triangle_NormalArcs_complete nat t =
isSubface nat t ==>
any3 (==nat) (normalArcs t)
prop_normalArcByNormalCorners :: NormalArc -> Property
prop_normalArcByNormalCorners na =
na == normalArc (nc1,nc2)
.&.
na == normalArc (nc2,nc1)
where
(nc1,nc2) = normalCorners na
prop_normalArcGetAngle :: NormalArc -> Property
prop_normalArcGetAngle na =
v .=. normalArcGetVertex na
.&.
triangle vs .=. normalArcGetTriangle na
where
vs@(_,v,_) = normalArcGetAngle na
prop_normalArcGetAngle_corners :: NormalArc -> Property
prop_normalArcGetAngle_corners na =
na .=. normalArc (nc0,nc1)
where
(v0,v,v1) = normalArcGetAngle na
nc0 = normalCorner (v0,v)
nc1 = normalCorner (v1,v)
prop_NormalCornersOfNormalArc_distinct :: NormalArc -> Bool
prop_NormalCornersOfNormalArc_distinct nat = let (c1,c2) = normalCorners nat in c1 /= c2
prop_normalArcsAroundVertex :: Vertex -> Property
prop_normalArcsAroundVertex v =
setEq
(asList . normalArcsAroundVertex $ v)
(filter ((==v) . normalArcGetVertex) allNormalArcs)
prop_normalQuadGetIntersectedEdges :: NormalQuad -> Bool
prop_normalQuadGetIntersectedEdges nqt =
sort allEdges == sort (toList4 (normalQuadGetIntersectedEdges nqt)
++ asList (normalQuadGetDisjointEdges nqt))
prop_NormalDisc_NormalArcs_correct :: NormalDisc -> Bool
prop_NormalDisc_NormalArcs_correct ndt = all (`isSubface` ndt) (normalArcs ndt)
prop_NormalDisc_NormalArcs_complete :: NormalArc -> NormalDisc -> Property
prop_NormalDisc_NormalArcs_complete nat ndt =
isSubface nat ndt ==>
any (==nat) (normalArcs ndt)
prop_normalQuadByNormalArc :: NormalArc -> Bool
prop_normalQuadByNormalArc na = isSubface na (normalQuadByNormalArc na)
prop_normalTriByNormalArc :: NormalArc -> Bool
prop_normalTriByNormalArc na = isSubface na (normalTriByNormalArc na)
prop_link_nc_nq :: NormalCorner -> NormalQuad -> Property
prop_link_nc_nq nc nq =
isSubface nc nq ==>
let
(nc0,nc1) = link nc nq
in
conjoin' [
isSubface nc0 nq,
isSubface nc1 nq,
nc0 /= nc,
nc1 /= nc,
nc0 /= nc1
]
-- | Laws from the 'Ix' documentation
prop_Ix_NormalDisc :: NormalDisc -> NormalDisc -> Property
prop_Ix_NormalDisc (l :: NormalDisc) u =
(\i -> inRange (l,u) i == elem i (range (l,u)))
.&&.
(\i -> inRange (l,u) i ==> range (l,u) !! index (l,u) i == i)
.&&.
(map (index (l,u)) (range (l,u)) == [0..rangeSize (l,u)-1])
.&&.
(rangeSize (l,u) == length (range (l,u)))
polyprop_SatisfiesSimplicialIdentities2 :: (Eq (Vert (Ed t)), Show (Vert (Ed t)), Show (Ed t), SatisfiesSimplicialIdentities2 t) => t -> Property
polyprop_SatisfiesSimplicialIdentities2 t =
printTestCase ("Checking that 'vertices' equals 'defaultVerticesOfTri'")
(vertices t .=. defaultVerticesOfTri t)
.&&.
case edges t of
dits@(d0,d1,d2) ->
printTestCase ("d0 t = "++show d0) $
printTestCase ("d1 t = "++show d1) $
printTestCase ("d2 t = "++show d2) $
case map3 vertices dits of
( (d0d0,d1d0)
,(d0d1,d1d1)
,(d0d2,d1d2))
->
printTestCase ("Checking d0 . d0 = d0 . d1")
(d0d0 .=. d0d1)
.&&.
printTestCase ("Checking d1 . d0 = d0 . d2")
(d1d0 .=. d0d2)
.&&.
printTestCase ("Checking d1 . d1 = d1 . d2")
(d1d1 .=. d1d2)
polyprop_DeltaSet2 s =
forAllElements (triangles s) (polyprop_SatisfiesSimplicialIdentities2)
polyprop_SatisfiesSimplicialIdentities3
:: (Eq (Vert tet), Show (Vert tet), Eq (Ed (Tri tet)), Show (Ed (Tri tet)), Show (Tri tet), SatisfiesSimplicialIdentities3 tet) =>
tet -> Property
polyprop_SatisfiesSimplicialIdentities3 t =
vertices t .=. defaultVerticesOfTet t
.&&.
edges t .=. defaultEdgesOfTet t
.&&.
case triangles t of
dits ->
printTestCase (show dits) $
case map4 edges dits of
( (d0d0,d1d0,d2d0)
,(d0d1,d1d1,d2d1)
,(d0d2,d1d2,d2d2)
,(d0d3,d1d3,d2d3)
)
->
printTestCase ("Checking d0 . d0 = d0 . d1")
(d0d0 .=. d0d1)
.&&.
printTestCase ("Checking d1 . d0 = d0 . d2")
(d1d0 .=. d0d2)
.&&.
printTestCase ("Checking d1 . d1 = d1 . d2")
(d1d1 .=. d1d2)
.&&.
printTestCase ("Checking d2 . d0 = d0 . d3")
(d2d0 .=. d0d3)
.&&.
printTestCase ("Checking d2 . d1 = d1 . d3")
(d2d1 .=. d1d3)
.&&.
printTestCase ("Checking d2 . d2 = d2 . d3")
(d2d2 .=. d2d3)
prop_Asc3_Simpidents :: Asc3 Int -> Property
prop_Asc3_Simpidents = polyprop_SatisfiesSimplicialIdentities2
prop_Asc4_Simpidents :: Asc4 Int -> Property
prop_Asc4_Simpidents = polyprop_SatisfiesSimplicialIdentities3
prop_Triangle_Simpidents :: Triangle -> Property
prop_Triangle_Simpidents = polyprop_SatisfiesSimplicialIdentities2
prop_ITriangle_Simpidents :: ITriangle -> Property
prop_ITriangle_Simpidents = polyprop_SatisfiesSimplicialIdentities2
prop_AbsTet_Simpidents :: Property
prop_AbsTet_Simpidents = polyprop_SatisfiesSimplicialIdentities3 AbsTet
prop_TIndex_Simpidents :: TIndex -> Property
prop_TIndex_Simpidents = polyprop_SatisfiesSimplicialIdentities3
prop_MapAbsTet_id :: Property
prop_MapAbsTet_id =
(\x -> mapVert (MapAbsTet AbsTet) x == x)
.&&.
(\x -> mapEd (MapAbsTet AbsTet) x == x)
.&&.
(\x -> mapTri (MapAbsTet AbsTet) x == x)
.&&.
mapTet (MapAbsTet AbsTet) AbsTet == AbsTet
instance SatisfiesSimplicialIdentities2 (QuadHalf NormalQuad)
prop_Quad_Simpidents :: QuadHalf NormalQuad -> Property
prop_Quad_Simpidents = polyprop_SatisfiesSimplicialIdentities2
| DanielSchuessler/hstri | Tetrahedron/Tests.hs | gpl-3.0 | 11,846 | 0 | 20 | 3,253 | 3,160 | 1,671 | 1,489 | -1 | -1 |
{-# LANGUAGE OverlappingInstances,
EmptyDataDecls,
MultiParamTypeClasses,
DataKinds,
FunctionalDependencies,
FlexibleContexts,
RankNTypes,
TypeSynonymInstances,
ScopedTypeVariables,
UnicodeSyntax,
GADTSyntax,
GADTs,
TypeFamilies,
ConstraintKinds,
InstanceSigs,
GeneralizedNewtypeDeriving,
StandaloneDeriving,
ViewPatterns,
UndecidableInstances,
FlexibleInstances #-}
-- ImpredicativeTypes,
-- ExplicitForAll,
-- IncoherentInstances,
module Music where
-- Todo: make this file literate Haskell (.lhs), due to the large
-- number of long comments.
-- Also todo: split this file out a bit, it's now over 1000 lines!
import Prelude hiding (negate)
import qualified Data.Map as Map
import Data.Ratio
import Data.Complex
import Control.Monad
import Data.AdditiveGroup
import Data.AffineSpace
import Data.VectorSpace
import Data.Semigroup hiding (Min)
-- import qualified Data.Music.Lilypond as L
import Util (interleave, iterateM,
compose, member, intersection,
remove, nd, foldSG, under, divides,
listDiff, uniq)
-------- Main type declarations:
data AbstractPitch1 = AbstractPitch1 Degree Ficta deriving Eq -- scale degrees
data AbstractPitch2 = AbstractPitch2 Name Accidental -- pitch
data AbstractPitch3 = AbstractPitch3 Freq deriving Eq -- frequencies
-- type Figuring = AbstractInt1 -- speculative ... for figured bass
data AbstractInt1 = AbstractInt1 Skip Ficta deriving (Eq, Show) -- "intervals" between scale degrees
data AbstractInt2 = AbstractInt2 Quality Number -- intervals between ordinary pitches
data AbstractInt3 = AbstractInt3 FreqRat deriving Eq -- ratios between frequencis
data AbstractDur1 = AbstractDur1 MDur deriving (Eq, Show, Ord) -- prolations
data AbstractDur2 = AbstractDur2 (Ratio Int) deriving (Eq, Ord) -- note durations
data AbstractDur3 = AbstractDur3 Length deriving (Eq, Ord) -- actual duration in milliseconds
data Name = A | B | C | D | E | F | G | Up Name | Down Name deriving (Eq)
data Accidental = Na | Fl Accidental | Sh Accidental deriving Eq
-- data Degree = Ut | Re | Mi | Fa | Sol | La | LN | DUp Degree | DDown Degree deriving (Eq, Show)
data Degree = Ut | Re | Mi | Fa | Sol | La | Si deriving (Eq, Show)
data Ficta = Raise | Neutral | Lower deriving Eq
-- todo: something more convenient than this, e.g. R | N | L for ficta and U | D for octaves
type FreqRat = Double -- ratio of frequencies
type Freq = Double -- frequency in Hz
type Length = Double
-- see also https://en.wikipedia.org/wiki/Rhythmic_mode
data MDur = Mx | Ln | Br | Sb | Mn | Sm | Ff | Sf | MTie MDur MDur | Punctus MDur
deriving (Eq, Show)
data Skip = Fir | Sec | Thi | Fou | Fif | Six | Sev
| Com Skip
| Neg Skip
deriving (Eq, Show)
data Number = Unison | Second | Third | Fourth | Fifth | Sixth | Seventh
| Compound Number
| Negative Number
deriving Eq
data Quality = Perf | Maj | Min | Aug Quality | Dim Quality deriving Eq
data Metronome = Metronome Int deriving Eq
-- A note can be one of:
-- AbstractPitch (an absolute pitch) \
-- AbstractInt (a relative pitch) | these have durations and pitches
-- Rest (just a duration, no sound) /
-- Conn (a pointer to another phrase that starts simultaneously with the following 'real' note) \ these have neither durations
-- Dir (a convenient way of putting 'inaudible' symbols/other commands in the output) / nor pitches.
data AbstractNote p i d where
AbstractPitch :: (Note p i d, Show p, Show d) => p -> d -> (AbstractNote p i d)
AbstractInt :: (Note p i d, Show i, Show d) => i -> d -> (AbstractNote p i d)
Rest :: (Duration d, Show d) => d -> (AbstractNote p i d)
Conn :: (Show p, Show i, Show d, Note p i d) => AbstractPhrase (AbstractNote p i d) -> (AbstractNote p i d)
Dir :: (Note p i d) => (Directive p i d) -> AbstractNote p i d
deriving instance Eq (AbstractNote p i d)
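-- Two small example values (added for illustration; the names are arbitrary,
-- and they rely on the Transpose/Duration instances for AbstractPitch2,
-- AbstractInt2 and AbstractDur2 defined elsewhere in this module): an A
-- natural lasting 1%4 of a unit, and a rest of 1%8, at the "ordinary pitch"
-- level (Note2).
exampleNote2 :: Note2
exampleNote2 = AbstractPitch (AbstractPitch2 A Na) (AbstractDur2 (1 % 4))

exampleRest2 :: Note2
exampleRest2 = Rest (AbstractDur2 (1 % 8))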
coerceNote :: (Note p i d, Note p' i' d') => AbstractNote p i d -> AbstractNote p' i' d'
-- coerceNote (Dir d) = Dir d
coerceNote n = error $ "Don't know how to coerce " ++ (show n)
isConn (Conn _) = True
isConn _ = False
isNote (AbstractPitch _ _) = True
isNote (AbstractInt _ _) = True
isNote (Rest _) = True
isNote _ = False
data Directive p i d where
-- Tempo :: (Note p i d, Timing t d) => t -> Directive p i d -- hmmm
Tempo :: (Note p i d) => Metronome -> Directive p i d
-- Retune :: (Note p i d, Tuning t p i) => t -> Directive p i d -- hmmm
Figuring :: (Note p i d) => [i] -> Directive p i d
deriving instance Eq (Directive p i d)
deriving instance Show (Directive p i d)
-- Note: we *could* make Conn look like this:
-- Conn :: (Note p i d, Note p' i' d') => AbstractPhrase (AbstractNote p' i' d') -> (AbstractNote p i d)
-- because ideally we'd like Conn to be able to point to an
-- AbstractPhrase of arbitrary type; but this breaks mapPhrase (and
-- everything else) due to GADTs being hard.
-- A phrase of a particular type of note. The fact that notes can
-- themselves be pointers to other phrases (see the Conn constructor)
-- makes this a *bit* like a rose tree.
data AbstractPhrase n where
AbstractPhrase :: (Note p i d) => [AbstractNote p i d] -> AbstractPhrase (AbstractNote p i d)
deriving instance Eq (AbstractPhrase n)
-- A collection of phrases ('Voices') forms a piece of music -- or,
-- alternatively, one single phrase ('Start') starts the whole piece
-- off, and the other phrases split off from it using Conn
-- constructors.
data Music n where
Start :: (Show p, Show i, Show d, Note p i d) => AbstractPhrase (AbstractNote p i d) -> Music (AbstractNote p i d)
Voices :: (Show p, Show i, Show d, Note p i d) => [AbstractPhrase (AbstractNote p i d)] -> Music (AbstractNote p i d)
deriving instance Show (Music n)
-- Ignore the order of voices when comparing music values
instance Eq (Music n) where
(Voices vs) == (Voices vs') = null $ vs `listDiff` vs'
(Voices vs) == p@(Start _) = null $ vs `listDiff` (voiceList $ explodeVoices p)
p@(Start _) == vs@(Voices _) = vs == p
p@(Start _) == p'@(Start _) = (explodeVoices p) == (explodeVoices p')
voiceList :: (Note p i d, n ~ AbstractNote p i d) => Music n -> [AbstractPhrase n]
voiceList (Voices v) = v
voiceList (Start m) = voiceList $ explodeVoices (Start m)
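-- An illustrative Music value (the name is arbitrary), built from the example
-- notes above: a single starting phrase from which a second voice splits off
-- via a Conn, as described in the comments above.
exampleMusic :: Music Note2
exampleMusic = Start (AbstractPhrase [ exampleNote2
                                     , Conn (AbstractPhrase [exampleRest2, exampleNote2])
                                     , exampleNote2 ])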
-------- Type instances of the above types for Ord, Eq, Show etc.
instance Ord AbstractInt1 where
compare = compare `under` (\(AbstractInt1 s _) -> fromEnum s)
instance Ord AbstractPitch1 where
compare = compare `under` (\(AbstractPitch1 d _) -> fromEnum d)
instance Ord AbstractInt2 where
compare = compare `under` (\(AbstractInt2 _ n) -> fromEnum n)
instance Ord AbstractPitch2 where
compare = compare `under` (\(AbstractPitch2 n _) -> fromEnum n)
instance Ord AbstractInt3 where
compare = compare `under` (\(AbstractInt3 f) -> f)
instance Ord AbstractPitch3 where
compare = compare `under` (\(AbstractPitch3 f) -> f)
instance Ord MDur where
compare _ _ = error "Not implemented yet"
instance Show AbstractPitch1 where
show (AbstractPitch1 d f) = (show d) ++ (show f)
instance Show Ficta where
show Raise = "↑"
show Neutral = "-"
show Lower = "↓"
instance Show Name where
show A = "A"
show B = "B"
show C = "C"
show D = "D"
show E = "E"
show F = "F"
show G = "G"
show (Up n) = (show n) ++ "’"
show (Down n) = (show n) ++ "‚"
instance Show Accidental where
show Na = "♮"
show (Fl Na) = "♭"
show (Sh Na) = "♯"
show (Fl (Fl Na)) = "𝄫"
show (Sh (Sh Na)) = "𝄪"
show (Fl (Fl a)) = '𝄫' : (show a)
show (Sh (Sh a)) = '𝄪' : (show a)
show (Fl a) = '♭' : (show a)
show (Sh a) = '♯' : (show a)
instance Show AbstractPitch2 where
show (AbstractPitch2 n a) = (show n) ++ (show a)
instance Bounded Freq where
-- (limits of human hearing)
minBound = 20
maxBound = 20e3
instance Show AbstractPitch3 where
show (AbstractPitch3 f) = showFreq f
instance Show AbstractInt3 where
show (AbstractInt3 f) = show f
showFreq = (++ " Hz") . show
instance Show AbstractDur2 where
show (AbstractDur2 r) = show r
instance Show AbstractDur3 where
show (AbstractDur3 f) = (show f) ++ " ms"
instance Show Number where
show Unison = "1"
show Second = "2"
show Third = "3"
show Fourth = "4"
show Fifth = "5"
show Sixth = "6"
show Seventh = "7"
show (Compound l) = let x = ((read (show l)) :: Int)
in show (if x < 0 then (x - 7) else (x + 7))
show (Negative l) = show (-1 * ((read (show l)) :: Int))
instance Show Quality where
show Perf = "P"
show Maj = "M"
show Min = "m"
show (Dim Perf) = "d"
show (Dim Maj) = "d"
show (Dim Min) = "d"
show (Aug Perf) = "A"
show (Aug Maj) = "A"
show (Aug Min) = "A"
show (Aug q) = 'A':(show q)
show (Dim q) = 'd':(show q)
instance Show AbstractInt2 where
show (AbstractInt2 q l) = (show q) ++ (show l)
instance Show Metronome where
show (Metronome n) = "𝅘𝅥 = " ++ (show n)
------------------
class (Transpose p i, Duration d) => Note p i d where
phrase :: [AbstractNote p i d] -> AbstractPhrase (AbstractNote p i d)
phrase ns = AbstractPhrase ns
note :: p -> d -> AbstractNote p i d
note p d = AbstractPitch p d
rest :: d -> AbstractNote p i d
rest d = Rest d
class (Pitch p, Interval i, AffineSpace p, VectorSpace i) => Transpose p i | p -> i, i -> p where
transpose :: i -> p -> p
interval :: p -> p -> i
normalise :: p -> i -> p -> p
normalise _ _ _ = undefined
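-- (Intended reading, judging from how 'interval' is used in the default
-- 'tune' below: 'interval p q' is the interval from p up to q, so
-- transpose (interval p q) p should give back q.)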
class (Show p, Eq p, Ord p, AffineSpace p) => Pitch p where
incr :: p -> p
decr :: p -> p
sharpen :: p -> p
flatten :: p -> p
middle :: p
class (Show i, Eq i, AdditiveGroup i) => Interval i where
unison :: i
octave :: i
invert :: i -> i
invert i = sub octave i
negate :: i -> i
negate i = sub unison i
add :: i -> i -> i
sub :: i -> i -> i
augment :: i -> i
diminish :: i -> i
grow :: i -> i
shrink :: i -> i
class (Transpose p i) => Scale s p i | s -> p i where
tonic :: s -> AbstractPitch2
tonic = head . scale
final :: s -> AbstractPitch2
final s = (tonic s) .+^ octave
scale :: s -> [AbstractPitch2]
applyScale :: s -> p -> AbstractPitch2
class (Semigroup d, Show d, Eq d, Ord d) => Duration d where
unit :: d -- The 'base' duration that time signatures refer to
combine :: d -> d -> d
zeroD :: d -- Optional, obviously if it exists it should be the identity for combine
subD :: d -> d -> d -- Optional, obviously subD d d = zeroD
tie :: d -> d -> d
tie = combine
showDur :: d -> String
showDur d = show d
showRest :: d -> String
showRest d = show d
class Mensuration m where
mensurate :: m -> AbstractDur1 -> AbstractDur2
-- Essentially a tuning system is anything that implements 'tuneInt'
-- (or 'tune' and 'tuneInt'). But, other than that, it's up to you. If
-- more configurability is needed, just write a Tuning type whose
-- constructor has lots of parameters -- or whose implementation of
-- 'tune' does something more complicated than just utilising 'tuneInt',
-- etc. (see DummyTuning in Tuning.hs for a facetious example)
class (Transpose p i) => Tuning t p i | t -> p i where
-- Important: implementation of either tune or tuneInt is required!
base :: t -> (p, AbstractPitch3) -- e.g. (A Na, 440)
tuneInt :: t -> i -> AbstractInt3
tune :: t -> p -> AbstractPitch3
tune t p' = let (p, r) = base t
in r .+^ (tuneInt t (interval p p'))
tuneNote :: Note p i d => t -> AbstractNote p i d -> AbstractNote AbstractPitch3 AbstractInt3 d
tuneNote t (AbstractPitch p d) = AbstractPitch (tune t p) d
tuneNote t (AbstractInt i d) = AbstractInt (tuneInt t i) d
tuneNote _ (Rest d) = Rest d
tuneNote _ d = coerceNote d
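-- A sketch of the shape of a Tuning instance (illustrative only; the names
-- 'SketchTuning' and 'semitones' are invented here -- see Tuning.hs for real
-- instances):
--
--   data SketchTuning = SketchTuning
--   instance Tuning SketchTuning AbstractPitch2 AbstractInt2 where
--     base _ = (AbstractPitch2 A Na, AbstractPitch3 440)
--     tuneInt _ i = AbstractInt3 (2 ** (semitones i / 12))
--       -- where 'semitones' would convert an AbstractInt2 into a semitone count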
-- Any way of specifying a concrete realisation of tempo -- some magic
-- involving the IO monad may allow for accelerations etc.
class (Duration d, Eq d, Eq t) => Timing t d | t -> d where
time :: t -> d -> AbstractDur3
timeNote :: Note p i d => t -> AbstractNote p i d -> AbstractNote p i AbstractDur3
timeNote t (AbstractPitch p d) = AbstractPitch p (time t d)
timeNote t (AbstractInt i d) = AbstractInt i (time t d)
timeNote t (Rest d) = Rest (time t d)
timeNote _ d = coerceNote d
--------------
instance Ord Name where
m `compare` n = (fromEnum m) `compare` (fromEnum n)
instance Enum Name where
fromEnum A = 0
fromEnum B = 1
fromEnum C = 2
fromEnum D = 3
fromEnum E = 4
fromEnum F = 5
fromEnum G = 6
fromEnum (Up n) = (fromEnum n) + 7
fromEnum (Down n) = (fromEnum n) - 7
toEnum 0 = A
toEnum 1 = B
toEnum 2 = C
toEnum 3 = D
toEnum 4 = E
toEnum 5 = F
toEnum 6 = G
toEnum n
| (n < 0) = Down (toEnum (n + 7))
| otherwise = Up (toEnum (n - 7))
instance Enum Degree where
fromEnum Ut = 0
fromEnum Re = 1
fromEnum Mi = 2
fromEnum Fa = 3
fromEnum Sol = 4
fromEnum La = 5
fromEnum Si = 6
-- fromEnum (DUp d) = (fromEnum d) + 7
-- fromEnum (DDown d) = (fromEnum d) - 7
toEnum 0 = Ut
toEnum 1 = Re
toEnum 2 = Mi
toEnum 3 = Fa
toEnum 4 = Sol
toEnum 5 = La
toEnum 6 = Si
-- toEnum n
-- | (n < 0) = DDown (toEnum (n + 7))
-- | otherwise = DUp (toEnum (n - 7))
instance Enum Skip where
fromEnum Fir = 0
fromEnum Sec = 1
fromEnum Thi = 2
fromEnum Fou = 3
fromEnum Fif = 4
fromEnum Six = 5
fromEnum Sev = 6
-- fromEnum (Com s) = 7 + (fromEnum s)
-- fromEnum (Neg s) = -1 * (fromEnum s)
toEnum n
-- | (n < 0) = Neg (toEnum (-1 * n))
-- | (n == 7) = Com Fir
-- | (n > 7) = Com (toEnum (n - 7))
| otherwise = toEnum' n
where toEnum' 0 = Fir
toEnum' 1 = Sec
toEnum' 2 = Thi
toEnum' 3 = Fou
toEnum' 4 = Fif
toEnum' 5 = Six
toEnum' 6 = Sev
instance Ord Skip where
m `compare` n = (fromEnum m) `compare` (fromEnum n)
instance Ord Number where
m `compare` n = (fromEnum m) `compare` (fromEnum n)
instance Enum Number where
fromEnum Unison = 0
fromEnum Second = 1
fromEnum Third = 2
fromEnum Fourth = 3
fromEnum Fifth = 4
fromEnum Sixth = 5
fromEnum Seventh = 6
fromEnum (Compound l) = 7 + (fromEnum l)
fromEnum (Negative l) = -1 * (fromEnum l)
toEnum n
| (n < 0) = Negative (toEnum (-1 * n))
| (n == 7) = Compound Unison
| (n > 7) = Compound (toEnum (n - 7))
| otherwise = toEnum' n
where toEnum' 0 = Unison
toEnum' 1 = Second
toEnum' 2 = Third
toEnum' 3 = Fourth
toEnum' 4 = Fifth
toEnum' 5 = Sixth
toEnum' 6 = Seventh
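-- For example, fromEnum (Compound Third) == 9 and toEnum 9 == Compound Third
-- (displayed as "10", i.e. a tenth); Negative intervals show with a minus sign.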
type Note1 = AbstractNote AbstractPitch1 AbstractInt1 AbstractDur1
type Note2 = AbstractNote AbstractPitch2 AbstractInt2 AbstractDur2
type Note3 = AbstractNote AbstractPitch3 AbstractInt3 AbstractDur3
instance Show (AbstractNote p i d) where
show (AbstractPitch p d) = "Note{" ++ (show p) ++ " " ++ (showDur d) ++ "}"
show (AbstractInt i d) = "Interval{" ++ (show i) ++ " " ++ (showDur d) ++ "}"
show (Rest d) = "Rest{" ++ (showRest d) ++ "}"
show (Conn c) = "Conn{" ++ (show c) ++ "}"
-- show (ConnInt i c) = "{" ++ (show i) ++ "|" ++ (show c) ++ "}"
show (Dir c) = "{" ++ (show c) ++ "}"
-- deriving instance Show (AbstractNote p i d)
-- instance Show Note2 where
-- show (AbstractPitch p d) = "{" ++ (show p) ++ " " ++ (showDur d) ++ "}"
-- show (AbstractInt i d) = "{" ++ (show i) ++ " " ++ (showDur d) ++ "}"
-- show (Rest d) = "{" ++ (showRest d) ++ "}"
-- show (Conn c) = "{" ++ (show c) ++ "}"
-- -- show (ConnInt i c) = "{" ++ (show i) ++ "|" ++ (show c) ++ "}"
-- show (Dir c) = "{" ++ (show c) ++ "}"
------------------------------------
-- Instances of classes defined above.
instance (Transpose p i, Duration d) => Note p i d where
-- instance (Transpose p i, Pitch p, Interval i, Duration d) => Note p i d where
-- instance Note Figuring AbstractInt1 AbstractDur2 where
addFicta Raise Raise = Raise
addFicta Raise Lower = Neutral
addFicta Lower Raise = Neutral
addFicta Lower Lower = Lower
addFicta Neutral f = f
addFicta f Neutral = f
instance Pitch AbstractPitch1 where
sharpen (AbstractPitch1 d f) = AbstractPitch1 d (addFicta Raise f)
flatten (AbstractPitch1 d f) = AbstractPitch1 d (addFicta Lower f)
incr (AbstractPitch1 d f) = AbstractPitch1 (succ d) Neutral
decr (AbstractPitch1 d f) = AbstractPitch1 (pred d) Neutral
middle = AbstractPitch1 Ut Neutral
instance Pitch AbstractPitch2 where
sharpen (AbstractPitch2 n a) = toPitch $ (faPitch n a) + (1 ::+ 0)
flatten (AbstractPitch2 n a) = toPitch $ (faPitch n a) - (1 ::+ 0)
incr (AbstractPitch2 n a) = AbstractPitch2 (succ n) a
decr (AbstractPitch2 n a) = AbstractPitch2 (pred n) a
middle = AbstractPitch2 A Na
instance Eq AbstractPitch2 where
(==) = (==) `under` pitchToFa
cent :: FreqRat
cent = (2 ** (1/1200))
instance Pitch AbstractPitch3 where
  sharpen (AbstractPitch3 f) = AbstractPitch3 (f * (cent ** 50))
  flatten (AbstractPitch3 f) = AbstractPitch3 (f / (cent ** 50))
  incr (AbstractPitch3 f) = AbstractPitch3 (f * (cent ** 100))
  decr (AbstractPitch3 f) = AbstractPitch3 (f / (cent ** 100))
  middle = AbstractPitch3 440
-- instance Pitch Figuring where
-- sharpen = augment
-- flatten = diminish
-- incr = grow
-- decr = shrink
instance Interval AbstractInt1 where
add (AbstractInt1 s f) (AbstractInt1 t g) = AbstractInt1 (toEnum $ (fromEnum s) + (fromEnum t)) Neutral
sub (AbstractInt1 s f) (AbstractInt1 t g) = AbstractInt1 (toEnum $ (fromEnum s) - (fromEnum t)) Neutral
grow (AbstractInt1 s _) = AbstractInt1 ((toEnum . (+ 1) . fromEnum) s) Neutral
shrink (AbstractInt1 s _) = AbstractInt1 ((toEnum . (+(-1)) . fromEnum) s) Neutral
augment (AbstractInt1 s f) = AbstractInt1 s (addFicta Raise f)
diminish (AbstractInt1 s f) = AbstractInt1 s (addFicta Lower f)
unison = AbstractInt1 Fir Neutral
octave = AbstractInt1 (Com Fir) Neutral
instance (Interval i) => AdditiveGroup i where
zeroV = unison
(^+^) = add
negateV = negate
-- not possible, no overlapping associated types allowed in GHC yet :-/
-- instance Interval i => VectorSpace i where
-- type Scalar i = Int
-- (*^) 0 i = zeroV
-- (*^) s i
-- | (s > 0) = i ^+^ ((s - 1) *^ i)
-- | (s < 0) = (negateV i) ^+^ ((s + 1) *^ i)
intervalPowerPositive 0 i = zeroV
intervalPowerPositive s i
| (s > 0) = i ^+^ ((s - 1) *^ i)
| (s < 0) = (negateV i) ^+^ ((s + 1) *^ i)
instance VectorSpace AbstractInt1 where
type Scalar AbstractInt1 = Int
(*^) = intervalPowerPositive
instance VectorSpace AbstractInt2 where
type Scalar AbstractInt2 = Int
(*^) = intervalPowerPositive
instance VectorSpace AbstractInt3 where
type Scalar AbstractInt3 = Double
(*^) s (AbstractInt3 f) = AbstractInt3 $ f ** s
-- instance (Pitch p) => AffineSpace p where -- not possible
-- type (Diff p) = (Transpose p i) => i
-- (.-.) = interval
-- (.+^) = flip transpose
instance AffineSpace AbstractPitch1 where
type Diff AbstractPitch1 = AbstractInt1
(.-.) = flip interval
(.+^) = flip transpose
instance AffineSpace AbstractPitch2 where
type Diff AbstractPitch2 = AbstractInt2
(.-.) = flip interval
(.+^) = flip transpose
instance AffineSpace AbstractPitch3 where
type Diff AbstractPitch3 = AbstractInt3
(.-.) = flip interval
(.+^) = flip transpose
data FreeAbelian = Int ::+ Int deriving (Show, Eq)
-- We're using the type 'FreeAbelian' to represent (n * A1, m * d2).
--
-- Essentially, intervals form a free Abelian group G = {(n*e_1,m*e_2)
-- | (n,m) ∊ ℤ×ℤ} where e_1 = (1,0) and e_2 = (0,1) are two possible
-- elements that can be used as a basis (generators for the group.
-- An interval ratio is then defined as (A1)^n * (d2)^m.
--
-- Note that, in Pythagorean tuning, d2 is comma^(-1) (in negative
-- direction), and m2 + comma = A1 (i.e. m2 < A1).
--
-- Could use P8 & P5 as generators, or m2 & d2, or any pair of
-- linearly independent intervals (linear independence is preserved
-- across basis changes).
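-- Illustrative examples (added here for exposition only; the names are
-- hypothetical and rely on 'faInt', defined further below): how some
-- familiar intervals decompose over the (A1, d2) basis described above.
exampleMajorSecond, examplePerfectFifth, exampleOctave :: FreeAbelian
exampleMajorSecond  = faInt Maj Second             -- 2 ::+ 1, i.e. 2*A1 + 1*d2
examplePerfectFifth = faInt Perf Fifth             -- 7 ::+ 4
exampleOctave       = faInt Perf (Compound Unison) -- 12 ::+ 7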
instance Ord FreeAbelian where
(_ ::+ a) <= (_ ::+ b) = a <= b
-- It's the second element (m*d2) that gives rise to the *name* of
-- the interval (Unison, Second, Third etc.), so that's what we're
-- most likely going to want to compare on.
instance Num FreeAbelian where
(a ::+ b) + (c ::+ d) = (a + c) ::+ (b + d)
(a ::+ b) - (c ::+ d) = (a - c) ::+ (b - d)
(a ::+ b) * (c ::+ d) = (a*c - b*d) ::+ (b*c + a*d) -- maybe...
fromInteger n = (fromIntegral n) ::+ 0
abs (a ::+ b) = (abs a) ::+ (abs b) -- (cannot give the absolute magnitude of a group element until we know what tuning system we're using)
signum (a ::+ b) = (signum a) ::+ (signum b)
faInt :: Quality -> Number -> FreeAbelian
-- i.e. intervals as elements of the free Abelian group
faInt Perf Unison = 0 ::+ 0
faInt (Aug Perf) Unison = 1 ::+ 0
faInt (Dim Min) Second = 0 ::+ 1
faInt Perf (Compound Unison) = (faInt Maj Seventh) + (faInt Min Second)
faInt q n@(Negative _) = faInt' q n
faInt q n@(Compound _) = faInt' q n
faInt Min Second = (faInt (Dim Min) Second) + (faInt (Aug Perf) Unison)
faInt Maj Second = (faInt Min Second) + (faInt (Aug Perf) Unison)
faInt Min Third = (faInt Maj Second) + (faInt Min Second)
faInt Maj Third = (faInt Maj Second) + (faInt Maj Second)
faInt Perf Fourth = (faInt Maj Third) + (faInt Min Second)
faInt Perf Fifth = (faInt Perf Fourth) + (faInt Maj Second)
faInt Min Sixth = (faInt Perf Fifth) + (faInt Min Second)
faInt Maj Sixth = (faInt Perf Fifth) + (faInt Maj Second)
faInt Min Seventh = (faInt Maj Sixth) + (faInt Min Second)
faInt Maj Seventh = (faInt Maj Sixth) + (faInt Maj Second)
faInt (Dim q) n = (faInt q n) - (faInt (Aug Perf) Unison)
faInt (Aug q) n = (faInt q n) + (faInt (Aug Perf) Unison)
faInt' q n = let comps = countComp n
negs = countNeg n
i = faInt q (justNum n)
in (i + ((comps ::+ 0) * (faInt Perf (Compound Unison)))) * (negs ::+ 0)
countComp (Compound n) = (countComp n) + 1
countComp (Negative n) = countComp n
countComp _ = 0
countNeg (Negative n) = (countNeg n) * (-1)
countNeg (Compound n) = countNeg n
countNeg _ = 1
justNum (Negative n) = justNum n
justNum (Compound n) = justNum n
justNum n = n
faIntNorm (n ::+ m)
| (n <= 0) && (m <= 0) = faIntNorm ((-n) ::+ (-m))
| otherwise = (n - (12 * (oct m))) ::+ (m `mod` 7)
toInterval (a ::+ d) = AbstractInt2 (faIntToQual (a ::+ d)) (toEnum d)
intToFa (AbstractInt2 q n) = faInt q n
faIntToQual (n ::+ m)
| (n < 0) && (m == 0) = Dim (faIntToQual ((n + 1) ::+ m))
| (n ::+ m) == (0 ::+ 0) = Perf
| (n > 0) && (m == 0) = Aug (faIntToQual ((n - 1) ::+ m))
| (n < 1) && (m == 1) = Dim (faIntToQual ((n + 1) ::+ m))
| (n ::+ m) == (1 ::+ 1) = Min
| (n ::+ m) == (2 ::+ 1) = Maj
| (n > 2) && (m == 1) = Aug (faIntToQual ((n - 1) ::+ m))
| (n < 3) && (m == 2) = Dim (faIntToQual ((n + 1) ::+ m))
| (n ::+ m) == (3 ::+ 2) = Min
| (n ::+ m) == (4 ::+ 2) = Maj
| (n > 4) && (m == 2) = Aug (faIntToQual ((n - 1) ::+ m))
| (n < 5) && (m == 3) = Dim (faIntToQual ((n + 1) ::+ m))
| (n ::+ m) == (5 ::+ 3) = Perf
| (n > 5) && (m == 3) = Aug (faIntToQual ((n - 1) ::+ m))
| (n < 7) && (m == 4) = Dim (faIntToQual ((n + 1) ::+ m))
| (n ::+ m) == (7 ::+ 4) = Perf
| (n > 7) && (m == 4) = Aug (faIntToQual ((n - 1) ::+ m))
| (n < 8) && (m == 5) = Dim (faIntToQual ((n + 1) ::+ m))
| (n ::+ m) == (8 ::+ 5) = Min
| (n ::+ m) == (9 ::+ 5) = Maj
| (n > 9) && (m == 5) = Aug (faIntToQual ((n - 1) ::+ m))
| (n < 10) && (m == 6) = Dim (faIntToQual ((n + 1) ::+ m))
| (n ::+ m) == (10 ::+ 6) = Min
| (n ::+ m) == (11 ::+ 6) = Maj
| (n > 11) && (m == 6) = Aug (faIntToQual ((n - 1) ::+ m))
| (n < 12) && (m == 7) = Dim (faIntToQual ((n + 1) ::+ m))
| (n ::+ m) == (12 ::+ 7) = Perf
| (n > 12) && (m == 7) = Aug (faIntToQual ((n - 1) ::+ m))
-- note: these last two cases *have* to be this way round, otherwise
-- infinite loop occurs.
| (n > 12) || (m > 7) = faIntToQual ((n - 12) ::+ (m - 7))
| (n < 0) || (m < 0) = faIntToQual ((-n) ::+ (-m))
instance Interval AbstractInt2 where
add (AbstractInt2 q n) (AbstractInt2 p m) = toInterval $ (faInt q n) + (faInt p m)
sub (AbstractInt2 q n) (AbstractInt2 p m) = toInterval $ (faInt q n) - (faInt p m)
augment (AbstractInt2 q n) = toInterval $ (faInt q n) + (1 ::+ 0)
diminish (AbstractInt2 q n) = toInterval $ (faInt q n) - (1 ::+ 0)
grow (AbstractInt2 q n) = toInterval $ (faInt q n) + (1 ::+ 1)
shrink (AbstractInt2 q n) = toInterval $ (faInt q n) - (1 ::+ 1)
octave = AbstractInt2 Perf (Compound Unison)
unison = AbstractInt2 Perf Unison
instance Eq AbstractInt2 where
(==) = (==) `under` intToFa
instance Interval AbstractInt3 where
  add (AbstractInt3 f) (AbstractInt3 g) = AbstractInt3 (f*g)
  sub (AbstractInt3 f) (AbstractInt3 g) = AbstractInt3 (f/g)
  augment (AbstractInt3 f) = AbstractInt3 (f * (cent ** 100))
  diminish (AbstractInt3 f) = AbstractInt3 (f / (cent ** 100))
  grow (AbstractInt3 f) = AbstractInt3 (f * (cent ** 200))
  shrink (AbstractInt3 f) = AbstractInt3 (f / (cent ** 200))
  unison = AbstractInt3 1
  octave = AbstractInt3 2
instance Transpose AbstractPitch1 AbstractInt1 where
transpose (AbstractInt1 s f') (AbstractPitch1 d f) = AbstractPitch1 (toEnum $ (fromEnum s) + (fromEnum d)) (addFicta f' f)
interval (AbstractPitch1 d _) (AbstractPitch1 d' _) = AbstractInt1 (toEnum $ (fromEnum d') - (fromEnum d)) Neutral
normalise (AbstractPitch1 d f) (AbstractInt1 s _) (AbstractPitch1 e g)
| s < (Com Fir) = undefined
| (current >= lower) && (current < upper) = AbstractPitch1 d f
| current < lower = normalise (AbstractPitch1 d f) (AbstractInt1 s Neutral) (transpose (AbstractInt1 (Com Fir) Neutral) (AbstractPitch1 e g))
| otherwise = normalise (AbstractPitch1 d f) (AbstractInt1 s Neutral) (transpose (AbstractInt1 (Neg (Com Fir)) Neutral) (AbstractPitch1 e g))
where lower = fromEnum d
upper = lower + (fromEnum s)
current = fromEnum e
-- instance Transpose Figuring AbstractInt1 where
-- transpose = (^+^)
-- interval = (^-^)
faPitch :: Name -> Accidental -> FreeAbelian
-- Pitches as elements of the rank-2 free Abelian group -- however,
-- this is only an implementation detail: all pitches are
-- explicitly measured as intervals relative to middle-A-natural. To
-- the user, pitches still form an affine space.
faPitch A Na = 0 ::+ 0
faPitch B Na = (faPitch A Na) + (faInt Maj Second)
faPitch C Na = (faPitch B Na) + (faInt Min Second)
faPitch D Na = (faPitch C Na) + (faInt Maj Second)
faPitch E Na = (faPitch D Na) + (faInt Maj Second)
faPitch F Na = (faPitch E Na) + (faInt Min Second)
faPitch G Na = (faPitch F Na) + (faInt Maj Second)
faPitch (Up n) Na = (faPitch n Na) + (faInt Perf (Compound Unison))
faPitch (Down n) Na = (faPitch n Na) - (faInt Perf (Compound Unison))
faPitch n (Fl a) = (faPitch n a) - (1 ::+ 0)
faPitch n (Sh a) = (faPitch n a) + (1 ::+ 0)
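-- Illustrative values (added here for exposition only; the names are
-- hypothetical): as the comment above 'faPitch' says, every pitch is
-- measured from middle A natural, which is therefore the zero element.
examplePitchA, examplePitchC, examplePitchCSharp :: FreeAbelian
examplePitchA      = faPitch A Na      -- 0 ::+ 0
examplePitchC      = faPitch C Na      -- 3 ::+ 2, a minor third above A
examplePitchCSharp = faPitch C (Sh Na) -- 4 ::+ 2, a major third above A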
toPitch :: FreeAbelian -> AbstractPitch2
-- toPitch (n ::+ m) = AbstractPitch2 (toEnum m) (faToAcc (n ::+ m))
toPitch (n ::+ m) = AbstractPitch2 (toEnum m) ((faToAcc . faNorm) (n ::+ m))
pitchToFa (AbstractPitch2 n a) = faPitch n a
oct m = m `div` 7
faNorm (n ::+ m) = (n - (12 * (oct m))) ::+ (m `mod` 7)
-- Only for pitches that lie within the span of one octave above
-- middle-A (i.e. that have been normalised by faNorm).
faToAcc (n ::+ m)
| (n < 0) && (m == 0) = Fl (faToAcc ((n + 1) ::+ m))
| (n ::+ m) == (0 ::+ 0) = Na
| (n > 0) && (m == 0) = Sh (faToAcc ((n - 1) ::+ m))
| (n < 2) && (m == 1) = Fl (faToAcc ((n + 1) ::+ m))
| (n ::+ m) == (2 ::+ 1) = Na
| (n > 2) && (m == 1) = Sh (faToAcc ((n - 1) ::+ m))
| (n < 3) && (m == 2) = Fl (faToAcc ((n + 1) ::+ m))
| (n ::+ m) == (3 ::+ 2) = Na
| (n > 3) && (m == 2) = Sh (faToAcc ((n - 1) ::+ m))
| (n < 5) && (m == 3) = Fl (faToAcc ((n + 1) ::+ m))
| (n ::+ m) == (5 ::+ 3) = Na
| (n > 5) && (m == 3) = Sh (faToAcc ((n - 1) ::+ m))
| (n < 7) && (m == 4) = Fl (faToAcc ((n + 1) ::+ m))
| (n ::+ m) == (7 ::+ 4) = Na
| (n > 7) && (m == 4) = Sh (faToAcc ((n - 1) ::+ m))
| (n < 8) && (m == 5) = Fl (faToAcc ((n + 1) ::+ m))
| (n ::+ m) == (8 ::+ 5) = Na
| (n > 8) && (m == 5) = Sh (faToAcc ((n - 1) ::+ m))
| (n < 10) && (m == 6) = Fl (faToAcc ((n + 1) ::+ m))
| (n ::+ m) == (10 ::+ 6) = Na
| (n > 10) && (m == 6) = Sh (faToAcc ((n - 1) ::+ m))
| (n < 12) && (m == 7) = Fl (faToAcc ((n + 1) ::+ m))
| (n ::+ m) == (12 ::+ 7) = Na
| (n > 12) && (m == 7) = Sh (faToAcc ((n - 1) ::+ m))
instance Transpose AbstractPitch2 AbstractInt2 where
transpose (AbstractInt2 q i) (AbstractPitch2 n a) = toPitch $ (faPitch n a) + (faInt q i)
interval (AbstractPitch2 n a) (AbstractPitch2 p b) = toInterval $ (faPitch p b) - (faPitch n a)
normalise base diff n
| diff < (AbstractInt2 Maj Seventh) = undefined
| (n >= base) && (n < upper) = n
| n < base = normalise base diff (n .+^ octave)
| otherwise = normalise base diff (n .-^ octave)
where upper = base .+^ diff
instance Transpose AbstractPitch3 AbstractInt3 where
transpose (AbstractInt3 i) (AbstractPitch3 f) = AbstractPitch3 (f * i)
interval (AbstractPitch3 f) (AbstractPitch3 g) = AbstractInt3 (g / f)
normalise (AbstractPitch3 f) (AbstractInt3 i) (AbstractPitch3 g)
| upper/lower <= 0.5 = undefined
| (current >= lower) && (current < upper) = AbstractPitch3 g
| current < lower = normalise (AbstractPitch3 f) (AbstractInt3 i) (AbstractPitch3 (g*2))
| otherwise = normalise (AbstractPitch3 f) (AbstractInt3 i) (AbstractPitch3 (g/2))
where lower = f
upper = f * i
current = g
instance Duration AbstractDur1 where
unit = AbstractDur1 Br
zeroD = error "zero-length mensuration duration not implemented yet"
combine (AbstractDur1 a) (AbstractDur1 b) = AbstractDur1 $ MTie a b
subD = error "cannot subtract mensurations yet"
instance Duration AbstractDur2 where
unit = AbstractDur2 (1 % 1)
zeroD = AbstractDur2 (0 % 1)
-- combine (AbstractDur2 (nd -> (0, _))) _ = error "no zero durations!"
-- combine _ (AbstractDur2 (nd -> (0, _))) = error "no zero durations!"
combine (AbstractDur2 r) (AbstractDur2 s) = AbstractDur2 (r + s)
subD (AbstractDur2 r) (AbstractDur2 s) = AbstractDur2 (r - s)
showDur (AbstractDur2 (nd -> (2, 1))) = "𝅜"
showDur (AbstractDur2 (nd -> (1, 1))) = "𝅝"
showDur (AbstractDur2 (nd -> (1, 2))) = "𝅗𝅥"
showDur (AbstractDur2 (nd -> (1, 4))) = "𝅘𝅥"
showDur (AbstractDur2 (nd -> (1, 8))) = "𝅘𝅥𝅮"
showDur (AbstractDur2 (nd -> (1, 16))) = "𝅘𝅥𝅯"
showDur (AbstractDur2 (nd -> (1, 32))) = "𝅘𝅥𝅰"
showDur (AbstractDur2 (nd -> (1, 64))) = "𝅘𝅥𝅱"
showDur (AbstractDur2 (nd -> (1, 128))) = "𝅘𝅥𝅲"
showDur (AbstractDur2 r) = show r
showRest (AbstractDur2 (nd -> (2, 1))) = "𝄺"
showRest (AbstractDur2 (nd -> (1, 1))) = "𝄻"
showRest (AbstractDur2 (nd -> (1, 2))) = "𝄼"
showRest (AbstractDur2 (nd -> (1, 4))) = "𝄽"
showRest (AbstractDur2 (nd -> (1, 8))) = "𝄾"
showRest (AbstractDur2 (nd -> (1, 16))) = "𝄿"
showRest (AbstractDur2 (nd -> (1, 32))) = "𝅀"
showRest (AbstractDur2 (nd -> (1, 64))) = "𝅁"
showRest (AbstractDur2 (nd -> (1, 128))) = "𝅂"
showRest (AbstractDur2 r) = show r
instance Duration AbstractDur3 where
zeroD = AbstractDur3 0
unit = AbstractDur3 1
-- combine (AbstractDur3 0) _ = error "no zero durations!"
-- combine _ (AbstractDur3 0) = error "no zero durations!"
combine (AbstractDur3 t) (AbstractDur3 r) = AbstractDur3 (t + r)
subD (AbstractDur3 t) (AbstractDur3 r) = AbstractDur3 (t - r)
-- Durations are a semigroup because zero-length durations are
-- forbidden.
instance (Duration d) => Semigroup d where
(<>) = combine
instance Timing Metronome AbstractDur2 where
time (Metronome n) (AbstractDur2 r) = AbstractDur3 (realToFrac ((240000 % n) * r))
-- time (Metronome n) (AbstractDur2 r) = AbstractDur3 (240000/(fromIntegral n) * (fromRational r))
-- Calculation: n is crotchet beats per minute and r is measured in
-- semibreves, so 1 sb = 4 beats = 4 * (60000 ms / n) = 240000/n ms;
-- e.g. 60 bpm = 15 sbpm, i.e. 1 sb = 60s/15 = 4s.
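-- Illustrative check (added here for exposition only; the name is
-- hypothetical): at 60 crotchet beats per minute a semibreve should come
-- out as 240000/60 = 4000 ms.
exampleSemibreve :: AbstractDur3
exampleSemibreve = time (Metronome 60) (AbstractDur2 (1 % 1)) -- 4000 ms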
-- An example function to use with mapPhrase
sharpenAndDouble :: Note p i d => AbstractNote p i d -> AbstractNote p i d
sharpenAndDouble = (apPitch sharpen) . (apInt augment) . (apDur (\d -> combine d d))
mapPhrase :: (Note p i d, Note p' i' d')
=> (AbstractNote p i d -> AbstractNote p' i' d')
-> AbstractPhrase (AbstractNote p i d) -> AbstractPhrase (AbstractNote p' i' d')
mapPhrase f (AbstractPhrase ((Conn p):[])) = AbstractPhrase [Conn (mapPhrase f p)]
mapPhrase f (AbstractPhrase (n:[])) = AbstractPhrase [f n]
mapPhrase f (AbstractPhrase ((Conn p):ns)) = (AbstractPhrase [Conn (mapPhrase f p)]) <> (mapPhrase f (AbstractPhrase ns))
mapPhrase f (AbstractPhrase (n:ns)) = (AbstractPhrase [f n]) <> (mapPhrase f (AbstractPhrase ns))
-- mapPhrase without recursion into sub-phrases
mapPhraseSingle :: (Note p i d)
=> (AbstractNote p i d -> AbstractNote p i d)
-> AbstractPhrase (AbstractNote p i d) -> AbstractPhrase (AbstractNote p i d)
mapPhraseSingle f (AbstractPhrase ((Conn p):[])) = AbstractPhrase [Conn (mapPhraseSingle f p)]
mapPhraseSingle f (AbstractPhrase (n:[])) = AbstractPhrase [f n]
mapPhraseSingle f (AbstractPhrase ((Conn p):ns)) = (AbstractPhrase [Conn p]) <> (mapPhraseSingle f (AbstractPhrase ns))
mapPhraseSingle f (AbstractPhrase (n:ns)) = (AbstractPhrase [f n]) <> (mapPhraseSingle f (AbstractPhrase ns))
-- by analogy with fold1
foldPhrase1 :: Note p i d
=> (AbstractNote p i d -> AbstractNote p i d -> AbstractNote p i d)
-> AbstractPhrase (AbstractNote p i d) -> AbstractNote p i d
foldPhrase1 f (AbstractPhrase (n:[])) =
case n of (Conn p) -> foldPhrase1 f p
p -> p
foldPhrase1 f (AbstractPhrase (n:ns)) =
case n of (Conn p) -> f (foldPhrase1 f p) (foldPhrase1 f (AbstractPhrase ns))
(Dir _) -> foldPhrase1 f (AbstractPhrase ns)
p -> f p (foldPhrase1 f (AbstractPhrase ns))
foldPhrase1 _ _ = error "Exhausted patterns in foldPhrase1"
foldPhrase :: Note p i d
=> (AbstractNote p i d -> a -> a)
-> a -> AbstractPhrase (AbstractNote p i d) -> a
foldPhrase f e (AbstractPhrase (n:[])) =
case n of (Conn p) -> foldPhrase f e p
p -> f p e
foldPhrase f e (AbstractPhrase (n:ns)) =
case n of (Conn p) -> let branch1 = foldPhrase f e (AbstractPhrase ns)
branch2 = foldPhrase f branch1 p
in branch2
(Dir _) -> foldPhrase f e (AbstractPhrase ns)
p -> f p (foldPhrase f e (AbstractPhrase ns))
flattenPhrase :: (AbstractPhrase t) -> (AbstractPhrase t)
flattenPhrase p@(AbstractPhrase (_:[])) = p
flattenPhrase (AbstractPhrase (n:ns)) =
case n of (Conn p) -> p <> (flattenPhrase (AbstractPhrase ns))
q -> (AbstractPhrase [q]) <> (flattenPhrase (AbstractPhrase ns))
flattenPhrase (AbstractPhrase []) = AbstractPhrase []
-- foldPhrase1 with *no* recursion into connected phrases -- they're simply ignored.
foldPhraseSingle :: Note p i d
=> (AbstractNote p i d -> AbstractNote p i d -> AbstractNote p i d)
-> AbstractPhrase (AbstractNote p i d) -> AbstractNote p i d
foldPhraseSingle f (AbstractPhrase p) = foldPhrase' f (AbstractPhrase (filter (not . isConn) p)) where
foldPhrase' :: Note p i d
=> (AbstractNote p i d -> AbstractNote p i d -> AbstractNote p i d)
-> AbstractPhrase (AbstractNote p i d) -> AbstractNote p i d
foldPhrase' f (AbstractPhrase (n:(Conn _):[])) = n
foldPhrase' f (AbstractPhrase (n:(Dir _):[])) = n
foldPhrase' f (AbstractPhrase (n:[])) = n
foldPhrase' f (AbstractPhrase ((Conn _):ns)) = foldPhrase' f (AbstractPhrase ns)
foldPhrase' f (AbstractPhrase ((Dir _):ns)) = foldPhrase' f (AbstractPhrase ns)
foldPhrase' f (AbstractPhrase (n:(Conn _):ns)) = f n (foldPhrase' f (AbstractPhrase ns))
foldPhrase' f (AbstractPhrase (n:(Dir _):ns)) = f n (foldPhrase' f (AbstractPhrase ns))
foldPhrase' f (AbstractPhrase (n:ns)) = f n (foldPhrase' f (AbstractPhrase ns))
foldPhrase' _ p = error ("Exhausted patterns in foldPhraseSingle: " ++ (show p))
-- extractDur :: (Note p i d) => (AbstractNote p i d) -> d
extractDur (AbstractPitch _ d) = d
extractDur (AbstractInt _ d) = d
extractDur (Rest d) = d
extractDur _ = zeroD
-- extractDur p = error ("Trying to extract duration from value with no duration: " ++ (show p))
-- extractPitch :: (Note p i d) => (AbstractNote p i d) -> p
extractPitch (AbstractPitch p _) = p
extractPitch p = error ("Trying to extract pitch from value with no pitch: " ++ (show p))
countDurs :: Note p i d => AbstractPhrase (AbstractNote p i d) -> d
countDurs (AbstractPhrase []) = zeroD
countDurs (AbstractPhrase p) = extractDur (countDurs' (AbstractPhrase (filter isNote p))) where
countDurs' (AbstractPhrase []) = Rest zeroD
countDurs' p = foldPhraseSingle (\n n' -> Rest $ (extractDur n) <> (extractDur n')) p
countDursRec :: Note p i d => AbstractPhrase (AbstractNote p i d) -> d
countDursRec (AbstractPhrase []) = zeroD
countDursRec (AbstractPhrase p) = extractDur (countDurs' (AbstractPhrase (filter isNote p))) where
countDurs' (AbstractPhrase []) = Rest zeroD
countDurs' p = foldPhrase1 (\n n' -> Rest $ (extractDur n) <> (extractDur n')) p
splitVoices :: (Note p i d) =>
AbstractPhrase (AbstractNote p i d) -> [AbstractPhrase (AbstractNote p i d)]
-- Walk along phrase until we find a connection to another phrase;
-- split off and insert correct number of rests in front of new
-- phrase. Repeat this procedure on all newly-discovered phrases,
-- recursively.
splitVoices (AbstractPhrase ns) =
let before = takeWhile (not . isConn) ns
after' = dropWhile (not . isConn) ns
connector = head after'
after = tail after'
rest = AbstractPhrase $ [Rest $ countDurs (AbstractPhrase before)]
in if null after'
then [AbstractPhrase before]
else case connector of
(Conn p) -> (splitVoices (AbstractPhrase (before ++ after))) ++ (splitVoices (if null before
then p
else (rest <> p)))
firstPitch :: [AbstractNote p i d] -> Maybe (AbstractNote p i d)
firstPitch ((AbstractPitch p d):ns) = Just $ AbstractPitch p d
firstPitch ((AbstractInt _ _):ns) = Nothing -- error "Cannot make phrase absolute if it has relative pitches before absolute pitches."
firstPitch [] = Nothing
firstPitch (_:ns) = firstPitch ns
-- todo: rewrite absolute in a foldPhrase/countDurs style.
absolute :: (Note p i d) =>
AbstractPhrase (AbstractNote p i d) -> AbstractPhrase (AbstractNote p i d)
absolute (AbstractPhrase ns) = AbstractPhrase (absolute' n ns)
where n = case firstPitch ns of
Just n' -> n'
Nothing -> error "arrrgh"
absolute' _ ((AbstractPitch p d):notes) = (AbstractPitch p d) : (absolute' (AbstractPitch p d) notes)
absolute' base@(AbstractPitch p _) ((AbstractInt i d):notes) = (AbstractPitch (transpose i p) d) : (absolute' (AbstractPitch (transpose i p) d) notes)
absolute' base ((Conn (AbstractPhrase p)):notes) = let remaining = absolute' base notes
base' = case firstPitch remaining of
Just n' -> n'
Nothing -> error "arrrgh"
connected = (Conn (AbstractPhrase (absolute' base' p)))
in connected : remaining
absolute' base (p:notes) = p : (absolute' base notes)
absolute' _ [] = []
-- Apply a function just to absolute pitches
apPitch f (AbstractPitch p d) = AbstractPitch (f p) d
apPitch _ p = p
-- Apply a function just to relative pitches
apInt f (AbstractInt p d) = AbstractInt (f p) d
apInt _ p = p
-- Apply a function just to rests
apRest f (Rest d) = Rest (f d)
apRest _ p = p
-- Transpose pitches, ignore everything else
apTran i (AbstractPitch p d) = AbstractPitch (p .+^ i) d
apTran i (AbstractInt p d) = AbstractInt (p ^+^ i) d
apTran _ p = p
-- Manipulate durations, ignore everything else
apDur f (AbstractPitch p d) = AbstractPitch p (f d)
apDur f (AbstractInt p d) = AbstractInt p (f d)
apDur f (Rest d) = Rest (f d)
apDur _ p = p
--------
instance Show (AbstractNote p i d) => Show (AbstractPhrase (AbstractNote p i d)) where
show (AbstractPhrase x) = show x
-- -- Phrases are a semigroup because phrases containing zero notes are forbidden.
-- instance (Note p i d) => Semigroup (AbstractPhrase (AbstractNote p i d)) where
-- (AbstractPhrase []) <> _ = error "no empty phrases!"
-- _ <> (AbstractPhrase []) = error "no empty phrases!"
-- (AbstractPhrase a) <> (AbstractPhrase b) = AbstractPhrase $ a ++ b
-- Relaxing this condition for now, as PhraseContext needs to be able
-- to manipulate zero-length phrases.
instance (Note p i d) => Semigroup (AbstractPhrase (AbstractNote p i d)) where
(AbstractPhrase []) <> p = p
p <> (AbstractPhrase []) = p
(AbstractPhrase a) <> (AbstractPhrase b) = AbstractPhrase $ a ++ b
emptyPhrase :: (Note p i d) => AbstractPhrase (AbstractNote p i d)
emptyPhrase = AbstractPhrase []
repeatPhrase 0 _ = error "no empty phrases!" -- hmmmm
repeatPhrase 1 p = p
repeatPhrase n p = p <> (repeatPhrase (n - 1) p)
explodeVoices :: (Note p i d) => Music (AbstractNote p i d) -> Music (AbstractNote p i d)
explodeVoices (Start p) = Voices $ filter (/= emptyPhrase) $ splitVoices p
explodeVoices (Voices ps) = Voices $ concatMap splitVoices ps
revVoices (Voices ps) = Voices $ reverse ps
revVoices m = m
-- Apply a tuning system and a timing to a note, simultaneously. Use
-- with mapPhrase to turn AbstractNote2s into AbstractNote3s.
----noteToSound :: (Note p i d, Tuning t p i, Timing t' d) => t -> t' -> AbstractNote p i d -> Note3
----noteToSound tuning timing (AbstractPitch p d) = AbstractPitch (tune tuning p) (time timing d)
----noteToSound _ timing (Rest d) = Rest (time timing d)
----noteToSound _ timing (Dir d) = (Dir d)
noteToSound :: (Note p i d, Tuning t p i, Timing t' d) => t -> t' -> AbstractNote p i d -> Note3
noteToSound tu ti = (timeNote ti) . (tuneNote tu)
mapMusic :: (Note p i d, Note p' i' d')
=> (AbstractPhrase (AbstractNote p i d) ->
AbstractPhrase (AbstractNote p' i' d')) ->
Music (AbstractNote p i d) -> Music (AbstractNote p' i' d')
-- Apply a function to all phrases in a piece of music.
mapMusic f (Start p) = Start $ f p
mapMusic f (Voices ps) = Voices $ map f ps
instance (Note p i d) => Semigroup (Music (AbstractNote p i d)) where
(Voices v) <> (Voices v') = Voices $ uniq (v ++ v')
(Voices v) <> (Start p) = (Voices v) <> (explodeVoices (Start p))
(Start p) <> (Voices v) = (explodeVoices (Start p)) <> (Voices v)
(Start p) <> (Start p') = Start $ (phrase [(Conn p')]) <> p
-- Function to produce a chord
chord :: (Note p i d) => d -> [p] -> AbstractPhrase (AbstractNote p i d)
chord _ [] = error "no empty phrases!"
chord d (p:[]) = phrase [note p d]
chord d (p:ps) = let r = chord d ps
in phrase [Conn r, note p d]
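-- Illustrative use of 'chord' (added here for exposition only; the name is
-- hypothetical): a crotchet A major triad built from absolute pitches.
exampleTriad :: AbstractPhrase Note2
exampleTriad = chord (AbstractDur2 (1 % 4))
                     [ AbstractPitch2 A Na
                     , AbstractPitch2 C (Sh Na)
                     , AbstractPitch2 E Na ]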
-- shorten a note by the given duration (clamping at zero)
dropDur :: (Note p i d) => d -> (AbstractNote p i d) -> (AbstractNote p i d)
dropDur d n = let d' = extractDur n
in if d' < d
then apDur (\_ -> zeroD) n
else apDur (\_ -> subD d' d) n
-- remove a certain duration of notes from beginning of phrase
dropPhrase :: (Note p i d) => d -> AbstractPhrase (AbstractNote p i d) -> AbstractPhrase (AbstractNote p i d)
dropPhrase d p@(AbstractPhrase (x:xs))
| d == zeroD = p
| d >= extractDur x = dropPhrase (subD d (extractDur x)) (AbstractPhrase xs)
| otherwise = AbstractPhrase ((dropDur d x):xs)
dropPhrase _ (AbstractPhrase []) = AbstractPhrase []
reversePhrase :: (Note p i d) => AbstractPhrase (AbstractNote p i d) -> AbstractPhrase (AbstractNote p i d)
reversePhrase (AbstractPhrase p) = AbstractPhrase (reverse p)
dropPhraseEnd :: (Note p i d) => d -> AbstractPhrase (AbstractNote p i d) -> AbstractPhrase (AbstractNote p i d)
dropPhraseEnd d = reversePhrase . (dropPhrase d) . reversePhrase
| ejlilley/AbstractMusic | Music.hs | gpl-3.0 | 44,845 | 0 | 17 | 11,040 | 17,275 | 8,943 | 8,332 | 787 | 9 |
module P20StateTaxSpec (main,spec) where
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck((==>))
import P20StateTax hiding (main)
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "getTaxAmt" $ do
prop "has the property of returning 0.005 times amount for wisconsin, eau clair" $
\amt -> getTaxAmt "wisconsin" "eau clair" amt `shouldBe` amt * 0.005
prop "has the property of returning 0.005 times amount for wn, eau clair" $
\amt -> getTaxAmt "wn" "eau clair" amt `shouldBe` amt * 0.005
prop "has the property of returning 0.004 times amount for wisconsin, dunn" $
\amt -> getTaxAmt "wisconsin" "dunn" amt `shouldBe` amt * 0.004
prop "has the property of returning 0.004 times amount for wn, dunn" $
\amt -> getTaxAmt "wn" "dunn" amt `shouldBe` amt * 0.004
prop "has the property of returning 0.08 times amount for illinois, (anything)" $
\amt county-> getTaxAmt "illinois" county amt `shouldBe` amt * 0.08
prop "has the property of returning 0.08 times amount for il, (anything)" $
\amt county -> getTaxAmt "il" county amt `shouldBe` amt * 0.08
prop "has the property of returning 0.2 times amount for oxfordshire, (anything)" $
\amt area -> getTaxAmt "oxfordshire" area amt `shouldBe` amt * 0.2
prop "has the property of returning 0.2 times amount for oxon, (anything)" $
\amt area -> getTaxAmt "oxon" area amt `shouldBe` amt * 0.2
prop "has the property of returning 0 for all other places" $
\amt state county ->
state `notElem` [ "wisconsin"
, "wn"
, "illinois"
, "il"
, "oxfordshire"
, "oxon" ] ==> getTaxAmt state county amt `shouldBe` 0
describe "getTaxAmt'" $ do
prop "has the property of returning 0.005 times amount for wisconsin, eau clair" $
\amt -> getTaxAmt' "wisconsin" "eau clair" amt `shouldBe` amt * 0.005
prop "has the property of returning 0.005 times amount for wn, eau clair" $
\amt -> getTaxAmt' "wn" "eau clair" amt `shouldBe` amt * 0.005
prop "has the property of returning 0.004 times amount for wisconsin, dunn" $
\amt -> getTaxAmt' "wisconsin" "dunn" amt `shouldBe` amt * 0.004
prop "has the property of returning 0.004 times amount for wn, dunn" $
\amt -> getTaxAmt' "wn" "dunn" amt `shouldBe` amt * 0.004
prop "has the property of returning 0.08 times amount for illinois, (anything)" $
\amt county-> getTaxAmt' "illinois" county amt `shouldBe` amt * 0.08
prop "has the property of returning 0.08 times amount for il, (anything)" $
\amt county -> getTaxAmt' "il" county amt `shouldBe` amt * 0.08
prop "has the property of returning 0.2 times amount for oxfordshire, (anything)" $
\amt area -> getTaxAmt' "oxfordshire" area amt `shouldBe` amt * 0.2
prop "has the property of returning 0.2 times amount for oxon, (anything)" $
\amt area -> getTaxAmt' "oxon" area amt `shouldBe` amt * 0.2
prop "has the property of returning 0 for all other places" $
\amt state county ->
state `notElem` [ "wisconsin"
, "wn"
, "illinois"
, "il"
, "oxfordshire"
, "oxon" ] ==> getTaxAmt' state county amt `shouldBe` 0
prop "has property of giving identical results to getTaxAmt for all inputs" $
\a s c -> getTaxAmt a s c `shouldBe` getTaxAmt' a s c
describe "getTaxRateFor" $ do
it "gives zero rate for unknown state" $ do
getTaxRateFor "blarb" taxLookup `shouldBe` (Rate 0)
it "gives correct rate for illinois" $ do
getTaxRateFor "illinois" taxLookup `shouldBe` (Rate 0.08)
it "gives correct rate for illinois" $ do
getTaxRateFor "illinois" taxLookup `shouldBe` (Rate 0.08)
it "gives correct rate for dunn, wisconsin" $ do
let counties = getTaxRateFor "wn" taxLookup
getTaxRateFor "dunn" counties `shouldBe` (Rate 0.004)
it "gives correct rate for bumblefuck, wisconsin" $ do
let counties = getTaxRateFor "wisconsin" taxLookup
getTaxRateFor "eau clair" counties `shouldBe` (Rate 0.005)
it "gives correct rate for bumblefuck, wisconsin" $ do
let counties = getTaxRateFor "wn" taxLookup
getTaxRateFor "bumblefuck" counties `shouldBe` (Rate 0)
| ciderpunx/57-exercises-for-programmers | test/P20StateTaxSpec.hs | gpl-3.0 | 4,595 | 0 | 16 | 1,337 | 1,011 | 509 | 502 | 77 | 1 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings, DeriveFunctor #-}
module Graphics.UI.Bottle.Widgets.EventMapDoc
( makeView
, IsHelpShown(..)
, addHelp
, makeToggledHelpAdder
, Config(..)
) where
import Prelude.Compat
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Control.Lens.Tuple
import Data.Function (on)
import Data.IORef (newIORef, readIORef, modifyIORef)
import qualified Data.Map as Map
import qualified Data.Tuple as Tuple
import Data.Vector.Vector2 (Vector2(..))
import qualified Graphics.DrawingCombinators as Draw
import Graphics.UI.Bottle.Animation (AnimId, R)
import qualified Graphics.UI.Bottle.Animation as Anim
import Graphics.UI.Bottle.EventMap (EventMap)
import qualified Graphics.UI.Bottle.EventMap as E
import Graphics.UI.Bottle.ModKey (ModKey(..))
import qualified Graphics.UI.Bottle.ModKey as ModKey
import Graphics.UI.Bottle.View (View(..))
import qualified Graphics.UI.Bottle.View as View
import Graphics.UI.Bottle.Widget (Widget)
import qualified Graphics.UI.Bottle.Widget as Widget
import qualified Graphics.UI.Bottle.Widgets.GridView as GridView
import qualified Graphics.UI.Bottle.Widgets.Spacer as Spacer
import qualified Graphics.UI.Bottle.Widgets.TextView as TextView
data Config = Config
{ configStyle :: TextView.Style
, configInputDocColor :: Draw.Color
, configBGColor :: Draw.Color
, configOverlayDocKeys :: [ModKey]
}
data Tree n l = Leaf l | Branch n [Tree n l]
deriving (Eq, Ord, Show, Functor)
bitraverseTree :: Applicative f => (n0 -> f n1) -> (l0 -> f l1) -> Tree n0 l0 -> f (Tree n1 l1)
bitraverseTree _ onLeaf (Leaf l) = Leaf <$> onLeaf l
bitraverseTree onNode onLeaf (Branch n ts) = Branch <$> onNode n <*> traverse (bitraverseTree onNode onLeaf) ts
treeNodes :: Lens.Traversal (Tree n0 l) (Tree n1 l) n0 n1
treeNodes = (`bitraverseTree` pure)
groupTree :: Eq node => [([node], leaf)] -> [Tree node leaf]
groupTree = foldr step []
where
step ([], l) rest = Leaf l : rest
step (at:as, l) b =
case b of
Branch bt bs : rest
| at == bt -> Branch bt (step (as, l) bs) : rest
_ -> Branch at (step (as, l) []) : b
-- We also rely on Map.toList returning a sorted list
groupInputDocs :: [([E.Subtitle], E.InputDoc)] -> [([E.Subtitle], [E.InputDoc])]
groupInputDocs = Map.toList . Map.fromListWith (++) . (Lens.traversed . _2 %~) (:[])
addAnimIds :: (Show a, Show b) => AnimId -> Tree a b -> Tree (AnimId, a) (AnimId, b)
addAnimIds animId (Leaf b) = Leaf (animId ++ ["leaf"], b)
addAnimIds animId (Branch a cs) =
Branch (tAnimId, a) $ map (addAnimIds tAnimId) cs
where
tAnimId = View.augmentAnimId animId a
makeShortcutKeyView ::
Config -> (AnimId, [E.InputDoc]) -> View
makeShortcutKeyView config (animId, inputDocs) =
inputDocs
<&> TextView.label (configStyle config) animId
<&> View.animFrame . Anim.unitImages %~ Draw.tint (configInputDocColor config)
& GridView.verticalAlign 0
makeTextViews ::
Config -> AnimId ->
Tree E.Subtitle [E.InputDoc] ->
Tree View View
makeTextViews config =
fmap
( (treeNodes %~ uncurry (TextView.label (configStyle config)))
. fmap (makeShortcutKeyView config)
) . addAnimIds
addHelpBG :: Config -> AnimId -> View -> View
addHelpBG config animId =
View.backgroundColor animId 1 $ configBGColor config
columns :: R -> (a -> R) -> [a] -> [[a]]
columns maxHeight itemHeight =
combine . foldr step (0, [], [])
where
combine (_, curColumn, doneColumns) = curColumn : doneColumns
step new (curColumnHeight, curColumn, doneColumns)
| newHeight + curColumnHeight > maxHeight =
(newHeight, [new], curColumn : doneColumns)
| otherwise =
(newHeight + curColumnHeight, new : curColumn, doneColumns)
where
newHeight = itemHeight new
makeView :: Vector2 R -> EventMap a -> Config -> AnimId -> View
makeView size eventMap config animId =
makeTreeView config animId size .
map (makeTextViews config animId) . groupTree . groupInputDocs .
map ((_1 %~ (^. E.docStrs)) . Tuple.swap) $ E.eventMapDocs eventMap
makeTooltip :: Config -> [ModKey] -> AnimId -> View
makeTooltip config helpKeys animId =
addHelpBG config animId $
GridView.horizontalAlign 0
[ TextView.label (configStyle config) animId "Show help"
, Spacer.makeHorizontal 10
, makeShortcutKeyView config
(animId ++ ["HelpKeys"], map ModKey.pretty helpKeys)
]
indent :: R -> View -> View
indent width x =
GridView.horizontalAlign 0 [Spacer.makeHorizontal width, x]
makeTreeView :: Config -> AnimId -> Vector2 R -> [Tree View View] -> View
makeTreeView config animId size =
GridView.horizontalAlign 1 . (Lens.traversed %@~ toGrid) .
columns (size ^. _2) pairHeight .
handleResult . go
where
toGrid i =
addHelpBG config (View.augmentAnimId animId i) .
GridView.make . map toRow
toRow (titleView, docView) = [(0, titleView), (Vector2 1 0, docView)]
pairHeight (titleView, docView) = (max `on` (^. View.height)) titleView docView
handleResult (pairs, []) = pairs
handleResult _ = error "Leafs at root of tree!"
go = mconcat . map fromTree
fromTree (Leaf inputDocsView) = ([], [inputDocsView])
fromTree (Branch titleView trees) =
( (titleView, GridView.verticalAlign 1 inputDocs) :
(Lens.traversed . _1 %~ indent 10) titles
, [] )
where
(titles, inputDocs) = go trees
addHelp :: (AnimId -> View) -> Widget.Size -> Widget f -> Widget f
addHelp f size =
Widget.animFrame <>~ docFrame
where
View eventMapSize eventMapFrame = f ["help box"]
transparency = Draw.Color 1 1 1
docFrame =
eventMapFrame
& Anim.translate (size - eventMapSize)
& Anim.layers -~ 10 -- TODO: 10?!
& Anim.unitImages %~ Draw.tint (transparency 0.8) -- TODO: 0.8?!
data IsHelpShown = HelpShown | HelpNotShown
deriving (Eq, Ord, Read, Show)
toggle :: IsHelpShown -> IsHelpShown
toggle HelpShown = HelpNotShown
toggle HelpNotShown = HelpShown
makeToggledHelpAdder ::
IsHelpShown -> IO (Config -> Widget.Size -> Widget IO -> IO (Widget IO))
makeToggledHelpAdder startValue =
do
showingHelpVar <- newIORef startValue
return $ \config size widget ->
do
showingHelp <- readIORef showingHelpVar
let (f, docStr) =
case showingHelp of
HelpShown ->
(makeView size (widget ^. Widget.eventMap) config, "Hide" :: String)
HelpNotShown ->
(makeTooltip config (configOverlayDocKeys config), "Show")
toggleEventMap =
Widget.keysEventMap (configOverlayDocKeys config) (E.Doc ["Help", "Key Bindings", docStr]) $
modifyIORef showingHelpVar toggle
return . addHelp f size $
Widget.strongerEvents toggleEventMap widget
| rvion/lamdu | bottlelib/Graphics/UI/Bottle/Widgets/EventMapDoc.hs | gpl-3.0 | 7,343 | 0 | 21 | 1,927 | 2,339 | 1,273 | 1,066 | -1 | -1 |
import Data.List
import Data.Char
isPangram :: String -> Bool
isPangram str = size == 26
where size = length $ foldl (\y x -> if not (x `elem` y) && isAlpha x then [x] ++ y else y ) [] $ map toUpper str | yannxia-self/code-war-hasekell-training | detect_pangram.hs | gpl-3.0 | 207 | 0 | 14 | 48 | 104 | 55 | 49 | 5 | 2 |
module Estuary.Widgets.PatternChain where
import Reflex
import Reflex.Dom
import qualified Sound.Tidal.Context as Tidal
import Estuary.Tidal.Types
import Estuary.Reflex.Container
import Estuary.Widgets.Generic
import Estuary.Reflex.Utility
import Control.Monad
import Data.Map
-- from Estuary.Tidal.Types:
-- data PatternCombinator = Merge | Add | Subtract | Multiply | Divide deriving (Eq,Show)
-- data PatternChain = PatternChain TransformedPattern | PatternChain' TransformedPattern PatternCombinator PatternChain deriving (Eq)
trivialPatternChain :: MonadWidget t m => PatternChain -> Event t () -> m (Dynamic t (PatternChain, Event t GenericSignal))
trivialPatternChain iValue _ = do
x <- button' "just an S atom" $ PatternChain (TransformedPattern [] (S (Atom "bd" Once)))
el "br" blank
y <- button' "merge S and N" $ PatternChain' (TransformedPattern [] (S (Atom "hh" (Rep 4)))) Merge (PatternChain (TransformedPattern [] (nPatternFromList [0..3])))
el "br" blank
z <- button' "adding Ns" $ PatternChain' (TransformedPattern [] (N (Atom 60 Once))) Add (PatternChain (TransformedPattern [] (N (Atom 7 Once))))
el "br" blank
xyz <- holdDyn iValue $ leftmost [x,y,z]
mapDyn (\a -> (a,never)) xyz
{-
patternChainWidget :: MonadWidget t m => PatternChain -> Event t () -> m (Dynamic t (PatternChain, Event t GenericSignal))
patternChainWidget (PatternChain tp) _ = do
tpWidget <- trivialTransformedPattern
patternChainWidget (PatternChain' tp combinator next) _ = do
-}
| Moskau/estuary | Estuary/Widgets/PatternChain.hs | gpl-3.0 | 1,497 | 0 | 17 | 221 | 389 | 200 | 189 | 20 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Spanner.Projects.Instances.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates an instance and begins preparing it to begin serving. The
-- returned long-running operation can be used to track the progress of
-- preparing the new instance. The instance name is assigned by the caller.
-- If the named instance already exists, \`CreateInstance\` returns
-- \`ALREADY_EXISTS\`. Immediately upon completion of this request: * The
-- instance is readable via the API, with all requested attributes but no
-- allocated resources. Its state is \`CREATING\`. Until completion of the
-- returned operation: * Cancelling the operation renders the instance
-- immediately unreadable via the API. * The instance can be deleted. * All
-- other attempts to modify the instance are rejected. Upon completion of
-- the returned operation: * Billing for all successfully-allocated
-- resources begins (some types may have lower than the requested levels).
-- * Databases can be created in the instance. * The instance\'s allocated
-- resource levels are readable via the API. * The instance\'s state
-- becomes \`READY\`. The returned long-running operation will have a name
-- of the format \`\/operations\/\` and can be used to track creation of
-- the instance. The metadata field type is CreateInstanceMetadata. The
-- response field type is Instance, if successful.
--
-- /See:/ <https://cloud.google.com/spanner/ Cloud Spanner API Reference> for @spanner.projects.instances.create@.
module Network.Google.Resource.Spanner.Projects.Instances.Create
(
-- * REST Resource
ProjectsInstancesCreateResource
-- * Creating a Request
, projectsInstancesCreate
, ProjectsInstancesCreate
-- * Request Lenses
, picParent
, picXgafv
, picUploadProtocol
, picAccessToken
, picUploadType
, picPayload
, picCallback
) where
import Network.Google.Prelude
import Network.Google.Spanner.Types
-- | A resource alias for @spanner.projects.instances.create@ method which the
-- 'ProjectsInstancesCreate' request conforms to.
type ProjectsInstancesCreateResource =
"v1" :>
Capture "parent" Text :>
"instances" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] CreateInstanceRequest :>
Post '[JSON] Operation
-- | Creates an instance and begins preparing it to begin serving. The
-- returned long-running operation can be used to track the progress of
-- preparing the new instance. The instance name is assigned by the caller.
-- If the named instance already exists, \`CreateInstance\` returns
-- \`ALREADY_EXISTS\`. Immediately upon completion of this request: * The
-- instance is readable via the API, with all requested attributes but no
-- allocated resources. Its state is \`CREATING\`. Until completion of the
-- returned operation: * Cancelling the operation renders the instance
-- immediately unreadable via the API. * The instance can be deleted. * All
-- other attempts to modify the instance are rejected. Upon completion of
-- the returned operation: * Billing for all successfully-allocated
-- resources begins (some types may have lower than the requested levels).
-- * Databases can be created in the instance. * The instance\'s allocated
-- resource levels are readable via the API. * The instance\'s state
-- becomes \`READY\`. The returned long-running operation will have a name
-- of the format \`\/operations\/\` and can be used to track creation of
-- the instance. The metadata field type is CreateInstanceMetadata. The
-- response field type is Instance, if successful.
--
-- /See:/ 'projectsInstancesCreate' smart constructor.
data ProjectsInstancesCreate =
ProjectsInstancesCreate'
{ _picParent :: !Text
, _picXgafv :: !(Maybe Xgafv)
, _picUploadProtocol :: !(Maybe Text)
, _picAccessToken :: !(Maybe Text)
, _picUploadType :: !(Maybe Text)
, _picPayload :: !CreateInstanceRequest
, _picCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsInstancesCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'picParent'
--
-- * 'picXgafv'
--
-- * 'picUploadProtocol'
--
-- * 'picAccessToken'
--
-- * 'picUploadType'
--
-- * 'picPayload'
--
-- * 'picCallback'
projectsInstancesCreate
:: Text -- ^ 'picParent'
-> CreateInstanceRequest -- ^ 'picPayload'
-> ProjectsInstancesCreate
projectsInstancesCreate pPicParent_ pPicPayload_ =
ProjectsInstancesCreate'
{ _picParent = pPicParent_
, _picXgafv = Nothing
, _picUploadProtocol = Nothing
, _picAccessToken = Nothing
, _picUploadType = Nothing
, _picPayload = pPicPayload_
, _picCallback = Nothing
}
-- | Required. The name of the project in which to create the instance.
-- Values are of the form \`projects\/\`.
picParent :: Lens' ProjectsInstancesCreate Text
picParent
= lens _picParent (\ s a -> s{_picParent = a})
-- | V1 error format.
picXgafv :: Lens' ProjectsInstancesCreate (Maybe Xgafv)
picXgafv = lens _picXgafv (\ s a -> s{_picXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
picUploadProtocol :: Lens' ProjectsInstancesCreate (Maybe Text)
picUploadProtocol
= lens _picUploadProtocol
(\ s a -> s{_picUploadProtocol = a})
-- | OAuth access token.
picAccessToken :: Lens' ProjectsInstancesCreate (Maybe Text)
picAccessToken
= lens _picAccessToken
(\ s a -> s{_picAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
picUploadType :: Lens' ProjectsInstancesCreate (Maybe Text)
picUploadType
= lens _picUploadType
(\ s a -> s{_picUploadType = a})
-- | Multipart request metadata.
picPayload :: Lens' ProjectsInstancesCreate CreateInstanceRequest
picPayload
= lens _picPayload (\ s a -> s{_picPayload = a})
-- | JSONP
picCallback :: Lens' ProjectsInstancesCreate (Maybe Text)
picCallback
= lens _picCallback (\ s a -> s{_picCallback = a})
instance GoogleRequest ProjectsInstancesCreate where
type Rs ProjectsInstancesCreate = Operation
type Scopes ProjectsInstancesCreate =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/spanner.admin"]
requestClient ProjectsInstancesCreate'{..}
= go _picParent _picXgafv _picUploadProtocol
_picAccessToken
_picUploadType
_picCallback
(Just AltJSON)
_picPayload
spannerService
where go
= buildClient
(Proxy :: Proxy ProjectsInstancesCreateResource)
mempty
| brendanhay/gogol | gogol-spanner/gen/Network/Google/Resource/Spanner/Projects/Instances/Create.hs | mpl-2.0 | 7,716 | 0 | 17 | 1,599 | 817 | 491 | 326 | 115 | 1 |
-- This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | A tuple represents the types of multiple Cassandra columns. It is used
-- to check that column-types match.
module Database.CQL.Protocol.Tuple
( Tuple
, count
, check
, tuple
, store
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Data.Serialize
import Data.Word
import Database.CQL.Protocol.Class
import Database.CQL.Protocol.Codec (putValue)
import Database.CQL.Protocol.Types
import Database.CQL.Protocol.Tuple.TH
genInstances 48
| whitehead1415/cql | src/Database/CQL/Protocol/Tuple.hs | mpl-2.0 | 851 | 0 | 5 | 160 | 91 | 63 | 28 | 18 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.FusionTables.Style.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a specific style.
--
-- /See:/ <https://developers.google.com/fusiontables Fusion Tables API Reference> for @fusiontables.style.get@.
module Network.Google.Resource.FusionTables.Style.Get
(
-- * REST Resource
StyleGetResource
-- * Creating a Request
, styleGet
, StyleGet
-- * Request Lenses
, sgStyleId
, sgTableId
) where
import Network.Google.FusionTables.Types
import Network.Google.Prelude
-- | A resource alias for @fusiontables.style.get@ method which the
-- 'StyleGet' request conforms to.
type StyleGetResource =
"fusiontables" :>
"v2" :>
"tables" :>
Capture "tableId" Text :>
"styles" :>
Capture "styleId" (Textual Int32) :>
QueryParam "alt" AltJSON :> Get '[JSON] StyleSetting
-- | Gets a specific style.
--
-- /See:/ 'styleGet' smart constructor.
data StyleGet =
StyleGet'
{ _sgStyleId :: !(Textual Int32)
, _sgTableId :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'StyleGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sgStyleId'
--
-- * 'sgTableId'
styleGet
:: Int32 -- ^ 'sgStyleId'
-> Text -- ^ 'sgTableId'
-> StyleGet
styleGet pSgStyleId_ pSgTableId_ =
StyleGet' {_sgStyleId = _Coerce # pSgStyleId_, _sgTableId = pSgTableId_}
-- | Identifier (integer) for a specific style in a table
sgStyleId :: Lens' StyleGet Int32
sgStyleId
= lens _sgStyleId (\ s a -> s{_sgStyleId = a}) .
_Coerce
-- | Table to which the requested style belongs
sgTableId :: Lens' StyleGet Text
sgTableId
= lens _sgTableId (\ s a -> s{_sgTableId = a})
instance GoogleRequest StyleGet where
type Rs StyleGet = StyleSetting
type Scopes StyleGet =
'["https://www.googleapis.com/auth/fusiontables",
"https://www.googleapis.com/auth/fusiontables.readonly"]
requestClient StyleGet'{..}
= go _sgTableId _sgStyleId (Just AltJSON)
fusionTablesService
where go
= buildClient (Proxy :: Proxy StyleGetResource)
mempty
| brendanhay/gogol | gogol-fusiontables/gen/Network/Google/Resource/FusionTables/Style/Get.hs | mpl-2.0 | 2,990 | 0 | 14 | 708 | 404 | 241 | 163 | 62 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.CustomMetrics.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists custom metrics to which the user has access.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.customMetrics.list@.
module Network.Google.Resource.Analytics.Management.CustomMetrics.List
(
-- * REST Resource
ManagementCustomMetricsListResource
-- * Creating a Request
, managementCustomMetricsList
, ManagementCustomMetricsList
-- * Request Lenses
, mcmlWebPropertyId
, mcmlAccountId
, mcmlStartIndex
, mcmlMaxResults
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.customMetrics.list@ method which the
-- 'ManagementCustomMetricsList' request conforms to.
type ManagementCustomMetricsListResource =
"analytics" :>
"v3" :>
"management" :>
"accounts" :>
Capture "accountId" Text :>
"webproperties" :>
Capture "webPropertyId" Text :>
"customMetrics" :>
QueryParam "start-index" (Textual Int32) :>
QueryParam "max-results" (Textual Int32) :>
QueryParam "alt" AltJSON :> Get '[JSON] CustomMetrics
-- | Lists custom metrics to which the user has access.
--
-- /See:/ 'managementCustomMetricsList' smart constructor.
data ManagementCustomMetricsList = ManagementCustomMetricsList'
{ _mcmlWebPropertyId :: !Text
, _mcmlAccountId :: !Text
, _mcmlStartIndex :: !(Maybe (Textual Int32))
, _mcmlMaxResults :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ManagementCustomMetricsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mcmlWebPropertyId'
--
-- * 'mcmlAccountId'
--
-- * 'mcmlStartIndex'
--
-- * 'mcmlMaxResults'
managementCustomMetricsList
:: Text -- ^ 'mcmlWebPropertyId'
-> Text -- ^ 'mcmlAccountId'
-> ManagementCustomMetricsList
managementCustomMetricsList pMcmlWebPropertyId_ pMcmlAccountId_ =
ManagementCustomMetricsList'
{ _mcmlWebPropertyId = pMcmlWebPropertyId_
, _mcmlAccountId = pMcmlAccountId_
, _mcmlStartIndex = Nothing
, _mcmlMaxResults = Nothing
}
-- | Web property ID for the custom metrics to retrieve.
mcmlWebPropertyId :: Lens' ManagementCustomMetricsList Text
mcmlWebPropertyId
= lens _mcmlWebPropertyId
(\ s a -> s{_mcmlWebPropertyId = a})
-- | Account ID for the custom metrics to retrieve.
mcmlAccountId :: Lens' ManagementCustomMetricsList Text
mcmlAccountId
= lens _mcmlAccountId
(\ s a -> s{_mcmlAccountId = a})
-- | An index of the first entity to retrieve. Use this parameter as a
-- pagination mechanism along with the max-results parameter.
mcmlStartIndex :: Lens' ManagementCustomMetricsList (Maybe Int32)
mcmlStartIndex
= lens _mcmlStartIndex
(\ s a -> s{_mcmlStartIndex = a})
. mapping _Coerce
-- | The maximum number of custom metrics to include in this response.
mcmlMaxResults :: Lens' ManagementCustomMetricsList (Maybe Int32)
mcmlMaxResults
= lens _mcmlMaxResults
(\ s a -> s{_mcmlMaxResults = a})
. mapping _Coerce
instance GoogleRequest ManagementCustomMetricsList
where
type Rs ManagementCustomMetricsList = CustomMetrics
type Scopes ManagementCustomMetricsList =
'["https://www.googleapis.com/auth/analytics",
"https://www.googleapis.com/auth/analytics.readonly"]
requestClient ManagementCustomMetricsList'{..}
= go _mcmlAccountId _mcmlWebPropertyId
_mcmlStartIndex
_mcmlMaxResults
(Just AltJSON)
analyticsService
where go
= buildClient
(Proxy :: Proxy ManagementCustomMetricsListResource)
mempty
| rueshyna/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/CustomMetrics/List.hs | mpl-2.0 | 4,763 | 0 | 18 | 1,095 | 592 | 345 | 247 | 92 | 1 |
{-
Copyright (C) 2009 Andrejs Sisojevs <[email protected]>
All rights reserved.
For license and copyright information, see the file COPYRIGHT
-}
--------------------------------------------------------------------------
--------------------------------------------------------------------------
-- | Taken from Lucu-0.4 package. The differences from the original:
--
-- (1) this version exports everything,
--
-- (2) added stringLBS,
--
-- (3) - Data.ByteString.Lazy.UTF8.Unified instead or usual lazy
-- ByteString.
-- _________________________
-- Here are the original authors' (PHO) comments:
--
-- Yet another parser combinator. This is mostly a subset of
-- "Text.ParserCombinators.Parsec" but there are some differences:
--
-- * This parser works on 'Data.ByteString.Base.LazyByteString'
-- instead of 'Prelude.String'.
--
-- * Backtracking is the only possible behavior so there is no \"try\"
-- action.
--
-- * On success, the remaining string is returned as well as the
-- parser result.
--
-- * You can choose whether to treat reaching EOF (trying to eat one
-- more letter at the end of string) a fatal error or to treat it a
-- normal failure. If a fatal error occurs, the entire parsing
-- process immediately fails without trying any backtracks. The
-- default behavior is to treat EOF fatal.
--
-- In general, you don't have to use this module directly.
{-# LANGUAGE BangPatterns #-}
module Text.PCLT.Parser.ParserInternals where
import Control.Monad.State.Strict
import qualified Data.ByteString.Lazy.UTF8.Unified as Lazy (ByteString)
import qualified Data.ByteString.Lazy.UTF8.Unified as B hiding (ByteString)
import qualified Data.Foldable as Fold
import Data.Int
import qualified Data.Sequence as Seq
import Data.Sequence (Seq, (|>))
-- |@'Parser' a@ is obviously a parser which parses and returns @a@.
newtype Parser a = Parser {
runParser :: State ParserState (ParserResult a)
}
data ParserState
= PST {
pstInput :: Lazy.ByteString
, pstIsEOFFatal :: !Bool
}
deriving (Eq, Show)
data ParserResult a = Success !a
                    | IllegalInput -- the input could not be accepted
                    | ReachedEOF   -- tried to read past the end of the input
deriving (Eq, Show)
-- (>>=) :: Parser a -> (a -> Parser b) -> Parser b
instance Monad Parser where
    p >>= f = Parser $! do saved <- get -- save the state so we can backtrack on failure
                           result <- runParser p
                           case result of
                             Success a    -> runParser (f a)
                             IllegalInput -> do put saved -- restore the state
                                                return IllegalInput
                             ReachedEOF   -> do put saved -- restore the state
                                                return ReachedEOF
return !x = Parser $! return $! Success x
fail _ = Parser $! return $! IllegalInput
-- |@'failP'@ is just a synonym for @'Prelude.fail'
-- 'Prelude.undefined'@.
failP :: Parser a
failP = fail undefined
-- |@'parse' p bstr@ parses @bstr@ with @p@ and returns @(# result,
-- remaining #)@.
parse :: Parser a -> Lazy.ByteString -> ( ParserResult a, Lazy.ByteString )
parse !p input -- input needs to be lazy
= let (!result, state') = runState (runParser p) (PST input True)
in
      ( result, pstInput state' ) -- pstInput state' needs to be lazy as well
-- |@'parseStr' p str@ packs @str@ and parses it.
parseStr :: Parser a -> String -> ( ParserResult a, Lazy.ByteString )
parseStr !p input -- input needs to be lazy
= parse p (B.pack input)
anyChar :: Parser Char
anyChar = Parser $!
do state@(PST input _) <- get
if B.null input then
return ReachedEOF
else
do put $! state { pstInput = B.tail input }
return (Success $! B.head input)
eof :: Parser ()
eof = Parser $!
do PST input _ <- get
if B.null input then
return $! Success ()
else
return IllegalInput
-- |@'allowEOF' p@ makes @p@ treat reaching EOF a normal failure.
allowEOF :: Parser a -> Parser a
allowEOF !f
= Parser $! do saved@(PST _ isEOFFatal) <- get
put $! saved { pstIsEOFFatal = False }
result <- runParser f
state <- get
put $! state { pstIsEOFFatal = isEOFFatal }
return result
satisfy :: (Char -> Bool) -> Parser Char
satisfy !f
= do c <- anyChar
if f c then
return c
else
failP
char :: Char -> Parser Char
char !c = satisfy (== c)
string :: String -> Parser String
string !str
= let bs = B.pack str
len = B.length bs
in
Parser $!
do st <- get
let (bs', rest) = B.splitAt len $ pstInput st
st' = st { pstInput = rest }
if B.length bs' < len then
return ReachedEOF
else
if bs == bs' then
do put st'
return $ Success str
else
return IllegalInput
infixr 0 <|>
-- |This is the backtracking alternation. There is no non-backtracking
-- equivalent.
(<|>) :: Parser a -> Parser a -> Parser a
(!f) <|> (!g)
    = Parser $! do saved <- get -- save the state
                   result <- runParser f
                   case result of
                     Success a    -> return $! Success a
                     IllegalInput -> do put saved -- restore the state
                                        runParser g
                     ReachedEOF   -> if pstIsEOFFatal saved then
                                       do put saved
                                          return ReachedEOF
                                     else
                                       do put saved
                                          runParser g
choice :: [Parser a] -> Parser a
choice = foldl (<|>) failP
oneOf :: [Char] -> Parser Char
oneOf = foldl (<|>) failP . map char
notFollowedBy :: Parser a -> Parser ()
notFollowedBy !p
    = Parser $! do saved <- get -- save the state
                   result <- runParser p
                   case result of
                     Success _    -> do put saved -- restore the state
                                        return IllegalInput
                     IllegalInput -> do put saved -- restore the state
                                        return $! Success ()
                     ReachedEOF   -> do put saved -- restore the state
                                        return $! Success ()
digit :: Parser Char
digit = do c <- anyChar
if c >= '0' && c <= '9' then
return c
else
failP
hexDigit :: Parser Char
hexDigit = do c <- anyChar
if (c >= '0' && c <= '9') ||
(c >= 'a' && c <= 'f') ||
(c >= 'A' && c <= 'F') then
return c
else
failP
many :: forall a. Parser a -> Parser [a]
many !p = Parser $!
do state <- get
let ( result, state' ) = many' state Seq.empty
put state'
return result
where
many' :: ParserState -> Seq a -> ( ParserResult [a], ParserState )
many' !st !soFar
= case runState (runParser p) st of
(Success a, st') -> many' st' (soFar |> a)
(IllegalInput, _) -> ( Success (Fold.toList soFar), st )
(ReachedEOF , _) -> if pstIsEOFFatal st then
( ReachedEOF, st )
else
( Success (Fold.toList soFar), st )
manyChar :: Parser Char -> Parser Lazy.ByteString
manyChar !p = Parser $!
do state <- get
case scan' state 0 of
Success len
-> do let (bs, rest) = B.splitAt len (pstInput state)
state' = state { pstInput = rest }
put state'
return $ Success bs
ReachedEOF
-> if pstIsEOFFatal state then
return ReachedEOF
else
error "internal error"
_ -> error "internal error"
where
scan' :: ParserState -> Int64 -> ParserResult Int64
scan' !st !soFar
= case runState (runParser p) st of
(Success _ , st') -> scan' st' (soFar + 1)
(IllegalInput, _ ) -> Success soFar
(ReachedEOF , _ ) -> if pstIsEOFFatal st then
ReachedEOF
else
Success soFar
many1 :: Parser a -> Parser [a]
many1 !p = do x <- p
xs <- many p
return (x:xs)
count :: Int -> Parser a -> Parser [a]
count !n !p = Parser $! count' n p Seq.empty
-- This implementation is rather ugly but we need to make it
-- tail-recursive to avoid stack overflow.
count' :: Int -> Parser a -> Seq a -> State ParserState (ParserResult [a])
count' 0 _ !soFar = return $! Success $! Fold.toList soFar
count' !n !p !soFar = do saved <- get
result <- runParser p
case result of
Success a -> count' (n-1) p (soFar |> a)
IllegalInput -> do put saved
return IllegalInput
ReachedEOF -> do put saved
return ReachedEOF
-- def may be a _|_
option :: a -> Parser a -> Parser a
option def !p = p <|> return def
sepBy :: Parser a -> Parser sep -> Parser [a]
sepBy !p !sep = sepBy1 p sep <|> return []
sepBy1 :: Parser a -> Parser sep -> Parser [a]
sepBy1 !p !sep
= do x <- p
xs <- many $! sep >> p
return (x:xs)
sp :: Parser Char
sp = char ' '
ht :: Parser Char
ht = char '\t'
crlf :: Parser String
crlf = string "\x0d\x0a"
stringLBS :: Lazy.ByteString -> Parser Lazy.ByteString
stringLBS !bs
= let len = B.length bs
in
Parser $!
do st <- get
let (bs', rest) = B.splitAt len $ pstInput st
st' = st { pstInput = rest }
case B.length bs' < len of
True -> return ReachedEOF
False ->
case bs == bs' of
True -> do
put st'
return $ Success bs
False -> return IllegalInput | Andrey-Sisoyev/haskell-PCLT | Text/PCLT/Parser/ParserInternals.hs | lgpl-2.1 | 10,972 | 0 | 19 | 4,388 | 2,619 | 1,304 | 1,315 | -1 | -1 |
{-# LANGUAGE
TemplateHaskell,
TypeOperators #-}
module Object.Templates(
makeName,
makeObject
) where
import Object.Letters
import Object.Types
import Prelude hiding ((.))
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Data.Char
import Data.Maybe
-- |
-- takes lower case 'foo' and makes
-- 'type Foo = Method (T_f,T_o,T_o)'
-- 'foo = Method (T_f,T_o,T_o) :: Foo'
makeName :: String -> Q [Dec]
makeName name = fst `fmap` makeName' name
makeName' :: String -> Q ([Dec],(Name,Name))
makeName' name = go where
go
| [] <- name = fail "can't make empty variable"
| not $ isLower $ head name = fail $ name ++ ": does not start with a lower letter"
| (first:rest) <- name = do
typeTuple <- mapM typeCon name
dataTuple <- mapM dataCon name
typeName <- newName $ [toUpper first] ++ rest ++ ['_']
dataName <- newName $ [first] ++ rest
typ <- [t| Method $(return $ foldl AppT (TupleT (length typeTuple)) typeTuple) |]
dat <- [| Method $(return $ (TupE dataTuple)) :: $(return $ ConT typeName) |]
let typeDecl = TySynD typeName [] typ
let dataDecl = ValD (VarP dataName) (NormalB dat) []
return ([typeDecl,dataDecl],(typeName,dataName))
typeCon c = do
Just res <- lookupTypeName $ "T_" ++ [c]
return $ ConT res
dataCon c = do
Just res <- lookupValueName $ "T_" ++ [c]
return $ ConE res
-- |
-- returns (typeName, variableNames, and fields)
getInfo :: Info -> Q (Name, [Name], [VarStrictType])
getInfo (TyConI (DataD context typeName vars [RecC constrName fields] _)) = go where
go = return (typeName, map getVar vars, fields)
getVar (PlainTV n) = n
getVar (KindedTV n _) = n
getInfo _ = fail $ "type needs to have a single constructor record type"
getFieldName (fieldName,strictness,type')
| nameBase fieldName !! 0 /= '_' || not (isLower $ nameBase fieldName !! 1)
= fail $ show fieldName ++
": all fieldNames must commence with a '_' \
\and continue with a lower case letter"
| otherwise = nameBase fieldName
-- |
-- takes a Type with one record constructor
-- 'makeObject \'\'Foo'
-- and produces
-- set and get instances for all fields
makeObject :: Name -> Q [Dec]
makeObject name = go name where
go :: Name -> Q [Dec]
go obj = do
(name, vars, fields) <- reify name >>= getInfo
let objType = foldl AppT (ConT name) (VarT `fmap` vars)
concat `fmap` (sequence $ makeField name vars `fmap` fields)
-- "(Object.Example.Foo,[x_1627454179],[(Object.Example._bar,NotStrict,ConT GHC.Types.Int),(Object.Example._baz,NotStrict,ConT GHC.Types.Char),(Object.Example._blub,NotStrict,VarT x_1627454179)])"
makeField :: Name -> [Name] -> VarStrictType -> Q [Dec]
makeField _ _ (name,_,_) | '_' /= head (nameBase name) = fail $ show name ++ " did not start with underscore"
makeField name vars (fName, _, fType) = do
(decs1,(typeName,dataName)) <- makeName' (tail $ nameBase fName)
let objType = foldl AppT (ConT name) (VarT `fmap` vars)
actionInst <- [d|
instance (value ~ $(return fType)) => Action $(return objType) $(return $ ConT typeName) value where
object . _ = $(return $ VarE fName) object
|]
matchType <- [t| $(return $ ConT typeName) := $(return $ VarT $ mkName "value") |]
actionSetInst <- [d|
instance (value ~ $(return fType), object ~ $(return objType)) => Action $(return objType) $(return matchType) object where
object . ( _ := v) = $(recUpdE [e|object|] [return (fName, VarE $ mkName "v")])
|]
return $ actionInst ++ actionSetInst ++ decs1
| yokto/Object | Object/Templates.hs | apache-2.0 | 3,482 | 16 | 16 | 659 | 1,024 | 541 | 483 | -1 | -1 |
import System.Random
-- import Debug.Trace -- uncomment for tracing
removeAt :: Int -> [a] -> (a, [a])
removeAt 1 (x:xs) = (x,xs)
removeAt n (x:xs) = (a,x:b)
where (a,b) = removeAt (n-1) xs
rnd_select :: [a] -> Int -> IO [a]
rnd_select _ 0 = return []
rnd_select [] _ = return []
rnd_select xs n = randomRIO(1, length xs) >>=
-- (\ i -> return (trace ("random i = " ++ show i) i)) >>=
(\ i -> return $ removeAt i xs) >>=
(\ (x, xs) -> (rnd_select xs (n-1) >>= (\ xs -> return $ x : xs)))
| alephnil/h99 | 23.hs | apache-2.0 | 502 | 0 | 13 | 115 | 259 | 141 | 118 | 11 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.ClusterNetworkList where
import GHC.Generics
import Data.Text
import Openshift.Unversioned.ListMeta
import Openshift.V1.ClusterNetwork
import qualified Data.Aeson
-- |
data ClusterNetworkList = ClusterNetworkList
{ kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
, apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
, metadata :: Maybe ListMeta -- ^
, items :: [ClusterNetwork] -- ^ list of cluster networks
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON ClusterNetworkList
instance Data.Aeson.ToJSON ClusterNetworkList
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1/ClusterNetworkList.hs | apache-2.0 | 1,235 | 0 | 9 | 173 | 125 | 77 | 48 | 19 | 0 |
module Language.Livescript.Lexer (
lexeme
, identifier
, reserved
, operator
, reservedOp
, charLiteral
, stringLiteral
, natural
, integer
, float
, naturalOrFloat
, decimal
, hexadecimal
, octal
, symbol
, whiteSpace
, parens
, braces
, brackets
, squares
, semi
, comma
, colon
, dot
, identifierStart
) where
import Data.Text (Text)
import Prelude hiding (lex)
import Text.Parsec hiding (State)
import Text.Parsec.Text ()
import qualified Text.Parsec.Token as T
import Language.Livescript.Parser.Type
------------------------------------------------------------------------
identifierStart :: Parser Char
identifierStart = letter <|> oneOf "$_"
livescriptDef :: T.GenLanguageDef Text () ParserState
livescriptDef = T.LanguageDef {
T.caseSensitive = True
, T.commentStart = "/*"
, T.commentEnd = "*/"
, T.commentLine = "#"
, T.nestedComments = False
, T.identStart = identifierStart
, T.identLetter = alphaNum <|> oneOf "$_-"
, T.opStart = oneOf "=:+-"
, T.opLetter = oneOf "="
, T.reservedNames = names
, T.reservedOpNames = ops
}
where
names = []
ops = ["=",":=","+","-"]
lex :: T.GenTokenParser Text () ParserState
lex = T.makeTokenParser livescriptDef
-- everything but commaSep and semiSep
identifier :: Parser String
identifier = T.identifier lex
reserved :: String -> Parser ()
reserved = T.reserved lex
operator :: Parser String
operator = T.operator lex
reservedOp :: String -> Parser ()
reservedOp = T.reservedOp lex
charLiteral :: Parser Char
charLiteral = T.charLiteral lex
stringLiteral :: Parser String
stringLiteral = T.stringLiteral lex
natural :: Parser Integer
natural = T.natural lex
integer :: Parser Integer
integer = T.integer lex
float :: Parser Double
float = T.float lex
naturalOrFloat :: Parser (Either Integer Double)
naturalOrFloat = T.naturalOrFloat lex
decimal :: Parser Integer
decimal = T.decimal lex
hexadecimal :: Parser Integer
hexadecimal = T.hexadecimal lex
octal :: Parser Integer
octal = T.octal lex
symbol :: String -> Parser String
symbol = T.symbol lex
whiteSpace :: Parser ()
whiteSpace = T.whiteSpace lex
parens :: Parser a -> Parser a
parens = T.parens lex
braces :: Parser a -> Parser a
braces = T.braces lex
squares :: Parser a -> Parser a
squares = T.squares lex
semi :: Parser String
semi = T.semi lex
comma :: Parser String
comma = T.comma lex
colon :: Parser String
colon = T.colon lex
dot :: Parser String
dot = T.dot lex
brackets :: Parser a -> Parser a
brackets = T.brackets lex
lexeme :: Parser a -> Parser a
lexeme = T.lexeme lex
| jystic/language-livescript | src/Language/Livescript/Lexer.hs | apache-2.0 | 2,778 | 0 | 8 | 672 | 832 | 451 | 381 | 99 | 1 |
{- |
Module : Codec.Goat.Util
Description : Various utility functions
Copyright : (c) Daniel Lovasko, 2016-2017
License : BSD3
Maintainer : Daniel Lovasko <[email protected]>
Stability : stable
Portability : portable
Various utility functions used throughout the codebase.
-}
module Codec.Goat.Util
( aiGetByteString
, aiPutByteString
, bool
, first
, fromBools
, inBounds
, packBits
, select
, sub
, toBools
, unpackBits
) where
import Data.Bits
import Data.Int
import Data.List.Split (chunksOf)
import Data.Serialize
import Data.Word
import qualified Data.ByteString as B
-- | Check whether a value falls between the bounds (inclusive).
inBounds :: (Ord a)
=> (a, a) -- ^ bounds
-> a -- ^ value
-> Bool -- ^ decision
inBounds (lo, hi) x = lo <= x && x <= hi
-- | Correct subtraction of two unsigned integers.
sub :: Word32 -- ^ first word
-> Word32 -- ^ second word
-> Int64 -- ^ result
sub a b = fromIntegral a - fromIntegral b
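-- For illustration (an assumed example, not part of the original module):
-- @sub 1 2@ yields @-1@, whereas subtracting the 'Word32's directly would
-- wrap around to a large positive number.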
-- | Pack a list of bits into a more compact form.
packBits :: [Bool] -- ^ bits
-> B.ByteString -- ^ bytestring
packBits xs = B.pack $ map fromBools (chunksOf 8 xs)
-- | Unpack a compact block of bytes into a list of bools.
unpackBits :: B.ByteString -- ^ bits
-> [Bool] -- ^ bytestring
unpackBits b = concatMap toBools (B.unpack b)
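-- An assumed round-trip sanity check (illustrative only): for a whole
-- number of bytes the packing is lossless, e.g.
--
-- > unpackBits (packBits (replicate 16 True)) == replicate 16 True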
-- | Functional equivalent of the 'if/then/else' construct.
bool :: a -- ^ True option
-> a -- ^ False option
-> Bool -- ^ bool
-> a -- ^ result
bool x _ True = x
bool _ y False = y
-- | Convert a Bits instance into a list of bools.
toBools :: (FiniteBits b)
=> b -- ^ Bits instance
-> [Bool] -- ^ bits
toBools bits = map (testBit bits) [0..finiteBitSize bits-1]
-- | Convert a list of bools into a Bits instance.
fromBools :: (Num b, FiniteBits b)
=> [Bool] -- ^ bits
-> b -- ^ Bits instance
fromBools = foldr (\b i -> bool (bit 0) 0 b .|. shift i 1) 0
-- | Select only certain elements from the list based on the boolean values.
select :: [a] -- ^ list
-> [Bool] -- ^ presence flags
-> [a] -- ^ filtered list
select [] _ = []
select _ [] = []
select (x:xs) (b:bs) = bool (x : select xs bs) (select xs bs) b
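-- For illustration (assumed behaviour, following the definition above):
--
-- > select "abc" [True, False, True] == "ac"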
-- | Apply a function to the first element of a pair.
first :: (a -> b) -- ^ function
-> (a, x) -- ^ old pair
-> (b, x) -- ^ new pair
first f (a, x) = (f a, x)
-- | Architecture-independent serialization of a strict ByteString.
aiPutByteString :: B.ByteString -- ^ bytestring to parse
-> Put -- ^ writer
aiPutByteString bs = putListOf putWord8 (B.unpack bs)
-- | Architecture-independent deserialization of a lazy ByteString.
aiGetByteString :: Get B.ByteString -- ^ reader
aiGetByteString = B.pack <$> getListOf getWord8
| lovasko/goat | src/Codec/Goat/Util.hs | bsd-2-clause | 2,857 | 0 | 11 | 745 | 661 | 376 | 285 | 62 | 1 |
{-# LANGUAGE CPP, MultiParamTypeClasses, OverloadedStrings #-}
module HTIG.Session
( HSession(HSession)
) where
import Control.Applicative ((<$>))
import Control.Monad (forM_, when)
import Data.Binary (decodeFile, encodeFile)
import Data.List (delete)
import Data.Maybe (isJust)
import System.Directory (doesFileExist)
import System.FilePath ((</>))
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as Map
import qualified Data.Set as Set
import HTIG.Core
import HTIG.Channel.OAuth
import HTIG.Database
import HTIG.IRCServer
import HTIG.TwitterAPI
import HTIG.Utils
#include "../debug.hs"
data HSession = HSession
instance ISession HSession GlobalState SessionState where
handleClose _ = do
whenHasNickAndUser $ do
Just nick <- sNick <$> getLocal
chans <- sJoinedChannels <$> getLocal
forM_ chans $ \chan -> quitChannel chan (Nick nick) (Just "Connection closed")
killAllH
whenM (isJust . sNick <$> getLocal) $ do
Just nick <- sNick <$> getLocal
modifyGlobal $ \g ->
let ns = gsNicks g
in g { gsNicks = delete nick ns }
handlePing _ sn msn = writeServerCommand $ PongCmd sn msn
handlePass _ p = modifyLocal $ \s -> s { sPassword = Just p }
handleNick _ n _ = do
debug n
ret <- modifyGlobal' $ \g ->
let ns = gsNicks g
in if n `elem` ns
then (g, False)
else (g { gsNicks = n:ns }, True)
if ret
then do
modifyLocal $ \s -> s { sNick = Just n }
whenHasNickAndUser $ do
debug "nick and user sended"
sendWelcome
doOAuthAuthentication
openUserDatabase
runSessionInitHook
else do
writeServerError eRR_NICKNAMEINUSE $ "nickname \"" ++. n ++. "\" already exists"
closeConn'
handleUser _ username hostname servername realname = do
modifyLocal $ \s -> s { sUserName = Just username
, sHostName = Just hostname
, sServerName = Just servername
, sRealName = Just realname
}
whenHasNickAndUser $ do
debug "nick and user sended"
sendWelcome
doOAuthAuthentication
openUserDatabase
runSessionInitHook
handleJoin _ chans = whenOAuthVerified $ do
debug chans
Just nick <- sNick <$> getLocal
forM_ (Map.keys chans) $ \cname -> do
mchan <- lookupJoinedChan cname
case mchan of
Just chan -> joinChannel chan (Nick nick)
Nothing -> createNewChannel cname
-- TODO: implement
--handleKick _ chans nicks marg = undefined
handlePrivmsg _ targets arg = whenOAuthVerified $ handlePrivmsg'
where
handlePrivmsg'
| "\x01\&ACTION " `B.isPrefixOf` arg && "\x01" `B.isSuffixOf` arg =
doAction targets $ stripAction arg
| otherwise = do
Just nick <- sNick <$> getLocal
chans <- sJoinedChannels <$> getLocal
forM_ chans $ \c ->
when (channelName c `Set.member` targets) $
privmsgChannel c (Nick nick) arg
stripAction = B.init . B.tail . B.dropWhile (/= ' ')
handlePart _ chans marg = whenHasNickAndUser $ do
debug chans
Just nick <- sNick <$> getLocal
forM_ (Set.toList chans) $ \cname -> do
mchan <- lookupJoinedChan cname
case mchan of
Just chan -> do
partChannel chan (Nick nick)
modifyLocal $ \s -> s { sJoinedChannels = filter ((/= cname) . channelName) $ sJoinedChannels s }
writeConn' $ Message (Just $ Nick nick) $ PartCmd (Set.singleton cname) Nothing
debug ("part from" :: String, cname)
Nothing -> return ()
-- TODO: implement
--handleCommand "INVITE" _ chans arg = undefined
lookupJoinedChan :: ChannelName -> HTIG (Maybe HChannel)
lookupJoinedChan cname = do
chans <- sJoinedChannels <$> getLocal
case filter ((cname == ) . channelName) chans of
chan:_ -> return $ Just chan
[] -> return Nothing
createNewChannel :: ChannelName -> HTIG ()
createNewChannel cname = do
Just nick <- sNick <$> getLocal
mchanf <- lookupChannelFactory cname
case mchanf of
Just chanf -> do
chan <- chanf cname
modifyLocal $ \s -> s { sJoinedChannels = chan : sJoinedChannels s }
debug ("create channel for" :: String, cname)
joinChannel chan (Nick nick)
Nothing -> do
-- TODO: return proper error
debug ("no channel found" :: String, cname)
return ()
doAction :: Set.Set TargetName -> B.ByteString -> HTIG ()
doAction ts arg = do
debug (ts, arg)
let (actName, arg') = B.break (== ' ') arg
acts <- actions . gsHConfig <$> getGlobal
case lookup actName acts of
Just act -> act ts arg'
Nothing -> showActionHelp ts
showActionHelp :: Set.Set TargetName -> HTIG ()
showActionHelp ts = do
actNames <- map fst . actions . gsHConfig <$> getGlobal
writeServerCommand $ NoticeCmd ts "[htig] CTCP ACTION COMMANDS:"
forM_ actNames $ \a ->
writeServerCommand $ NoticeCmd ts a
whenHasNickAndUser :: HTIG () -> HTIG ()
whenHasNickAndUser f = do
mn <- sNick <$> getLocal
mu <- sUserName <$> getLocal
when (isJust $ mn >> mu) f
whenOAuthVerified :: HTIG () -> HTIG ()
whenOAuthVerified f = whenM (isJust . sToken <$> getLocal) f
doOAuthAuthentication :: HTIG ()
doOAuthAuthentication = do
mtok <- loadTwitterToken
case mtok of
Just tok -> do
modifyLocal $ \s -> s { sToken = Just tok }
Nothing -> do
Just nick <- sNick <$> getLocal
chan <- oauthChannel "#oauth"
modifyLocal $ \s -> s { sJoinedChannels = chan : sJoinedChannels s }
joinChannel chan (Nick nick)
runSessionInitHook :: HTIG ()
runSessionInitHook = sessionInitHook =<< gsHConfig <$> getGlobal
openUserDatabase :: HTIG ()
openUserDatabase = do
cp <- getUserCacheDir
let dp = cp </> "cache.sqlite"
debug ("opening database" :: String, dp)
conn <- liftIO $ openDatabase dp
modifyLocal $ \s -> s { sDBConn = Just conn }
loadTwitterToken :: HTIG (Maybe Token)
loadTwitterToken = do
makeUserCacheDir
cpath <- getUserCacheDir
let tpath = cpath </> "token"
fexists <- liftIO $ doesFileExist tpath
if fexists
then Just <$> liftIO (decodeFile tpath)
else return Nothing
| nakamuray/htig | HTIG/Session.hs | bsd-3-clause | 6,853 | 0 | 26 | 2,242 | 2,000 | 987 | 1,013 | 167 | 2 |
module Str(
Str,
linesCR, S.stripPrefix,
readFileUTF8,
S.null, S.isPrefixOf, S.drop, S.span, S.length, S.toList, S.all, S.uncons,
ugly, showLength
) where
import qualified Foundation as S
import qualified Foundation.String as S
import qualified Foundation.IO as S
import Data.Tuple.Extra
type Str = S.String
linesCR :: Str -> [Str]
linesCR = S.lines
showLength x = show x
ugly :: S.Integral a => Integer -> a
ugly = S.fromInteger
readFileUTF8 :: FilePath -> IO Str
readFileUTF8 = fmap (fst3 . S.fromBytes S.UTF8) . S.readFile . S.fromString
| ndmitchell/weeder | str/Str-Foundation.hs | bsd-3-clause | 573 | 0 | 11 | 112 | 203 | 119 | 84 | 18 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TupleSections #-}
module Game where
import Prelude hiding (lookup)
import qualified Data.Map.Strict as M
import Data.IORef
import Data.Maybe
import Control.Monad
import Linear.V2
import Types
lookup :: Ord k => M.Map k v -> k -> Maybe v
lookup = flip M.lookup
initialGameState :: GameState
initialGameState = GameState (M.fromList $ whites ++ blacks) Nothing Whites
where whites = map (,White) [ V2 1 0, V2 3 0, V2 5 0, V2 7 0
, V2 0 1, V2 2 1, V2 4 1, V2 6 1
, V2 1 2, V2 3 2, V2 5 2, V2 7 2 ]
blacks = map (,Black) [ V2 0 5, V2 2 5, V2 4 5, V2 6 5
, V2 1 6, V2 3 6, V2 5 6, V2 7 6
, V2 0 7, V2 2 7, V2 4 7, V2 6 7 ]
--where whites = [(V2 4 3, WhiteQueen), (V2 4 7, WhiteQueen)]
-- blacks = map (,Black) [V2 2 5, V2 6 1, V2 5 6]
updateGameState :: IORef GameState -> V2 Int -> IO ()
updateGameState gameStateRef p = do
GameState brd sf trn <- readIORef gameStateRef
let pm = possibleMoves trn brd
save b s t = writeIORef gameStateRef $ GameState b s t
case sf >>= lookup pm >>= findMove p of
Just m -> save (movePawn brd (fromJust sf) m) Nothing (nextTurn trn)
_ -> save brd (Just p) trn
possibleMoves :: Turn -> Board -> PossibleMoves
possibleMoves trn brd = prioritize $ M.mapWithKey allFieldMoves $ turnFilter trn brd
where allFieldMoves p t = fieldMoves p t ++ fieldCaptures p t
prioritize moves = M.filter (not . null)
$ M.map (filter $ \x -> priority x >= maxPriority) moves
where maxPriority = maximum' $ M.map (maximum' . map priority) moves
maximum' l = if null l then 0 else maximum l
priority (Ordinary _) = 1
priority (Capture []) = -1
priority (Capture ps) = 1 + length ps
allowedMove a@(V2 ax ay) = ax `elem` [0..7] && ay `elem` [0..7] && isNothing (M.lookup a brd)
allDirections = [V2 1 1, V2 (-1) 1, V2 (-1) (-1), V2 1 (-1)]
fieldMoves p t = map Ordinary $ filter allowedMove $ uncurry (directional p brd takeWhile) $ case t of
White -> ([V2 1 1, V2 (-1) 1] , 1)
WhiteQueen -> (allDirections , 6)
Black -> ([V2 (-1) (-1), V2 1 (-1)], 1)
BlackQueen -> (allDirections , 6)
fieldCaptures p t = case t of
White -> captures p [Black, BlackQueen] 1
WhiteQueen -> captures p [Black, BlackQueen] 6
Black -> captures p [White, WhiteQueen] 1
BlackQueen -> captures p [White, WhiteQueen] 6
directional p0 b0 f dirs step = concatMap d dirs
where d dir = f (`M.notMember` b0) $ (p0+) . (dir*) . pure <$> [1..step]
captures p en off = rec' brd p []
where f b0 p0 = filter allowedMove
$ concatMap (\a -> directional a b0 takeWhile [vdir p0 a] off)
$ filter (isJust . mfilter (`elem` en) . lookup b0)
$ directional p0 b0 takeWhile1 allDirections off
rec' b0 p0 l = case f b0 p0 of
[] -> [Capture $ reverse l]
m -> concatMap (\m0 -> rec' (movePawn b0 p0 $ Capture [m0]) m0 (m0:l)) m
range :: (Enum a, Num a) => V2 a -> V2 a -> [V2 a]
range (V2 fx fy) (V2 tx ty) = zipWith V2 (r fx tx) (r fy ty)
where r a b = [a, a + signum (b - a) .. b]
movePawn :: Board -> V2 Int -> Move -> Board
movePawn brd from move = M.insert to insert $ foldr M.delete brd seg
where to@(V2 _ toY) = case move of
Ordinary x -> x
Capture xs -> last xs
seg = case move of
Ordinary x -> range from x
Capture xs -> concat $ zipWith range (from : xs) xs
insert = case brd M.! from of
White -> if toY == 7 then WhiteQueen else White
Black -> if toY == 0 then BlackQueen else Black
a -> a
gameScore :: GameState -> String
gameScore (GameState brd _ _) = w ++ " : " ++ b
where w = show $ getScore Whites brd
b = show $ getScore Blacks brd
| korrix/Warcaby | Game.hs | bsd-3-clause | 4,198 | 0 | 19 | 1,468 | 1,752 | 894 | 858 | 79 | 11 |
-- | Main module to manipulate a git repository.
module Data.Git(
-- * Repository
Git
, gitRepoPath
, openRepo
, closeRepo
, withRepo
, findRepository
-- * Most important question
, findObject
-- * Find named elements
-- ** Obtain a list of existing elements
, getHead
, getBranchNames
, getTagNames
, getRemoteNames
, getRemoteBranchNames
, getGitSvnBranchNames
-- ** Querying for existence
, doesHeadExist
, doesTagExist
, doesRemoteHeadExist
-- ** Obtain the references of the elements
, readBranch
, readTag
, readRemoteBranch
, readGitSvnBranch
-- * Git objects
, GitObject(..)
, CommitAuthor(..)
, CommitInfo(..)
, TagInfo(..)
, FileRights
, TreeEntry
-- * Reference conversion
, Ref
, RefSpec( .. )
, toHexString
, toBinary
, fromHexString
, fromHexText
, fromBinary
, toHex
, fromHex
-- * Revisions
, Revision
, revFromString
, resolveRevision
, readAllRemoteBranches
) where
import Data.Git.Object
import Data.Git.Ref
import Data.Git.Repository
import Data.Git.Revision
| Twinside/hit-simple | Data/Git.hs | bsd-3-clause | 1,730 | 0 | 5 | 896 | 179 | 124 | 55 | 44 | 0 |
module Program(
toCProgram,
ILProgram, ilProgram,
ILFunction, ilFunc) where
import Data.Map as M
import CCodeGen
import RPN
import Syntax
data ILProgram = Prog [ILFunction] [RecordDef]
deriving (Show)
ilProgram = Prog
toCProgram :: ILProgram -> CProgram
toCProgram (Prog funcs recDefs) = cProgram defaultImports prototypes funcDefs
where
defaultImports = [cInclude "BasicRuntime.h"]
prototypes = Prelude.map makePrototype funcs
userDefFuncsMap = M.fromList $ zip (Prelude.map fName funcs) (Prelude.map mkFDef funcs)
funcDefMap = M.union builtinMap userDefFuncsMap
constructors = constructorMap recDefs
accessors = accessorMap recDefs
funcDefs = Prelude.map (toCFunc funcDefMap constructors accessors) funcs
constructorMap :: [RecordDef] -> Map String Int
constructorMap recDefs = M.fromList $ zip (Prelude.map constructor recDefs) (Prelude.map numFields recDefs)
accessorMap :: [RecordDef] -> Map String Int
accessorMap recDefs = M.fromList $ concat $ Prelude.map accessors recDefs
data ILFunction = ILF { fName :: String, fArgNames :: [String], fBody :: Expr }
deriving (Show)
ilFunc :: String -> [String] -> Expr -> ILFunction
ilFunc name argNames body = ILF name argNames body
toCFunc :: Map String FDef -> Map String Int -> Map String Int -> ILFunction -> CFunction
toCFunc fDefs conArities accInds (ILF name argNames expr) = cFunc name body
where
vMap = M.fromList $ zip argNames [1..]
body = Prelude.map toCCode $ toRPN fDefs vMap accInds conArities expr
mkFDef :: ILFunction -> FDef
mkFDef (ILF fn args _) = fdef fn (length args)
makePrototype :: ILFunction -> CFuncDeclaration
makePrototype (ILF name _ _) = stdDec name | dillonhuff/IntLang | src/Program.hs | bsd-3-clause | 1,730 | 0 | 11 | 334 | 562 | 294 | 268 | 36 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE ViewPatterns, PatternSynonyms, RankNTypes #-}
module Homework01 (
toDigits,
toDigitsRev,
myReverse,
doubleEveryOther,
sumDigits,
sumList,
sumDigitsFoldWorker,
validate,
checkSum,
hanoi,
) where
-- | Convert a number [like 1234] into a list of individual digits [like [1, 2, 3, 4]]
toDigits :: Integer -> [Integer]
toDigits num = toDigitsWorker 1 num []
-- | Recursive helper for toDigits.
toDigitsWorker :: Integer -> Integer -> [Integer] -> [Integer]
toDigitsWorker place num list
| num <= 0 = list
| otherwise = toDigitsWorker placeVal (num - remainder) (digit:list)
where
placeVal = place*10
remainder = num `mod` placeVal
digit = remainder `div` place
-- | Convert a number [like 1234] into a list of individual reversed digits [like [4, 3, 2, 1]]
toDigitsRev :: Integer -> [Integer]
toDigitsRev num = myReverse (toDigits num)
-- | Reverse a list by recursing through it
myReverse :: [Integer] -> [Integer]
myReverse nums = myReverseWorker nums []
-- | Recursive helper for myReverse
myReverseWorker :: [Integer] -> [Integer] -> [Integer]
myReverseWorker [] newList = newList
myReverseWorker (num:rest) newList = myReverseWorker rest (num:newList)
-- | Double every other number in the list, starting from the right
doubleEveryOther :: [Integer] -> [Integer]
doubleEveryOther nums = doubleEveryOtherReverseWorker (myReverse nums) []
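-- An illustrative example (assumed, derived from the definition above):
--
-- > doubleEveryOther [8, 7, 6, 5] == [16, 7, 12, 5]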
-- | Recursive, reversed helper for doubling every other number in the list, starting from the right
doubleEveryOtherReverseWorker :: [Integer] -> [Integer] -> [Integer]
doubleEveryOtherReverseWorker [] newList = newList
doubleEveryOtherReverseWorker (x:[]) newList = x:newList
doubleEveryOtherReverseWorker (x:(y:zs)) newList = doubleEveryOtherReverseWorker zs ((y * 2):x:newList)
-- | Sum the individual digits from a list of numbers produced by doubleEveryOther
sumDigits :: [Integer] -> Integer
sumDigits nums = foldr sumDigitsFoldWorker 0 nums
-- | Sum Integers from a list, using a right fold
sumList :: [Integer] -> Integer
sumList nums = foldr (+) 0 nums
-- | fold worker for sumDigits.
sumDigitsFoldWorker :: Integer -> Integer -> Integer
sumDigitsFoldWorker newNum priorNum
| newNum < 0 = priorNum + 0
| newNum < 10 = priorNum + newNum
| otherwise = priorNum + (sumList (toDigits newNum))
-- | Validate a credit card number, entered as an Integer
validate :: Integer -> Bool
validate num = (checkSum num) `mod` 10 == 0
-- | Helper to calculate the checksum value
checkSum :: Integer -> Integer
checkSum num = (sumDigits (doubleEveryOther (toDigits num)))
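-- Illustrative checks using the customary Luhn test numbers (assumed
-- examples, not part of the original file):
--
-- > validate 4012888888881881 == True
-- > validate 4012888888881882 == False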
-- | It's an alias for the peg name
type Peg = String
-- | A move is pair of two pegs (from, to)
type Move = (Peg, Peg)
-- | This function takes the number of discs and the three peg names and returns the list of moves to solve the puzzle
hanoi :: Integer -> Peg -> Peg -> Peg -> [Move]
hanoi 1 start dest _ = (start, dest):[]
hanoi discs start dest storage = (hanoi (discs - 1) start storage dest) ++ (start, dest):[] ++ (hanoi (discs - 1) storage dest start)
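-- A small worked example (assumed, derived from the definition above):
--
-- > hanoi 2 "a" "b" "c" == [("a","c"), ("a","b"), ("c","b")]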
--hanoiWorker :: Integer -> Peg -> Peg -> Peg -> [Move] -> [Move]
--hanoiWorker 1 start dest storage pastMoves = ((start, dest) : pastMoves)
--hanoiWorker discs start dest storage pastMoves = (hanoi (discs - 1) start storage dest):()
| jeyoor/haskell-learning-challenge | learn_haskell_github_courses/Cis194/src/Homework01.hs | bsd-3-clause | 3,336 | 0 | 11 | 616 | 799 | 439 | 360 | 53 | 1 |
module Candide.Server (
runCandideDaemon
) where
import Control.Applicative
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import qualified Control.Concurrent.Async.Lifted as AL
import Control.Concurrent.MVar
import Control.Exception (throw)
import Control.Monad
import Control.Monad.Error
import Control.Monad.State.Lazy
import Data.Attoparsec.ByteString.Lazy (Parser)
import Data.Attoparsec.Combinator (eitherP)
import qualified Data.Attoparsec.Lazy as Parser
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.ByteString.Builder (Builder, byteString,
toLazyByteString)
import qualified Data.ByteString.Char8 as BSC
import qualified Data.ByteString.Lazy as L
import qualified Data.HashMap.Strict as H
import Data.List
import Data.Maybe
import Data.Monoid
import Data.Packer
import qualified Data.Set as S
import Data.Time.Clock
import Data.Word
import Database.PostgreSQL.Simple as PG
import Pipes
import Pipes.Attoparsec (parsed)
import qualified Pipes.ByteString as PB
import Pipes.Group (FreeF (..), FreeT (..))
import qualified Pipes.Group as PG
import qualified Pipes.Lift as P
import qualified Pipes.Prelude as P
import System.IO
import System.Log.Logger
import Candide.Core
import Marquise.Classes
import Marquise.Client
import Marquise.Types
import Vaultaire.Types
data ContentsRequest = ContentsRequest Address SourceDict
deriving Show
runCandideDaemon :: String -> Word16 -> String -> String -> Origin -> String -> MVar () -> String -> Integer -> IO (Async ())
runCandideDaemon host port user pass origin namespace shutdown cache_file cache_flush_period = async $ do
infoM "Server.runCandideDaemon" $ "Reading SourceDict cache from " ++ cache_file
init_cache <- withFile cache_file ReadWriteMode $ \h -> do
result <- fromWire <$> BSC.hGetContents h
case result of
Left e -> do
warningM "Server.runCandideDaemon" $
concat ["Error decoding hash file: "
, show e
, " Continuing with empty initial cache"
]
return emptySourceCache
Right cache -> do
debugM "Server.runCandideDaemon" $
concat ["Read "
, show (sizeOfSourceCache cache)
, " hashes from source dict cache."
]
return cache
infoM "Server.runCandideDaemon" "Connecting to Candide"
conn <- candideConnection host port user pass (Just origin)
infoM "Server.runCandideDaemon" "Candide daemon started"
(points_loop, final_cache) <- do
sn <- makeSpoolName namespace
debugM "Server.runCandideDaemon" "Creating spool directories"
createDirectories sn
debugM "Server.runCandideDaemon" "Starting point transmitting thread"
points_loop <- AL.async (sendPoints conn sn shutdown)
currTime <- do
link points_loop
debugM "Server.runCandideDaemon" "Starting contents transmitting thread"
getCurrentTime
final_cache <- sendContents conn sn init_cache cache_file cache_flush_period currTime shutdown
return (points_loop, final_cache)
-- debugM "Server.runCandideDaemon" "Send loop shut down gracefully, writing out cache"
-- S.writeFile cache_file $ toWire final_cache
debugM "Server.runCandideDaemon" "Waiting for points loop thread"
AL.wait points_loop
sendPoints :: PG.Connection -> SpoolName -> MVar () -> IO ()
sendPoints conn sn shutdown = do
nexts <- nextPoints sn
case nexts of
Just (bytes, seal) -> do
debugM "Server.sendPoints" "Got points, starting transmission pipe"
runEffect $ for (breakInToChunks bytes) sendChunk
debugM "Server.sendPoints" "Transmission complete, cleaning up"
seal
Nothing -> threadDelay idleTime
done <- isJust <$> tryReadMVar shutdown
unless done (sendPoints conn sn shutdown)
where
sendChunk chunk = liftIO $ do
let size = show . BSC.length $ chunk
debugM "Server.sendPoints" $ "Sending chunk of " ++ size ++ " bytes."
let points = P.toList $ yield (SimpleBurst chunk) >-> decodeSimple
writeManySimple conn points
sendContents :: PG.Connection
-> SpoolName
-> SourceDictCache
-> String
-> Integer
-> UTCTime
-> MVar ()
-> IO SourceDictCache
sendContents conn sn initial cache_file cache_flush_period flush_time shutdown = do
nexts <- nextContents sn
(final, newFlushTime) <- case nexts of
Just (bytes, seal) -> do
debugM "Server.sendContents" $
concat
[ "Got contents, starting transmission pipe with "
, show $ sizeOfSourceCache initial
, " cached sources."
]
reqs <- parseContentsRequests bytes
notSeen <- filterSeen reqs initial
newHashes <- S.unions <$> forM notSeen (return . S.singleton . hashRequest)
let final' = S.foldl (flip insertSourceCache) initial newHashes
sendSourceDictUpdate conn notSeen
newFlushTime' <- do
debugM "Server.sendContents" "Contents transmission complete, cleaning up."
debugM "Server.sendContents" $
concat
[ "Saw "
, show $ sizeOfSourceCache final' - sizeOfSourceCache initial
, " new sources."
]
seal
currTime <- getCurrentTime
if currTime > flush_time
then do
debugM "Server.setContents" "Performing periodic cache writeout."
BSC.writeFile cache_file $ toWire final'
return $ addUTCTime (fromInteger cache_flush_period) currTime
else do
debugM "Server.sendContents" $ concat ["Next cache flush at ", show flush_time, "."]
return flush_time
return (final', newFlushTime')
Nothing -> do
threadDelay idleTime
return (initial, flush_time)
done <- isJust <$> tryReadMVar shutdown
if done
then return final
else sendContents conn sn final cache_file cache_flush_period newFlushTime shutdown
where
hashRequest (ContentsRequest _ sd) = hashSource sd
seen cache req = memberSourceCache (hashRequest req) cache
filterSeen reqs cache = do
let (prevSeen, notSeen) = partition (seen cache) reqs
forM_ prevSeen $ \(ContentsRequest addr _) ->
liftIO $ debugM "Server.filterSeen" $ "Seen source dict with address " ++ show addr ++ " before, ignoring."
return notSeen
sendSourceDictUpdate conn reqs = do
forM_ reqs $ \(ContentsRequest addr _) ->
liftIO (debugM "Server.sendContents" $ "Sending contents update for " ++ show addr)
writeManyContents conn $ map reqToTuple reqs
reqToTuple (ContentsRequest addr (SourceDict sd)) = (addr, H.toList sd)
parseContentsRequests :: Monad m => L.ByteString -> m [ContentsRequest]
parseContentsRequests bs = P.toListM $
parsed parseContentsRequest (PB.fromLazy bs)
>>= either (throw . fst) return
parseContentsRequest :: Parser ContentsRequest
parseContentsRequest = do
addr <- fromWire <$> Parser.take 8
len <- runUnpacking getWord64LE <$> Parser.take 8
source_dict <- fromWire <$> Parser.take (fromIntegral len)
case ContentsRequest <$> addr <*> source_dict of
Left e -> fail (show e)
Right request -> return request
idleTime :: Int
idleTime = 1000000 -- 1 second
breakInToChunks :: Monad m => L.ByteString -> Producer BSC.ByteString m ()
breakInToChunks bs =
chunkBuilder (parsed parsePoint (PB.fromLazy bs))
>>= either (throw . fst) return
-- Take a producer of (Int, Builder), where Int is the number of bytes in the
-- builder and produce chunks of n bytes.
--
-- This could be done with explicit recursion and next, but, then we would not
-- get to apply a fold over a FreeT stack of producers. This is almost
-- generalizable, at a stretch.
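-- As an illustration of the intended behaviour (an assumed example, not part
-- of the original sources): with a chunk budget of 4 bytes and incoming
-- builders of sizes [3, 3, 3], the first chunk keeps accepting builders until
-- the budget is exceeded (giving a 6 byte chunk) and the remaining builder
-- becomes a second, 3 byte chunk.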
chunkBuilder :: Monad m => Producer (Int, Builder) m r -> Producer BSC.ByteString m r
chunkBuilder = PG.folds (<>) mempty (L.toStrict . toLazyByteString)
-- Fold over each producer of counted Builders, turning it into
-- a contigous strict ByteString ready for transmission.
. builderChunks idealBurstSize
-- Split the builder producer into FreeT
where
builderChunks :: Monad m
=> Int
-- ^ The size to split a stream of builders at
-> Producer (Int, Builder) m r
-- ^ The input producer
-> FreeT (Producer Builder m) m r
-- ^ The FreeT delimited chunks of that producer, split into
-- the desired chunk length
builderChunks max_size p = FreeT $ do
-- Try to grab the next value from the Producer
x <- next p
return $ case x of
Left r -> Pure r
Right (a, p') -> Free $ do
-- Pass the re-joined Producer to go, which will yield values
-- from it until the desired chunk size is reached.
p'' <- go max_size (yield a >> p')
-- The desired chunk size has been reached, loop and try again
-- with the rest of the stream (possibly empty)
return (builderChunks max_size p'')
-- We take a Producer and pass along its values until we've passed along
-- enough bytes (at least the initial bytes_left).
--
-- When done, returns the remainder of the unconsumed Producer
go :: Monad m
=> Int
-> Producer (Int, Builder) m r
-> Producer Builder m (Producer (Int, Builder) m r)
go bytes_left p =
if bytes_left < 0
then return p
else do
x <- lift (next p)
case x of
Left r ->
return . return $ r
Right ((size, builder), p') -> do
yield builder
go (bytes_left - size) p'
-- Parse a single point, returning the size of the point and the bytes as a
-- builder.
parsePoint :: Parser (Int, Builder)
parsePoint = do
packet <- Parser.take 24
case extendedSize packet of
Just len -> do
-- We must ensure that we get this many bytes now, or attoparsec
-- will just backtrack on us. We do this with a dummy parser inside
-- an eitherP
--
-- This is only to get good error messages.
extended <- eitherP (Parser.take len) (return ())
case extended of
Left bytes ->
let b = byteString packet <> byteString bytes
in return (24 + len, b)
Right () ->
fail "not enough bytes in alleged extended burst"
Nothing ->
return (24, byteString packet)
-- Return the size of the extended segment, if the point is an extended one.
extendedSize :: BSC.ByteString -> Maybe Int
extendedSize packet = flip runUnpacking packet $ do
addr <- Address <$> getWord64LE
if isAddressExtended addr
then do
unpackSkip 8
Just . fromIntegral <$> getWord64LE -- length
else
return Nothing
-- A burst should be, at maximum, very close to this size, unless the user
-- decides to send a very long extended point.
idealBurstSize :: Int
idealBurstSize = 1048576
| anchor/candide | lib/Candide/Server.hs | bsd-3-clause | 12,628 | 4 | 23 | 4,394 | 2,594 | 1,298 | 1,296 | 227 | 4 |
module Test4
( DAbs
, DAll(..)
, DRecAll(..)
, DRec(DRec) -- no fields
, DSome(One, Two)
, foo
, bar
) where
foo x = 1 :: Int
bar y = 2 :: Int
lam :: Int -> Int
lam x = 2 * x
data DAbs = NotExported
data DAll = Me | AndMe
data DRec = DRec { drNotExported :: Int }
data DRecAll = DRecAll { drA :: Int, drB :: Int }
data DSome = One String | Two Int Int | NotThree
| robinp/nemnem | tsrc/Test4.hs | bsd-3-clause | 383 | 0 | 8 | 107 | 161 | 99 | 62 | 21 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Sort3Spec where
import qualified Sort3
import Test.Hspec (Spec, hspec, describe, shouldSatisfy)
import Test.Hspec.QuickCheck (prop)
-- | Required for auto-discovery.
spec :: Spec
spec =
describe "Sort3" $ do
prop "sort3 sorts correctly" $ do
\(triple :: (Int, Int, Int)) ->
let (a0', a1', a2') = Sort3.sort3 triple
in a0' <= a1' && a1' <= a2'
main :: IO ()
main = hspec spec
| FranklinChen/twenty-four-days2015-of-hackage | test/Sort3Spec.hs | bsd-3-clause | 452 | 0 | 17 | 101 | 147 | 82 | 65 | 14 | 1 |
module Eval(eval) where
import Syntax
eval :: Expr -> Env -> Value
eval (ELit (LInt n)) _ = VInt n
eval (ELit (LBool b)) _ = VBool b
eval (EOp op e1 e2) env = binop op e1 e2 env
eval (EVar x) env =
case lookupEnv x env of
VThunk (EFix f e) env' -> eval (EFix f e) env'
v -> v
eval (ELam x e) env = VClos x e env
eval (EApp e1 e2) env = v2 `seq` eval e3 ((x, v2):env')
where
VClos x e3 env' = eval e1 env
v2 = eval e2 env
eval (ELet x e1 e2) env = eval e2 env'
where
v = eval e1 env
env' = env `ext` (x,v)
eval (EFix f e) env = eval e env'
where env' = env `ext` (f, (VThunk (EFix f e) env))
eval (EIf e1 e2 e3) env = if b then eval e2 env else eval e3 env
where VBool b = eval e1 env
binop :: BinOp -> Expr -> Expr -> Env -> Value
binop op e1 e2 env =
case (op, eval e1 env, eval e2 env) of
(Add, VInt n1, VInt n2) -> VInt $ n1 + n2
(Sub, VInt n1, VInt n2) -> VInt $ n1 - n2
(Mul, VInt n1, VInt n2) -> VInt $ n1 * n2
(And, VBool b1, VBool b2) -> VBool $ b1 && b2
(Or, VBool b1, VBool b2) -> VBool $ b1 || b2
(Eq, VInt n1, VInt n2) -> VBool $ n1 == n2
(Eq, VBool b1, VBool b2) -> VBool $ b1 == b2
(Gt, VInt n1, VInt n2) -> VBool $ n1 > n2
(Lt, VInt n1, VInt n2) -> VBool $ n1 < n2
(_,_,_) -> error $ "FATAL ERROR: " ++ show op ++ ":" ++ show e1 ++ ", " ++ show e2
-- Last one should be unreachable when the expression is typed.
| succzero/fino | src/Eval.hs | bsd-3-clause | 1,511 | 0 | 13 | 506 | 798 | 408 | 390 | 34 | 10 |
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving, CPP #-}
{- Note: [The need for Ar.hs]
Building `-staticlib` required the presence of libtool, and was as such
restricted to Mach-O only. As libtool on macOS and GNU libtool are very
different, there was no simple portable way to support this.
libtool for static archives essentially concatenates the input archives,
adds the input objects, and creates a symbol index. Using `ar` for this
task fails as the various `ar` implementations (BSD, GNU, LLVM, ...) do
not provide the same features across platforms (e.g. index-prefixed
retrieval of objects with the same name).
As archives are structurally rather simple, we can just build them with
Haskell directly and use ranlib on the final result to get the symbol
index. This should allow us to work around the differences in behaviour
and availability of libtool across different platforms.
-}
module Ar
(ArchiveEntry(..)
,Archive(..)
,afilter
,parseAr
,loadAr
,loadObj
,writeBSDAr
,writeGNUAr
,isBSDSymdef
,isGNUSymdef
)
where
import GhcPrelude
import Data.List (mapAccumL, isPrefixOf)
import Data.Monoid ((<>))
import Data.Binary.Get
import Data.Binary.Put
import Control.Monad
import Control.Applicative
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Lazy as L
#if !defined(mingw32_HOST_OS)
import qualified System.Posix.Files as POSIX
#endif
import System.FilePath (takeFileName)
data ArchiveEntry = ArchiveEntry
{ filename :: String -- ^ File name.
, filetime :: Int -- ^ File modification time.
, fileown :: Int -- ^ File owner.
, filegrp :: Int -- ^ File group.
, filemode :: Int -- ^ File mode.
, filesize :: Int -- ^ File size.
, filedata :: B.ByteString -- ^ File bytes.
} deriving (Eq, Show)
newtype Archive = Archive [ArchiveEntry]
deriving (Eq, Show, Semigroup, Monoid)
afilter :: (ArchiveEntry -> Bool) -> Archive -> Archive
afilter f (Archive xs) = Archive (filter f xs)
isBSDSymdef, isGNUSymdef :: ArchiveEntry -> Bool
isBSDSymdef a = "__.SYMDEF" `isPrefixOf` (filename a)
isGNUSymdef a = "/" == (filename a)
-- | Archives have numeric values padded with '\x20' to the right.
getPaddedInt :: B.ByteString -> Int
getPaddedInt = read . C.unpack . C.takeWhile (/= '\x20')
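-- For example (illustrative): @getPaddedInt (C.pack "42   ")@ is @42@.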
putPaddedInt :: Int -> Int -> Put
putPaddedInt padding i = putPaddedString '\x20' padding (show i)
putPaddedString :: Char -> Int -> String -> Put
putPaddedString pad padding s = putByteString . C.pack . take padding $ s `mappend` (repeat pad)
getBSDArchEntries :: Get [ArchiveEntry]
getBSDArchEntries = do
empty <- isEmpty
if empty then
return []
else do
name <- getByteString 16
when ('/' `C.elem` name && C.take 3 name /= "#1/") $
fail "Looks like GNU Archive"
time <- getPaddedInt <$> getByteString 12
own <- getPaddedInt <$> getByteString 6
grp <- getPaddedInt <$> getByteString 6
mode <- getPaddedInt <$> getByteString 8
st_size <- getPaddedInt <$> getByteString 10
end <- getByteString 2
when (end /= "\x60\x0a") $
fail ("[BSD Archive] Invalid archive header end marker for name: " ++
C.unpack name)
off1 <- liftM fromIntegral bytesRead :: Get Int
      -- BSD stores extended filenames by writing #1/<length> into the
      -- name field; the first @length@ bytes of the payload then hold the
      -- file name, so the payload size is filesize + file name length.
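      -- For example (illustrative): a name field of "#1/16" means that the
      -- first 16 bytes of the payload hold the actual file name.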
name <- if C.unpack (C.take 3 name) == "#1/" then
liftM (C.unpack . C.takeWhile (/= '\0')) (getByteString $ read $ C.unpack $ C.drop 3 name)
else
return $ C.unpack $ C.takeWhile (/= ' ') name
off2 <- liftM fromIntegral bytesRead :: Get Int
file <- getByteString (st_size - (off2 - off1))
-- data sections are two byte aligned (see #15396)
when (odd st_size) $
void (getByteString 1)
rest <- getBSDArchEntries
return $ (ArchiveEntry name time own grp mode (st_size - (off2 - off1)) file) : rest
-- | GNU Archives feature a special '//' entry that contains the
-- extended names. Those are referred to as /<num>, where num is the
-- offset into the '//' entry.
-- In addition, filenames are terminated with '/' in the archive.
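-- For example (illustrative, not taken from a real archive): if the "//"
-- entry's data is "some-very-long-object-name.o/\n", then a member whose
-- header name field is "/0" refers to "some-very-long-object-name.o".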
getGNUArchEntries :: Maybe ArchiveEntry -> Get [ArchiveEntry]
getGNUArchEntries extInfo = do
empty <- isEmpty
if empty
then return []
else
do
name <- getByteString 16
time <- getPaddedInt <$> getByteString 12
own <- getPaddedInt <$> getByteString 6
grp <- getPaddedInt <$> getByteString 6
mode <- getPaddedInt <$> getByteString 8
st_size <- getPaddedInt <$> getByteString 10
end <- getByteString 2
when (end /= "\x60\x0a") $
fail ("[BSD Archive] Invalid archive header end marker for name: " ++
C.unpack name)
file <- getByteString st_size
-- data sections are two byte aligned (see #15396)
when (odd st_size) $
void (getByteString 1)
name <- return . C.unpack $
if C.unpack (C.take 1 name) == "/"
then case C.takeWhile (/= ' ') name of
                         name@"/"  -> name -- symbol table
                         name@"//" -> name -- extended file names table
                         name      -> getExtName extInfo (read . C.unpack $ C.drop 1 name)
else C.takeWhile (/= '/') name
case name of
"/" -> getGNUArchEntries extInfo
"//" -> getGNUArchEntries (Just (ArchiveEntry name time own grp mode st_size file))
_ -> (ArchiveEntry name time own grp mode st_size file :) <$> getGNUArchEntries extInfo
where
getExtName :: Maybe ArchiveEntry -> Int -> B.ByteString
getExtName Nothing _ = error "Invalid extended filename reference."
getExtName (Just info) offset = C.takeWhile (/= '/') . C.drop offset $ filedata info
-- | Put an archive entry. This assumes that the entries
-- have been preprocessed to account for the extended file name
-- table section "//" (as for GNU archives), or that the names
-- have been moved into the payload (as for BSD archives).
putArchEntry :: ArchiveEntry -> PutM ()
putArchEntry (ArchiveEntry name time own grp mode st_size file) = do
putPaddedString ' ' 16 name
putPaddedInt 12 time
putPaddedInt 6 own
putPaddedInt 6 grp
putPaddedInt 8 mode
putPaddedInt 10 (st_size + pad)
putByteString "\x60\x0a"
putByteString file
when (pad == 1) $
putWord8 0x0a
where
pad = st_size `mod` 2
getArchMagic :: Get ()
getArchMagic = do
magic <- liftM C.unpack $ getByteString 8
if magic /= "!<arch>\n"
then fail $ "Invalid magic number " ++ show magic
else return ()
putArchMagic :: Put
putArchMagic = putByteString $ C.pack "!<arch>\n"
getArch :: Get Archive
getArch = Archive <$> do
getArchMagic
getBSDArchEntries <|> getGNUArchEntries Nothing
putBSDArch :: Archive -> PutM ()
putBSDArch (Archive as) = do
putArchMagic
mapM_ putArchEntry (processEntries as)
where
padStr pad size str = take size $ str <> repeat pad
nameSize name = case length name `divMod` 4 of
(n, 0) -> 4 * n
(n, _) -> 4 * (n + 1)
needExt name = length name > 16 || ' ' `elem` name
processEntry :: ArchiveEntry -> ArchiveEntry
processEntry archive@(ArchiveEntry name _ _ _ _ st_size _)
| needExt name = archive { filename = "#1/" <> show sz
, filedata = C.pack (padStr '\0' sz name) <> filedata archive
, filesize = st_size + sz }
| otherwise = archive
where sz = nameSize name
processEntries = map processEntry
putGNUArch :: Archive -> PutM ()
putGNUArch (Archive as) = do
putArchMagic
mapM_ putArchEntry (processEntries as)
where
processEntry :: ArchiveEntry -> ArchiveEntry -> (ArchiveEntry, ArchiveEntry)
processEntry extInfo archive@(ArchiveEntry name _ _ _ _ _ _)
| length name > 15 = ( extInfo { filesize = filesize extInfo + length name + 2
, filedata = filedata extInfo <> C.pack name <> "/\n" }
, archive { filename = "/" <> show (filesize extInfo) } )
| otherwise = ( extInfo, archive { filename = name <> "/" } )
processEntries :: [ArchiveEntry] -> [ArchiveEntry]
processEntries =
uncurry (:) . mapAccumL processEntry (ArchiveEntry "//" 0 0 0 0 0 mempty)
parseAr :: B.ByteString -> Archive
parseAr = runGet getArch . L.fromChunks . pure
writeBSDAr, writeGNUAr :: FilePath -> Archive -> IO ()
writeBSDAr fp = L.writeFile fp . runPut . putBSDArch
writeGNUAr fp = L.writeFile fp . runPut . putGNUArch
loadAr :: FilePath -> IO Archive
loadAr fp = parseAr <$> B.readFile fp
loadObj :: FilePath -> IO ArchiveEntry
loadObj fp = do
payload <- B.readFile fp
(modt, own, grp, mode) <- fileInfo fp
return $ ArchiveEntry
(takeFileName fp) modt own grp mode
(B.length payload) payload
-- | Take a filePath and return (mod time, own, grp, mode in decimal)
fileInfo :: FilePath -> IO ( Int, Int, Int, Int) -- ^ mod time, own, grp, mode (in decimal)
#if defined(mingw32_HOST_OS)
-- On Windows, mod time, owner, group and mode are all zero.
fileInfo _ = pure (0,0,0,0)
#else
fileInfo fp = go <$> POSIX.getFileStatus fp
where go status = ( fromEnum $ POSIX.modificationTime status
, fromIntegral $ POSIX.fileOwner status
, fromIntegral $ POSIX.fileGroup status
, oct2dec . fromIntegral $ POSIX.fileMode status
)
oct2dec :: Int -> Int
oct2dec = foldl' (\a b -> a * 10 + b) 0 . reverse . dec 8
where dec _ 0 = []
dec b i = let (rest, last) = i `quotRem` b
in last:dec b rest
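-- For illustration (an assumed example): a POSIX mode of 0o644, i.e. 420 in
-- decimal, is turned into the decimal number 644 by oct2dec.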
#endif
| sdiehl/ghc | compiler/main/Ar.hs | bsd-3-clause | 10,051 | 0 | 20 | 2,713 | 2,466 | 1,267 | 1,199 | 187 | 8 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
module DataStore.QueueStore
( enqueue
, dequeue
, dequeueAll
) where
import qualified Data.Default as Default
import qualified Data.Serialize as Serialize
import MonadImports
import Aliases
import qualified DataStore.Job as Job
import qualified GithubWebhook.Types.Repo as Repo
import qualified Utils as U
import Database.Persist
import Database.Persist.TH
import Database.Persist.Sqlite
import Control.Monad.Logger (runStderrLoggingT)
enqueue :: ConnectionPool -> Job.Job -> EIO ()
enqueue connectionPool job = liftIO . runStderrLoggingT $ do
runSqlPool (insert job) connectionPool
return ()
dequeue :: ConnectionPool -> Repo.Repo -> EIO Job.Job
dequeue connectionPool repo = liftIO . runStderrLoggingT $ do
let repoID = fromIntegral $ Repo.id repo
let action = selectList [Job.JobRepoID ==. repoID] []
jobs <- runSqlPool action connectionPool
when (null jobs) $ fail "No jobs to dequeue"
let Entity key job = head jobs
runSqlPool (delete key) connectionPool
return job
dequeueAll :: ConnectionPool -> Int -> EIO [Job.Job]
dequeueAll connectionPool repoID = liftIO . runStderrLoggingT $ do
let action = selectList [Job.JobRepoID ==. repoID] []
jobs <- runSqlPool action connectionPool
mapM_ (\(Entity key job) -> runSqlPool (delete key) connectionPool) jobs
return $ map (\(Entity key job) -> job) jobs
| bgwines/hueue | src/DataStore/QueueStore.hs | bsd-3-clause | 1,597 | 0 | 14 | 270 | 446 | 233 | 213 | 41 | 1 |
-------------------------------------------------------------------------------
--
-- | Dynamic flags
--
-- Most flags are dynamic flags, which means they can change from compilation
-- to compilation using @OPTIONS_GHC@ pragmas, and in a multi-session GHC each
-- session can be using different dynamic flags. Dynamic flags can also be set
-- at the prompt in GHCi.
--
-- (c) The University of Glasgow 2005
--
-------------------------------------------------------------------------------
module DynFlags (
-- * Dynamic flags and associated configuration types
DynFlag(..),
WarningFlag(..),
ExtensionFlag(..),
LogAction,
ProfAuto(..),
glasgowExtsFlags,
dopt,
dopt_set,
dopt_unset,
wopt,
wopt_set,
wopt_unset,
xopt,
xopt_set,
xopt_unset,
DynFlags(..),
RtsOptsEnabled(..),
HscTarget(..), isObjectTarget, defaultObjectTarget,
targetRetainsAllBindings,
GhcMode(..), isOneShot,
GhcLink(..), isNoLink,
PackageFlag(..),
Option(..), showOpt,
DynLibLoader(..),
fFlags, fWarningFlags, fLangFlags, xFlags,
wayNames, dynFlagDependencies,
-- ** Safe Haskell
SafeHaskellMode(..),
safeHaskellOn, safeImportsOn, safeLanguageOn, safeInferOn,
packageTrustOn,
safeDirectImpsReq, safeImplicitImpsReq,
unsafeFlags,
-- ** System tool settings and locations
Settings(..),
targetPlatform,
ghcUsagePath, ghciUsagePath, topDir, tmpDir, rawSettings,
extraGccViaCFlags, systemPackageConfig,
pgm_L, pgm_P, pgm_F, pgm_c, pgm_s, pgm_a, pgm_l, pgm_dll, pgm_T,
pgm_sysman, pgm_windres, pgm_lo, pgm_lc,
opt_L, opt_P, opt_F, opt_c, opt_a, opt_l,
opt_windres, opt_lo, opt_lc,
-- ** Manipulating DynFlags
defaultDynFlags, -- Settings -> DynFlags
initDynFlags, -- DynFlags -> IO DynFlags
defaultLogAction,
getOpts, -- DynFlags -> (DynFlags -> [a]) -> [a]
getVerbFlags,
updOptLevel,
setTmpDir,
setPackageName,
doingTickyProfiling,
-- ** Parsing DynFlags
parseDynamicFlagsCmdLine,
parseDynamicFilePragma,
allFlags,
supportedLanguagesAndExtensions,
-- ** DynFlag C compiler options
picCCOpts,
-- * Configuration of the stg-to-stg passes
StgToDo(..),
getStgToDo,
-- * Compiler configuration suitable for display to the user
compilerInfo
#ifdef GHCI
-- Only in stage 2 can we be sure that the RTS
-- exposes the appropriate runtime boolean
, rtsIsProfiled
#endif
) where
#include "HsVersions.h"
import Platform
import Module
import PackageConfig
import PrelNames ( mAIN )
import StaticFlags
import {-# SOURCE #-} Packages (PackageState)
import DriverPhases ( Phase(..), phaseInputExt )
import Config
import CmdLineParser
import Constants ( mAX_CONTEXT_REDUCTION_DEPTH )
import Panic
import Util
import Maybes ( orElse )
import SrcLoc
import FastString
import Outputable
#ifdef GHCI
import Foreign.C ( CInt(..) )
#endif
import {-# SOURCE #-} ErrUtils ( Severity(..), Message, mkLocMessage )
#ifdef GHCI
import System.IO.Unsafe ( unsafePerformIO )
#endif
import Data.IORef
import Control.Monad ( when )
import Data.Char
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import System.FilePath
import System.IO ( stderr, hPutChar )
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
-- -----------------------------------------------------------------------------
-- DynFlags
-- | Enumerates the simple on-or-off dynamic flags
data DynFlag
-- debugging flags
= Opt_D_dump_cmm
| Opt_D_dump_raw_cmm
| Opt_D_dump_cmmz
| Opt_D_dump_cmmz_pretty
-- All of the cmmz subflags (there are a lot!) Automatically
-- enabled if you run -ddump-cmmz
| Opt_D_dump_cmmz_cbe
| Opt_D_dump_cmmz_proc
| Opt_D_dump_cmmz_spills
| Opt_D_dump_cmmz_rewrite
| Opt_D_dump_cmmz_dead
| Opt_D_dump_cmmz_stub
| Opt_D_dump_cmmz_sp
| Opt_D_dump_cmmz_procmap
| Opt_D_dump_cmmz_split
| Opt_D_dump_cmmz_lower
| Opt_D_dump_cmmz_info
| Opt_D_dump_cmmz_cafs
-- end cmmz subflags
| Opt_D_dump_cps_cmm
| Opt_D_dump_cvt_cmm
| Opt_D_dump_asm
| Opt_D_dump_asm_native
| Opt_D_dump_asm_liveness
| Opt_D_dump_asm_coalesce
| Opt_D_dump_asm_regalloc
| Opt_D_dump_asm_regalloc_stages
| Opt_D_dump_asm_conflicts
| Opt_D_dump_asm_stats
| Opt_D_dump_asm_expanded
| Opt_D_dump_llvm
| Opt_D_dump_core_stats
| Opt_D_dump_cpranal
| Opt_D_dump_deriv
| Opt_D_dump_ds
| Opt_D_dump_flatC
| Opt_D_dump_foreign
| Opt_D_dump_inlinings
| Opt_D_dump_rule_firings
| Opt_D_dump_rule_rewrites
| Opt_D_dump_occur_anal
| Opt_D_dump_parsed
| Opt_D_dump_rn
| Opt_D_dump_core_pipeline -- TODO FIXME: dump after simplifier stats
| Opt_D_dump_simpl
| Opt_D_dump_simpl_iterations
| Opt_D_dump_simpl_phases
| Opt_D_dump_spec
| Opt_D_dump_prep
| Opt_D_dump_stg
| Opt_D_dump_stranal
| Opt_D_dump_tc
| Opt_D_dump_types
| Opt_D_dump_rules
| Opt_D_dump_cse
| Opt_D_dump_worker_wrapper
| Opt_D_dump_rn_trace
| Opt_D_dump_rn_stats
| Opt_D_dump_opt_cmm
| Opt_D_dump_simpl_stats
| Opt_D_dump_cs_trace -- Constraint solver in type checker
| Opt_D_dump_tc_trace
| Opt_D_dump_if_trace
| Opt_D_dump_vt_trace
| Opt_D_dump_splices
| Opt_D_dump_BCOs
| Opt_D_dump_vect
| Opt_D_dump_ticked
| Opt_D_dump_rtti
| Opt_D_source_stats
| Opt_D_verbose_core2core
| Opt_D_verbose_stg2stg
| Opt_D_dump_hi
| Opt_D_dump_hi_diffs
| Opt_D_dump_minimal_imports
| Opt_D_dump_mod_cycles
| Opt_D_dump_view_pattern_commoning
| Opt_D_faststring_stats
| Opt_DumpToFile -- ^ Append dump output to files instead of stdout.
| Opt_D_no_debug_output
| Opt_DoCoreLinting
| Opt_DoStgLinting
| Opt_DoCmmLinting
| Opt_DoAsmLinting
| Opt_NoLlvmMangler
| Opt_WarnIsError -- -Werror; makes warnings fatal
| Opt_PrintExplicitForalls
-- optimisation opts
| Opt_Strictness
| Opt_FullLaziness
| Opt_FloatIn
| Opt_Specialise
| Opt_StaticArgumentTransformation
| Opt_CSE
| Opt_LiberateCase
| Opt_SpecConstr
| Opt_DoLambdaEtaExpansion
| Opt_IgnoreAsserts
| Opt_DoEtaReduction
| Opt_CaseMerge
| Opt_UnboxStrictFields
| Opt_DictsCheap
| Opt_EnableRewriteRules -- Apply rewrite rules during simplification
| Opt_Vectorise
| Opt_RegsGraph -- do graph coloring register allocation
| Opt_RegsIterative -- do iterative coalescing graph coloring register allocation
| Opt_PedanticBottoms -- Be picky about how we treat bottom
-- Interface files
| Opt_IgnoreInterfacePragmas
| Opt_OmitInterfacePragmas
| Opt_ExposeAllUnfoldings
-- profiling opts
| Opt_AutoSccsOnIndividualCafs
| Opt_ProfCountEntries
-- misc opts
| Opt_Pp
| Opt_ForceRecomp
| Opt_ExcessPrecision
| Opt_EagerBlackHoling
| Opt_ReadUserPackageConf
| Opt_NoHsMain
| Opt_SplitObjs
| Opt_StgStats
| Opt_HideAllPackages
| Opt_PrintBindResult
| Opt_Haddock
| Opt_HaddockOptions
| Opt_Hpc_No_Auto
| Opt_BreakOnException
| Opt_BreakOnError
| Opt_PrintEvldWithShow
| Opt_PrintBindContents
| Opt_GenManifest
| Opt_EmbedManifest
| Opt_EmitExternalCore
| Opt_SharedImplib
| Opt_BuildingCabalPackage
| Opt_SSE2
| Opt_SSE4_2
| Opt_GhciSandbox
| Opt_GhciHistory
| Opt_HelpfulErrors
-- temporary flags
| Opt_RunCPS
| Opt_RunCPSZ
| Opt_AutoLinkPackages
| Opt_ImplicitImportQualified
| Opt_TryNewCodeGen
-- keeping stuff
| Opt_KeepHiDiffs
| Opt_KeepHcFiles
| Opt_KeepSFiles
| Opt_KeepTmpFiles
| Opt_KeepRawTokenStream
| Opt_KeepLlvmFiles
-- safe haskell flags
| Opt_DistrustAllPackages
| Opt_PackageTrust
deriving (Eq, Show, Enum)
data WarningFlag =
Opt_WarnDuplicateExports
| Opt_WarnHiShadows
| Opt_WarnImplicitPrelude
| Opt_WarnIncompletePatterns
| Opt_WarnIncompleteUniPatterns
| Opt_WarnIncompletePatternsRecUpd
| Opt_WarnMissingFields
| Opt_WarnMissingImportList
| Opt_WarnMissingMethods
| Opt_WarnMissingSigs
| Opt_WarnMissingLocalSigs
| Opt_WarnNameShadowing
| Opt_WarnOverlappingPatterns
| Opt_WarnTypeDefaults
| Opt_WarnMonomorphism
| Opt_WarnUnusedBinds
| Opt_WarnUnusedImports
| Opt_WarnUnusedMatches
| Opt_WarnWarningsDeprecations
| Opt_WarnDeprecatedFlags
| Opt_WarnDodgyExports
| Opt_WarnDodgyImports
| Opt_WarnOrphans
| Opt_WarnAutoOrphans
| Opt_WarnIdentities
| Opt_WarnTabs
| Opt_WarnUnrecognisedPragmas
| Opt_WarnDodgyForeignImports
| Opt_WarnLazyUnliftedBindings
| Opt_WarnUnusedDoBind
| Opt_WarnWrongDoBind
| Opt_WarnAlternativeLayoutRuleTransitional
| Opt_WarnUnsafe
| Opt_WarnSafe
deriving (Eq, Show, Enum)
data Language = Haskell98 | Haskell2010
deriving Enum
-- | The various Safe Haskell modes
data SafeHaskellMode
= Sf_None
| Sf_Unsafe
| Sf_Trustworthy
| Sf_Safe
| Sf_SafeInfered
deriving (Eq)
instance Outputable SafeHaskellMode where
ppr Sf_None = ptext $ sLit "None"
ppr Sf_Unsafe = ptext $ sLit "Unsafe"
ppr Sf_Trustworthy = ptext $ sLit "Trustworthy"
ppr Sf_Safe = ptext $ sLit "Safe"
ppr Sf_SafeInfered = ptext $ sLit "Safe-Infered"
data ExtensionFlag
= Opt_Cpp
| Opt_OverlappingInstances
| Opt_UndecidableInstances
| Opt_IncoherentInstances
| Opt_MonomorphismRestriction
| Opt_MonoPatBinds
| Opt_MonoLocalBinds
| Opt_RelaxedPolyRec -- Deprecated
| Opt_ExtendedDefaultRules -- Use GHC's extended rules for defaulting
| Opt_ForeignFunctionInterface
| Opt_UnliftedFFITypes
| Opt_InterruptibleFFI
| Opt_CApiFFI
| Opt_GHCForeignImportPrim
| Opt_ParallelArrays -- Syntactic support for parallel arrays
| Opt_Arrows -- Arrow-notation syntax
| Opt_TemplateHaskell
| Opt_QuasiQuotes
| Opt_ImplicitParams
| Opt_ImplicitPrelude
| Opt_ScopedTypeVariables
| Opt_UnboxedTuples
| Opt_BangPatterns
| Opt_TypeFamilies
| Opt_OverloadedStrings
| Opt_DisambiguateRecordFields
| Opt_RecordWildCards
| Opt_RecordPuns
| Opt_ViewPatterns
| Opt_GADTs
| Opt_GADTSyntax
| Opt_NPlusKPatterns
| Opt_DoAndIfThenElse
| Opt_RebindableSyntax
| Opt_ConstraintKinds
| Opt_PolyKinds -- Kind polymorphism
| Opt_DataKinds -- Datatype promotion
| Opt_StandaloneDeriving
| Opt_DeriveDataTypeable
| Opt_DeriveFunctor
| Opt_DeriveTraversable
| Opt_DeriveFoldable
| Opt_DeriveGeneric -- Allow deriving Generic/1
| Opt_DefaultSignatures -- Allow extra signatures for defmeths
| Opt_TypeSynonymInstances
| Opt_FlexibleContexts
| Opt_FlexibleInstances
| Opt_ConstrainedClassMethods
| Opt_MultiParamTypeClasses
| Opt_FunctionalDependencies
| Opt_UnicodeSyntax
| Opt_PolymorphicComponents
| Opt_ExistentialQuantification
| Opt_MagicHash
| Opt_EmptyDataDecls
| Opt_KindSignatures
| Opt_ParallelListComp
| Opt_TransformListComp
| Opt_MonadComprehensions
| Opt_GeneralizedNewtypeDeriving
| Opt_RecursiveDo
| Opt_DoRec
| Opt_PostfixOperators
| Opt_TupleSections
| Opt_PatternGuards
| Opt_LiberalTypeSynonyms
| Opt_Rank2Types
| Opt_RankNTypes
| Opt_ImpredicativeTypes
| Opt_TypeOperators
| Opt_PackageImports
| Opt_ExplicitForAll
| Opt_AlternativeLayoutRule
| Opt_AlternativeLayoutRuleTransitional
| Opt_DatatypeContexts
| Opt_NondecreasingIndentation
| Opt_RelaxedLayout
| Opt_TraditionalRecordSyntax
| Opt_ApplicativeFix
deriving (Eq, Enum, Show)
-- | Contains not only a collection of 'DynFlag's but also a plethora of
-- information relating to the compilation of a single file or GHC session
data DynFlags = DynFlags {
ghcMode :: GhcMode,
ghcLink :: GhcLink,
hscTarget :: HscTarget,
settings :: Settings,
hscOutName :: String, -- ^ Name of the output file
extCoreName :: String, -- ^ Name of the .hcr output file
verbosity :: Int, -- ^ Verbosity level: see Note [Verbosity levels]
optLevel :: Int, -- ^ Optimisation level
simplPhases :: Int, -- ^ Number of simplifier phases
maxSimplIterations :: Int, -- ^ Max simplifier iterations
shouldDumpSimplPhase :: Maybe String,
ruleCheck :: Maybe String,
strictnessBefore :: [Int], -- ^ Additional demand analysis
simplTickFactor :: Int, -- ^ Multiplier for simplifier ticks
specConstrThreshold :: Maybe Int, -- ^ Threshold for SpecConstr
specConstrCount :: Maybe Int, -- ^ Max number of specialisations for any one function
liberateCaseThreshold :: Maybe Int, -- ^ Threshold for LiberateCase
floatLamArgs :: Maybe Int, -- ^ Arg count for lambda floating
-- See CoreMonad.FloatOutSwitches
cmdlineHcIncludes :: [String], -- ^ @\-\#includes@
importPaths :: [FilePath],
mainModIs :: Module,
mainFunIs :: Maybe String,
ctxtStkDepth :: Int, -- ^ Typechecker context stack depth
thisPackage :: PackageId, -- ^ name of package currently being compiled
-- ways
ways :: [Way], -- ^ Way flags from the command line
buildTag :: String, -- ^ The global \"way\" (e.g. \"p\" for prof)
rtsBuildTag :: String, -- ^ The RTS \"way\"
-- For object splitting
splitInfo :: Maybe (String,Int),
-- paths etc.
objectDir :: Maybe String,
dylibInstallName :: Maybe String,
hiDir :: Maybe String,
stubDir :: Maybe String,
dumpDir :: Maybe String,
objectSuf :: String,
hcSuf :: String,
hiSuf :: String,
outputFile :: Maybe String,
outputHi :: Maybe String,
dynLibLoader :: DynLibLoader,
-- | This is set by 'DriverPipeline.runPipeline' based on where
-- its output is going.
dumpPrefix :: Maybe FilePath,
-- | Override the 'dumpPrefix' set by 'DriverPipeline.runPipeline'.
-- Set by @-ddump-file-prefix@
dumpPrefixForce :: Maybe FilePath,
includePaths :: [String],
libraryPaths :: [String],
frameworkPaths :: [String], -- used on darwin only
cmdlineFrameworks :: [String], -- ditto
rtsOpts :: Maybe String,
rtsOptsEnabled :: RtsOptsEnabled,
hpcDir :: String, -- ^ Path to store the .mix files
-- Plugins
pluginModNames :: [ModuleName],
pluginModNameOpts :: [(ModuleName,String)],
-- For ghc -M
depMakefile :: FilePath,
depIncludePkgDeps :: Bool,
depExcludeMods :: [ModuleName],
depSuffixes :: [String],
-- Package flags
extraPkgConfs :: [FilePath],
-- ^ The @-package-conf@ flags given on the command line, in the order
-- they appeared.
packageFlags :: [PackageFlag],
-- ^ The @-package@ and @-hide-package@ flags from the command-line
-- Package state
-- NB. do not modify this field, it is calculated by
-- Packages.initPackages and Packages.updatePackages.
pkgDatabase :: Maybe [PackageConfig],
pkgState :: PackageState,
-- Temporary files
-- These have to be IORefs, because the defaultCleanupHandler needs to
-- know what to clean when an exception happens
filesToClean :: IORef [FilePath],
dirsToClean :: IORef (Map FilePath FilePath),
-- Names of files which were generated from -ddump-to-file; used to
-- track which ones we need to truncate because it's our first run
-- through
generatedDumps :: IORef (Set FilePath),
-- hsc dynamic flags
flags :: IntSet,
warningFlags :: IntSet,
-- Don't change this without updating extensionFlags:
language :: Maybe Language,
-- | Safe Haskell mode
safeHaskell :: SafeHaskellMode,
-- We store the location of where some extension and flags were turned on so
-- we can produce accurate error messages when Safe Haskell fails due to
-- them.
thOnLoc :: SrcSpan,
newDerivOnLoc :: SrcSpan,
pkgTrustOnLoc :: SrcSpan,
warnSafeOnLoc :: SrcSpan,
warnUnsafeOnLoc :: SrcSpan,
-- Don't change this without updating extensionFlags:
extensions :: [OnOff ExtensionFlag],
-- extensionFlags should always be equal to
-- flattenExtensionFlags language extensions
extensionFlags :: IntSet,
-- | Message output action: use "ErrUtils" instead of this if you can
log_action :: LogAction,
haddockOptions :: Maybe String,
-- | what kind of {-# SCC #-} to add automatically
profAuto :: ProfAuto
}
data ProfAuto
= NoProfAuto -- ^ no SCC annotations added
| ProfAutoAll -- ^ top-level and nested functions are annotated
| ProfAutoTop -- ^ top-level functions annotated only
| ProfAutoExports -- ^ exported functions annotated only
| ProfAutoCalls -- ^ annotate call-sites
deriving (Enum)
data Settings = Settings {
sTargetPlatform :: Platform, -- Filled in by SysTools
sGhcUsagePath :: FilePath, -- Filled in by SysTools
sGhciUsagePath :: FilePath, -- ditto
sTopDir :: FilePath,
sTmpDir :: String, -- no trailing '/'
-- You shouldn't need to look things up in rawSettings directly.
-- They should have their own fields instead.
sRawSettings :: [(String, String)],
sExtraGccViaCFlags :: [String],
sSystemPackageConfig :: FilePath,
-- commands for particular phases
sPgm_L :: String,
sPgm_P :: (String,[Option]),
sPgm_F :: String,
sPgm_c :: (String,[Option]),
sPgm_s :: (String,[Option]),
sPgm_a :: (String,[Option]),
sPgm_l :: (String,[Option]),
sPgm_dll :: (String,[Option]),
sPgm_T :: String,
sPgm_sysman :: String,
sPgm_windres :: String,
sPgm_lo :: (String,[Option]), -- LLVM: opt llvm optimiser
sPgm_lc :: (String,[Option]), -- LLVM: llc static compiler
-- options for particular phases
sOpt_L :: [String],
sOpt_P :: [String],
sOpt_F :: [String],
sOpt_c :: [String],
sOpt_a :: [String],
sOpt_l :: [String],
sOpt_windres :: [String],
sOpt_lo :: [String], -- LLVM: llvm optimiser
sOpt_lc :: [String] -- LLVM: llc static compiler
}
targetPlatform :: DynFlags -> Platform
targetPlatform dflags = sTargetPlatform (settings dflags)
ghcUsagePath :: DynFlags -> FilePath
ghcUsagePath dflags = sGhcUsagePath (settings dflags)
ghciUsagePath :: DynFlags -> FilePath
ghciUsagePath dflags = sGhciUsagePath (settings dflags)
topDir :: DynFlags -> FilePath
topDir dflags = sTopDir (settings dflags)
tmpDir :: DynFlags -> String
tmpDir dflags = sTmpDir (settings dflags)
rawSettings :: DynFlags -> [(String, String)]
rawSettings dflags = sRawSettings (settings dflags)
extraGccViaCFlags :: DynFlags -> [String]
extraGccViaCFlags dflags = sExtraGccViaCFlags (settings dflags)
systemPackageConfig :: DynFlags -> FilePath
systemPackageConfig dflags = sSystemPackageConfig (settings dflags)
pgm_L :: DynFlags -> String
pgm_L dflags = sPgm_L (settings dflags)
pgm_P :: DynFlags -> (String,[Option])
pgm_P dflags = sPgm_P (settings dflags)
pgm_F :: DynFlags -> String
pgm_F dflags = sPgm_F (settings dflags)
pgm_c :: DynFlags -> (String,[Option])
pgm_c dflags = sPgm_c (settings dflags)
pgm_s :: DynFlags -> (String,[Option])
pgm_s dflags = sPgm_s (settings dflags)
pgm_a :: DynFlags -> (String,[Option])
pgm_a dflags = sPgm_a (settings dflags)
pgm_l :: DynFlags -> (String,[Option])
pgm_l dflags = sPgm_l (settings dflags)
pgm_dll :: DynFlags -> (String,[Option])
pgm_dll dflags = sPgm_dll (settings dflags)
pgm_T :: DynFlags -> String
pgm_T dflags = sPgm_T (settings dflags)
pgm_sysman :: DynFlags -> String
pgm_sysman dflags = sPgm_sysman (settings dflags)
pgm_windres :: DynFlags -> String
pgm_windres dflags = sPgm_windres (settings dflags)
pgm_lo :: DynFlags -> (String,[Option])
pgm_lo dflags = sPgm_lo (settings dflags)
pgm_lc :: DynFlags -> (String,[Option])
pgm_lc dflags = sPgm_lc (settings dflags)
opt_L :: DynFlags -> [String]
opt_L dflags = sOpt_L (settings dflags)
opt_P :: DynFlags -> [String]
opt_P dflags = sOpt_P (settings dflags)
opt_F :: DynFlags -> [String]
opt_F dflags = sOpt_F (settings dflags)
opt_c :: DynFlags -> [String]
opt_c dflags = sOpt_c (settings dflags)
opt_a :: DynFlags -> [String]
opt_a dflags = sOpt_a (settings dflags)
opt_l :: DynFlags -> [String]
opt_l dflags = sOpt_l (settings dflags)
opt_windres :: DynFlags -> [String]
opt_windres dflags = sOpt_windres (settings dflags)
opt_lo :: DynFlags -> [String]
opt_lo dflags = sOpt_lo (settings dflags)
opt_lc :: DynFlags -> [String]
opt_lc dflags = sOpt_lc (settings dflags)
wayNames :: DynFlags -> [WayName]
wayNames = map wayName . ways
-- | The target code type of the compilation (if any).
--
-- Whenever you change the target, also make sure to set 'ghcLink' to
-- something sensible.
--
-- 'HscNothing' can be used to avoid generating any output; however, note
-- that:
--
-- * This will not run the desugaring step, thus no warnings generated in
-- this step will be output. In particular, this includes warnings related
-- to pattern matching. You can run the desugarer manually using
-- 'GHC.desugarModule'.
--
-- * If a program uses Template Haskell the typechecker may try to run code
-- from an imported module. This will fail if no code has been generated
-- for this module. You can use 'GHC.needsTemplateHaskell' to detect
-- whether this might be the case and choose to either switch to a
-- different target or avoid typechecking such modules. (The latter may
--   be preferable for security reasons.)
--
data HscTarget
= HscC -- ^ Generate C code.
| HscAsm -- ^ Generate assembly using the native code generator.
| HscLlvm -- ^ Generate assembly using the llvm code generator.
| HscInterpreted -- ^ Generate bytecode. (Requires 'LinkInMemory')
| HscNothing -- ^ Don't generate any code. See notes above.
deriving (Eq, Show)
showHscTargetFlag :: HscTarget -> String
showHscTargetFlag HscC = "-fvia-c"
showHscTargetFlag HscAsm = "-fasm"
showHscTargetFlag HscLlvm = "-fllvm"
showHscTargetFlag HscInterpreted = "-fbyte-code"
showHscTargetFlag HscNothing = "-fno-code"
-- | Will this target result in an object file on the disk?
isObjectTarget :: HscTarget -> Bool
isObjectTarget HscC = True
isObjectTarget HscAsm = True
isObjectTarget HscLlvm = True
isObjectTarget _ = False
-- | Does this target retain *all* top-level bindings for a module,
-- rather than just the exported bindings, in the TypeEnv and compiled
-- code (if any)? In interpreted mode we do this, so that GHCi can
-- call functions inside a module. In HscNothing mode we also do it,
-- so that Haddock can get access to the GlobalRdrEnv for a module
-- after typechecking it.
targetRetainsAllBindings :: HscTarget -> Bool
targetRetainsAllBindings HscInterpreted = True
targetRetainsAllBindings HscNothing = True
targetRetainsAllBindings _ = False
-- | The 'GhcMode' tells us whether we're doing multi-module
-- compilation (controlled via the "GHC" API) or one-shot
-- (single-module) compilation. This makes a difference primarily to
-- the "Finder": in one-shot mode we look for interface files for
-- imported modules, but in multi-module mode we look for source files
-- in order to check whether they need to be recompiled.
data GhcMode
= CompManager -- ^ @\-\-make@, GHCi, etc.
| OneShot -- ^ @ghc -c Foo.hs@
| MkDepend -- ^ @ghc -M@, see "Finder" for why we need this
deriving Eq
instance Outputable GhcMode where
ppr CompManager = ptext (sLit "CompManager")
ppr OneShot = ptext (sLit "OneShot")
ppr MkDepend = ptext (sLit "MkDepend")
isOneShot :: GhcMode -> Bool
isOneShot OneShot = True
isOneShot _other = False
-- | What to do in the link step, if there is one.
data GhcLink
= NoLink -- ^ Don't link at all
| LinkBinary -- ^ Link object code into a binary
| LinkInMemory -- ^ Use the in-memory dynamic linker (works for both
-- bytecode and object code).
| LinkDynLib -- ^ Link objects into a dynamic lib (DLL on Windows, DSO on ELF platforms)
deriving (Eq, Show)
isNoLink :: GhcLink -> Bool
isNoLink NoLink = True
isNoLink _ = False
-- Is it worth evaluating this Bool and caching it in the DynFlags value
-- during initDynFlags?
doingTickyProfiling :: DynFlags -> Bool
doingTickyProfiling _ = opt_Ticky
-- XXX -ticky is a static flag, because it implies -debug which is also
-- static. If the way flags were made dynamic, we could fix this.
data PackageFlag
= ExposePackage String
| ExposePackageId String
| HidePackage String
| IgnorePackage String
| TrustPackage String
| DistrustPackage String
deriving Eq
defaultHscTarget :: HscTarget
defaultHscTarget = defaultObjectTarget
-- | The 'HscTarget' value corresponding to the default way to create
-- object files on the current platform.
defaultObjectTarget :: HscTarget
defaultObjectTarget
| cGhcUnregisterised == "YES" = HscC
| cGhcWithNativeCodeGen == "YES" = HscAsm
| otherwise = HscLlvm
data DynLibLoader
= Deployable
| SystemDependent
deriving Eq
data RtsOptsEnabled = RtsOptsNone | RtsOptsSafeOnly | RtsOptsAll
deriving (Show)
-- | Used by 'GHC.newSession' to partially initialize a new 'DynFlags' value
initDynFlags :: DynFlags -> IO DynFlags
initDynFlags dflags = do
-- someday these will be dynamic flags
ways <- readIORef v_Ways
refFilesToClean <- newIORef []
refDirsToClean <- newIORef Map.empty
refGeneratedDumps <- newIORef Set.empty
return dflags{
ways = ways,
buildTag = mkBuildTag (filter (not . wayRTSOnly) ways),
rtsBuildTag = mkBuildTag ways,
filesToClean = refFilesToClean,
dirsToClean = refDirsToClean,
generatedDumps = refGeneratedDumps
}
-- | The normal 'DynFlags'. Note that they are not suitable for use in this form
-- and must be fully initialized by 'GHC.newSession' first.
defaultDynFlags :: Settings -> DynFlags
defaultDynFlags mySettings =
DynFlags {
ghcMode = CompManager,
ghcLink = LinkBinary,
hscTarget = defaultHscTarget,
hscOutName = "",
extCoreName = "",
verbosity = 0,
optLevel = 0,
simplPhases = 2,
maxSimplIterations = 4,
shouldDumpSimplPhase = Nothing,
ruleCheck = Nothing,
simplTickFactor = 100,
specConstrThreshold = Just 2000,
specConstrCount = Just 3,
liberateCaseThreshold = Just 2000,
floatLamArgs = Just 0, -- Default: float only if no fvs
strictnessBefore = [],
cmdlineHcIncludes = [],
importPaths = ["."],
mainModIs = mAIN,
mainFunIs = Nothing,
ctxtStkDepth = mAX_CONTEXT_REDUCTION_DEPTH,
thisPackage = mainPackageId,
objectDir = Nothing,
dylibInstallName = Nothing,
hiDir = Nothing,
stubDir = Nothing,
dumpDir = Nothing,
objectSuf = phaseInputExt StopLn,
hcSuf = phaseInputExt HCc,
hiSuf = "hi",
pluginModNames = [],
pluginModNameOpts = [],
outputFile = Nothing,
outputHi = Nothing,
dynLibLoader = SystemDependent,
dumpPrefix = Nothing,
dumpPrefixForce = Nothing,
includePaths = [],
libraryPaths = [],
frameworkPaths = [],
cmdlineFrameworks = [],
rtsOpts = Nothing,
rtsOptsEnabled = RtsOptsSafeOnly,
hpcDir = ".hpc",
extraPkgConfs = [],
packageFlags = [],
pkgDatabase = Nothing,
pkgState = panic "no package state yet: call GHC.setSessionDynFlags",
ways = panic "defaultDynFlags: No ways",
buildTag = panic "defaultDynFlags: No buildTag",
rtsBuildTag = panic "defaultDynFlags: No rtsBuildTag",
splitInfo = Nothing,
settings = mySettings,
-- ghc -M values
depMakefile = "Makefile",
depIncludePkgDeps = False,
depExcludeMods = [],
depSuffixes = [],
-- end of ghc -M values
filesToClean = panic "defaultDynFlags: No filesToClean",
dirsToClean = panic "defaultDynFlags: No dirsToClean",
generatedDumps = panic "defaultDynFlags: No generatedDumps",
haddockOptions = Nothing,
flags = IntSet.fromList (map fromEnum defaultFlags),
warningFlags = IntSet.fromList (map fromEnum standardWarnings),
language = Nothing,
safeHaskell = Sf_SafeInfered,
thOnLoc = noSrcSpan,
newDerivOnLoc = noSrcSpan,
pkgTrustOnLoc = noSrcSpan,
warnSafeOnLoc = noSrcSpan,
warnUnsafeOnLoc = noSrcSpan,
extensions = [],
extensionFlags = flattenExtensionFlags Nothing [],
log_action = defaultLogAction,
profAuto = NoProfAuto
}
type LogAction = Severity -> SrcSpan -> PprStyle -> Message -> IO ()
defaultLogAction :: LogAction
defaultLogAction severity srcSpan style msg
= case severity of
SevOutput -> printSDoc msg style
SevInfo -> printErrs msg style
SevFatal -> printErrs msg style
_ -> do hPutChar stderr '\n'
printErrs (mkLocMessage srcSpan msg) style
-- careful (#2302): printErrs prints in UTF-8, whereas
-- converting to string first and using hPutStr would
-- just emit the low 8 bits of each unicode char.
{-
Note [Verbosity levels]
~~~~~~~~~~~~~~~~~~~~~~~
0 | print errors & warnings only
1 | minimal verbosity: print "compiling M ... done." for each module.
2 | equivalent to -dshow-passes
3 | equivalent to existing "ghc -v"
4 | "ghc -v -ddump-most"
5 | "ghc -v -ddump-all"
-}
data OnOff a = On a
| Off a
-- OnOffs accumulate in reverse order, so we use foldr in order to
-- process them in the right order
flattenExtensionFlags :: Maybe Language -> [OnOff ExtensionFlag] -> IntSet
flattenExtensionFlags ml = foldr f defaultExtensionFlags
where f (On f) flags = IntSet.insert (fromEnum f) flags
f (Off f) flags = IntSet.delete (fromEnum f) flags
defaultExtensionFlags = IntSet.fromList (map fromEnum (languageExtensions ml))
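-- Illustrative note (not in the original source): because new OnOffs are
-- consed onto the front of the list and the foldr applies the head last,
-- the most recently set flag wins. For example, starting from the
-- Haskell2010 defaults,
--
--   flattenExtensionFlags (Just Haskell2010) [Off Opt_ImplicitPrelude]
--
-- yields the Haskell2010 extension set minus Opt_ImplicitPrelude.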
languageExtensions :: Maybe Language -> [ExtensionFlag]
languageExtensions Nothing
-- Nothing => the default case
= Opt_NondecreasingIndentation -- This has been on by default for some time
: delete Opt_DatatypeContexts -- The Haskell' committee decided to
-- remove datatype contexts from the
-- language:
-- http://www.haskell.org/pipermail/haskell-prime/2011-January/003335.html
(languageExtensions (Just Haskell2010))
-- NB: MonoPatBinds is no longer the default
languageExtensions (Just Haskell98)
= [Opt_ImplicitPrelude,
Opt_MonomorphismRestriction,
Opt_NPlusKPatterns,
Opt_DatatypeContexts,
Opt_TraditionalRecordSyntax,
Opt_NondecreasingIndentation
-- strictly speaking non-standard, but we always had this
-- on implicitly before the option was added in 7.1, and
-- turning it off breaks code, so we're keeping it on for
-- backwards compatibility. Cabal uses -XHaskell98 by
-- default unless you specify another language.
]
languageExtensions (Just Haskell2010)
= [Opt_ImplicitPrelude,
Opt_MonomorphismRestriction,
Opt_DatatypeContexts,
Opt_TraditionalRecordSyntax,
Opt_EmptyDataDecls,
Opt_ForeignFunctionInterface,
Opt_PatternGuards,
Opt_DoAndIfThenElse,
Opt_RelaxedPolyRec]
-- | Test whether a 'DynFlag' is set
dopt :: DynFlag -> DynFlags -> Bool
dopt f dflags = fromEnum f `IntSet.member` flags dflags
-- | Set a 'DynFlag'
dopt_set :: DynFlags -> DynFlag -> DynFlags
dopt_set dfs f = dfs{ flags = IntSet.insert (fromEnum f) (flags dfs) }
-- | Unset a 'DynFlag'
dopt_unset :: DynFlags -> DynFlag -> DynFlags
dopt_unset dfs f = dfs{ flags = IntSet.delete (fromEnum f) (flags dfs) }
-- | Test whether a 'WarningFlag' is set
wopt :: WarningFlag -> DynFlags -> Bool
wopt f dflags = fromEnum f `IntSet.member` warningFlags dflags
-- | Set a 'WarningFlag'
wopt_set :: DynFlags -> WarningFlag -> DynFlags
wopt_set dfs f = dfs{ warningFlags = IntSet.insert (fromEnum f) (warningFlags dfs) }
-- | Unset a 'WarningFlag'
wopt_unset :: DynFlags -> WarningFlag -> DynFlags
wopt_unset dfs f = dfs{ warningFlags = IntSet.delete (fromEnum f) (warningFlags dfs) }
-- | Test whether a 'ExtensionFlag' is set
xopt :: ExtensionFlag -> DynFlags -> Bool
xopt f dflags = fromEnum f `IntSet.member` extensionFlags dflags
-- | Set a 'ExtensionFlag'
xopt_set :: DynFlags -> ExtensionFlag -> DynFlags
xopt_set dfs f
= let onoffs = On f : extensions dfs
in dfs { extensions = onoffs,
extensionFlags = flattenExtensionFlags (language dfs) onoffs }
-- | Unset a 'ExtensionFlag'
xopt_unset :: DynFlags -> ExtensionFlag -> DynFlags
xopt_unset dfs f
= let onoffs = Off f : extensions dfs
in dfs { extensions = onoffs,
extensionFlags = flattenExtensionFlags (language dfs) onoffs }
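-- Hedged usage sketch (not part of the original module): client code
-- typically threads a DynFlags value through these setters and then
-- queries it, e.g.
--
--   let dflags' = xopt_set (dopt_set dflags Opt_D_dump_simpl)
--                          Opt_TemplateHaskell
--   in (dopt Opt_D_dump_simpl dflags', xopt Opt_TemplateHaskell dflags')
--
-- evaluates to (True, True).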
-- | Set the Haskell language standard to use
setLanguage :: Language -> DynP ()
setLanguage l = upd f
where f dfs = let mLang = Just l
oneoffs = extensions dfs
in dfs {
language = mLang,
extensionFlags = flattenExtensionFlags mLang oneoffs
}
-- | Some modules have dependencies on others through the DynFlags rather than textual imports
dynFlagDependencies :: DynFlags -> [ModuleName]
dynFlagDependencies = pluginModNames
-- | Is the -fpackage-trust mode on
packageTrustOn :: DynFlags -> Bool
packageTrustOn = dopt Opt_PackageTrust
-- | Is Safe Haskell on in some way (including inference mode)
safeHaskellOn :: DynFlags -> Bool
safeHaskellOn dflags = safeHaskell dflags /= Sf_None
-- | Is the Safe Haskell safe language in use
safeLanguageOn :: DynFlags -> Bool
safeLanguageOn dflags = safeHaskell dflags == Sf_Safe
-- | Is the Safe Haskell safe inference mode active
safeInferOn :: DynFlags -> Bool
safeInferOn dflags = safeHaskell dflags == Sf_SafeInfered
-- | Test if Safe Imports are on in some form
safeImportsOn :: DynFlags -> Bool
safeImportsOn dflags = safeHaskell dflags == Sf_Unsafe ||
safeHaskell dflags == Sf_Trustworthy ||
safeHaskell dflags == Sf_Safe
-- | Set a 'Safe Haskell' flag
setSafeHaskell :: SafeHaskellMode -> DynP ()
setSafeHaskell s = updM f
where f dfs = do
let sf = safeHaskell dfs
safeM <- combineSafeFlags sf s
return $ dfs { safeHaskell = safeM }
-- | Are all direct imports required to be safe for this Safe Haskell mode?
-- Direct imports are when the code explicitly imports a module
safeDirectImpsReq :: DynFlags -> Bool
safeDirectImpsReq d = safeLanguageOn d
-- | Are all implicit imports required to be safe for this Safe Haskell mode?
-- Implicit imports are things in the Prelude, e.g. System.IO when print is used.
safeImplicitImpsReq :: DynFlags -> Bool
safeImplicitImpsReq d = safeLanguageOn d
-- | Combine two Safe Haskell modes correctly. Used for dealing with multiple flags.
-- This makes Safe Haskell very much a monoid but for now I prefer this as I don't
-- want to export this functionality from the module but do want to export the
-- type constructors.
combineSafeFlags :: SafeHaskellMode -> SafeHaskellMode -> DynP SafeHaskellMode
combineSafeFlags a b | a == Sf_SafeInfered = return b
| b == Sf_SafeInfered = return a
| a == Sf_None = return b
| b == Sf_None = return a
| a == b = return a
| otherwise = addErr errm >> return (panic errm)
where errm = "Incompatible Safe Haskell flags! ("
++ showPpr a ++ ", " ++ showPpr b ++ ")"
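-- Illustrative note (not in the original source): Sf_SafeInfered and Sf_None
-- act as identities here, so for example
--
--   combineSafeFlags Sf_SafeInfered Sf_Trustworthy  ==>  Sf_Trustworthy
--   combineSafeFlags Sf_Safe        Sf_Safe         ==>  Sf_Safe
--
-- while combining two distinct explicit modes (say Sf_Safe and Sf_Unsafe)
-- reports the "Incompatible Safe Haskell flags" error.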
-- | A list of unsafe flags under Safe Haskell. Tuple elements are:
-- * name of the flag
-- * function to get srcspan that enabled the flag
-- * function to test if the flag is on
-- * function to turn the flag off
unsafeFlags :: [(String, DynFlags -> SrcSpan, DynFlags -> Bool, DynFlags -> DynFlags)]
unsafeFlags = [("-XGeneralizedNewtypeDeriving", newDerivOnLoc,
xopt Opt_GeneralizedNewtypeDeriving,
flip xopt_unset Opt_GeneralizedNewtypeDeriving),
("-XTemplateHaskell", thOnLoc,
xopt Opt_TemplateHaskell,
flip xopt_unset Opt_TemplateHaskell)]
-- | Retrieve the options corresponding to a particular @opt_*@ field in the correct order
getOpts :: DynFlags -- ^ 'DynFlags' to retrieve the options from
-> (DynFlags -> [a]) -- ^ Relevant record accessor: one of the @opt_*@ accessors
-> [a] -- ^ Correctly ordered extracted options
getOpts dflags opts = reverse (opts dflags)
-- We add to the options from the front, so we need to reverse the list
-- | Gets the verbosity flag for the current verbosity level. This is fed to
-- other tools, so GHC-specific verbosity flags like @-ddump-most@ are not included
getVerbFlags :: DynFlags -> [String]
getVerbFlags dflags
| verbosity dflags >= 4 = ["-v"]
| otherwise = []
setObjectDir, setHiDir, setStubDir, setDumpDir, setOutputDir,
setDylibInstallName,
setObjectSuf, setHiSuf, setHcSuf, parseDynLibLoaderMode,
setPgmP, addOptl, addOptP,
addCmdlineFramework, addHaddockOpts
:: String -> DynFlags -> DynFlags
setOutputFile, setOutputHi, setDumpPrefixForce
:: Maybe String -> DynFlags -> DynFlags
setObjectDir f d = d{ objectDir = Just f}
setHiDir f d = d{ hiDir = Just f}
setStubDir f d = d{ stubDir = Just f, includePaths = f : includePaths d }
-- -stubdir D adds an implicit -I D, so that gcc can find the _stub.h file
-- \#included from the .hc file when compiling via C (i.e. unregisterised
-- builds).
setDumpDir f d = d{ dumpDir = Just f}
setOutputDir f = setObjectDir f . setHiDir f . setStubDir f . setDumpDir f
setDylibInstallName f d = d{ dylibInstallName = Just f}
setObjectSuf f d = d{ objectSuf = f}
setHiSuf f d = d{ hiSuf = f}
setHcSuf f d = d{ hcSuf = f}
setOutputFile f d = d{ outputFile = f}
setOutputHi f d = d{ outputHi = f}
addPluginModuleName :: String -> DynFlags -> DynFlags
addPluginModuleName name d = d { pluginModNames = (mkModuleName name) : (pluginModNames d) }
addPluginModuleNameOption :: String -> DynFlags -> DynFlags
addPluginModuleNameOption optflag d = d { pluginModNameOpts = (mkModuleName m, option) : (pluginModNameOpts d) }
where (m, rest) = break (== ':') optflag
option = case rest of
[] -> "" -- should probably signal an error
(_:plug_opt) -> plug_opt -- ignore the ':' from break
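-- Illustrative note (not in the original source): the argument is split at
-- the first ':', so "-fplugin-opt MyPlugin:verbose" records
-- (mkModuleName "MyPlugin", "verbose"), while an argument with no ':' leaves
-- the option string empty, e.g. "MyPlugin" gives (mkModuleName "MyPlugin", "").
-- The module name here is a made-up example, not one the compiler knows about.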
parseDynLibLoaderMode f d =
case splitAt 8 f of
("deploy", "") -> d{ dynLibLoader = Deployable }
("sysdep", "") -> d{ dynLibLoader = SystemDependent }
_ -> ghcError (CmdLineError ("Unknown dynlib loader: " ++ f))
setDumpPrefixForce f d = d { dumpPrefixForce = f}
-- XXX HACK: Prelude> words "'does not' work" ===> ["'does","not'","work"]
-- Config.hs should really use Option.
setPgmP f = let (pgm:args) = words f in alterSettings (\s -> s { sPgm_P = (pgm, map Option args)})
addOptl f = alterSettings (\s -> s { sOpt_l = f : sOpt_l s})
addOptP f = alterSettings (\s -> s { sOpt_P = f : sOpt_P s})
setDepMakefile :: FilePath -> DynFlags -> DynFlags
setDepMakefile f d = d { depMakefile = deOptDep f }
setDepIncludePkgDeps :: Bool -> DynFlags -> DynFlags
setDepIncludePkgDeps b d = d { depIncludePkgDeps = b }
addDepExcludeMod :: String -> DynFlags -> DynFlags
addDepExcludeMod m d
= d { depExcludeMods = mkModuleName (deOptDep m) : depExcludeMods d }
addDepSuffix :: FilePath -> DynFlags -> DynFlags
addDepSuffix s d = d { depSuffixes = deOptDep s : depSuffixes d }
-- XXX Legacy code:
-- We used to use "-optdep-flag -optdeparg", so for legacy applications
-- we need to strip the "-optdep" off of the arg
deOptDep :: String -> String
deOptDep x = case stripPrefix "-optdep" x of
Just rest -> rest
Nothing -> x
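-- Illustrative examples (not in the original source):
--
--   deOptDep "-optdep-f"        == "-f"
--   deOptDep "-optdepdepend"    == "depend"
--   deOptDep "--exclude-module" == "--exclude-module"   -- no prefix, unchanged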
addCmdlineFramework f d = d{ cmdlineFrameworks = f : cmdlineFrameworks d}
addHaddockOpts f d = d{ haddockOptions = Just f}
-- -----------------------------------------------------------------------------
-- Command-line options
-- | When invoking external tools as part of the compilation pipeline, we
-- pass these a sequence of options on the command-line. Rather than
-- just using a list of Strings, we use a type that allows us to distinguish
-- between filepaths and 'other stuff'. The reason for this is that
-- this type gives us a handle on transforming filenames, and filenames only,
-- to whatever format they're expected to be on a particular platform.
data Option
= FileOption -- an entry that _contains_ filename(s) / filepaths.
String -- a non-filepath prefix that shouldn't be
-- transformed (e.g., "/out=")
String -- the filepath/filename portion
| Option String
deriving ( Eq )
showOpt :: Option -> String
showOpt (FileOption pre f) = pre ++ f
showOpt (Option s) = s
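-- Illustrative examples (not in the original source); the file names are
-- arbitrary placeholders:
--
--   showOpt (FileOption "/out=" "Main.o") == "/out=Main.o"
--   showOpt (Option "-static")            == "-static"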
-----------------------------------------------------------------------------
-- Setting the optimisation level
updOptLevel :: Int -> DynFlags -> DynFlags
-- ^ Sets the 'DynFlags' to be appropriate to the optimisation level
updOptLevel n dfs
= dfs2{ optLevel = final_n }
where
final_n = max 0 (min 2 n) -- Clamp to 0 <= n <= 2
dfs1 = foldr (flip dopt_unset) dfs remove_dopts
dfs2 = foldr (flip dopt_set) dfs1 extra_dopts
extra_dopts = [ f | (ns,f) <- optLevelFlags, final_n `elem` ns ]
remove_dopts = [ f | (ns,f) <- optLevelFlags, final_n `notElem` ns ]
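-- Illustrative note (not in the original source): the requested level is
-- clamped to the range [0,2], so e.g. @updOptLevel 5 dflags@ behaves like
-- @updOptLevel 2 dflags@, first clearing the flags that level 2 does not
-- enable and then setting the ones it does (as listed in optLevelFlags).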
-- -----------------------------------------------------------------------------
-- StgToDo: abstraction of stg-to-stg passes to run.
data StgToDo
= StgDoMassageForProfiling -- should be (next to) last
-- There's also setStgVarInfo, but its absolute "lastness"
-- is so critical that it is hardwired in (no flag).
| D_stg_stats
getStgToDo :: DynFlags -> [StgToDo]
getStgToDo dflags
= todo2
where
stg_stats = dopt Opt_StgStats dflags
todo1 = if stg_stats then [D_stg_stats] else []
todo2 | WayProf `elem` wayNames dflags
= StgDoMassageForProfiling : todo1
| otherwise
= todo1
{- **********************************************************************
%* *
DynFlags parser
%* *
%********************************************************************* -}
-- -----------------------------------------------------------------------------
-- Parsing the dynamic flags.
-- | Parse dynamic flags from a list of command line arguments. Returns the
-- parsed 'DynFlags', the left-over arguments, and a list of warnings.
-- Throws a 'UsageError' if errors occurred during parsing (such as unknown
-- flags or missing arguments).
parseDynamicFlagsCmdLine :: Monad m =>
DynFlags -> [Located String]
-> m (DynFlags, [Located String], [Located String])
-- ^ Updated 'DynFlags', left-over arguments, and
-- list of warnings.
parseDynamicFlagsCmdLine dflags args = parseDynamicFlags dflags args True
-- | Like 'parseDynamicFlagsCmdLine' but does not allow the package flags
-- (-package, -hide-package, -ignore-package, -hide-all-packages, -package-conf).
-- Used to parse flags set in a module's pragma.
parseDynamicFilePragma :: Monad m =>
DynFlags -> [Located String]
-> m (DynFlags, [Located String], [Located String])
-- ^ Updated 'DynFlags', left-over arguments, and
-- list of warnings.
parseDynamicFilePragma dflags args = parseDynamicFlags dflags args False
parseDynamicFlags :: Monad m =>
DynFlags -> [Located String] -> Bool
-> m (DynFlags, [Located String], [Located String])
parseDynamicFlags dflags0 args cmdline = do
-- XXX Legacy support code
-- We used to accept things like
-- optdep-f -optdepdepend
-- optdep-f -optdep depend
-- optdep -f -optdepdepend
-- optdep -f -optdep depend
-- but the spaces trip up proper argument handling. So get rid of them.
let f (L p "-optdep" : L _ x : xs) = (L p ("-optdep" ++ x)) : f xs
f (x : xs) = x : f xs
f xs = xs
args' = f args
-- Note: -ignore-package (package_flags) must precede -i* (dynamic_flags)
flag_spec | cmdline = package_flags ++ dynamic_flags
| otherwise = dynamic_flags
let ((leftover, errs, warns), dflags1)
= runCmdLine (processArgs flag_spec args') dflags0
when (not (null errs)) $ ghcError $ errorsToGhcException errs
-- check for disabled flags in safe haskell
let (dflags2, sh_warns) = safeFlagCheck cmdline dflags1
return (dflags2, leftover, sh_warns ++ warns)
-- | Check (and potentially disable) any extensions that aren't allowed
-- in safe mode.
safeFlagCheck :: Bool -> DynFlags -> (DynFlags, [Located String])
safeFlagCheck _ dflags | not (safeLanguageOn dflags || safeInferOn dflags)
= (dflags, [])
safeFlagCheck cmdl dflags =
case safeLanguageOn dflags of
True -> (dflags', warns)
-- throw error if -fpackage-trust by itself with no safe haskell flag
False | not cmdl && safeInferOn dflags && packageTrustOn dflags
-> (dopt_unset dflags' Opt_PackageTrust,
[L (pkgTrustOnLoc dflags') $
"Warning: -fpackage-trust ignored;" ++
" must be specified with a Safe Haskell flag"]
)
False | null warns && safeInfOk
-> (dflags', [])
| otherwise
-> (dflags' { safeHaskell = Sf_None }, [])
    -- Have we inferred Unsafe?
-- See Note [HscMain . Safe Haskell Inference]
where
-- TODO: Can we do better than this for inference?
safeInfOk = not $ xopt Opt_OverlappingInstances dflags
(dflags', warns) = foldl check_method (dflags, []) unsafeFlags
check_method (df, warns) (str,loc,test,fix)
| test df = (apFix fix df, warns ++ safeFailure (loc dflags) str)
| otherwise = (df, warns)
apFix f = if safeInferOn dflags then id else f
safeFailure loc str = [L loc $ "Warning: " ++ str ++ " is not allowed in"
++ " Safe Haskell; ignoring " ++ str]
{- **********************************************************************
%* *
DynFlags specifications
%* *
%********************************************************************* -}
allFlags :: [String]
allFlags = map ('-':) $
[ flagName flag | flag <- dynamic_flags ++ package_flags, ok (flagOptKind flag) ] ++
map ("fno-"++) fflags ++
map ("f"++) fflags ++
map ("X"++) supportedExtensions
where ok (PrefixPred _ _) = False
ok _ = True
fflags = fflags0 ++ fflags1 ++ fflags2
fflags0 = [ name | (name, _, _) <- fFlags ]
fflags1 = [ name | (name, _, _) <- fWarningFlags ]
fflags2 = [ name | (name, _, _) <- fLangFlags ]
--------------- The main flags themselves ------------------
dynamic_flags :: [Flag (CmdLineP DynFlags)]
dynamic_flags = [
Flag "n" (NoArg (addWarn "The -n flag is deprecated and no longer has any effect"))
, Flag "cpp" (NoArg (setExtensionFlag Opt_Cpp))
, Flag "F" (NoArg (setDynFlag Opt_Pp))
, Flag "#include"
(HasArg (\s -> do addCmdlineHCInclude s
addWarn "-#include and INCLUDE pragmas are deprecated: They no longer have any effect"))
, Flag "v" (OptIntSuffix setVerbosity)
------- Specific phases --------------------------------------------
-- need to appear before -pgmL to be parsed as LLVM flags.
, Flag "pgmlo" (hasArg (\f -> alterSettings (\s -> s { sPgm_lo = (f,[])})))
, Flag "pgmlc" (hasArg (\f -> alterSettings (\s -> s { sPgm_lc = (f,[])})))
, Flag "pgmL" (hasArg (\f -> alterSettings (\s -> s { sPgm_L = f})))
, Flag "pgmP" (hasArg setPgmP)
, Flag "pgmF" (hasArg (\f -> alterSettings (\s -> s { sPgm_F = f})))
, Flag "pgmc" (hasArg (\f -> alterSettings (\s -> s { sPgm_c = (f,[])})))
, Flag "pgmm" (HasArg (\_ -> addWarn "The -pgmm flag does nothing; it will be removed in a future GHC release"))
, Flag "pgms" (hasArg (\f -> alterSettings (\s -> s { sPgm_s = (f,[])})))
, Flag "pgma" (hasArg (\f -> alterSettings (\s -> s { sPgm_a = (f,[])})))
, Flag "pgml" (hasArg (\f -> alterSettings (\s -> s { sPgm_l = (f,[])})))
, Flag "pgmdll" (hasArg (\f -> alterSettings (\s -> s { sPgm_dll = (f,[])})))
, Flag "pgmwindres" (hasArg (\f -> alterSettings (\s -> s { sPgm_windres = f})))
-- need to appear before -optl/-opta to be parsed as LLVM flags.
, Flag "optlo" (hasArg (\f -> alterSettings (\s -> s { sOpt_lo = f : sOpt_lo s})))
, Flag "optlc" (hasArg (\f -> alterSettings (\s -> s { sOpt_lc = f : sOpt_lc s})))
, Flag "optL" (hasArg (\f -> alterSettings (\s -> s { sOpt_L = f : sOpt_L s})))
, Flag "optP" (hasArg addOptP)
, Flag "optF" (hasArg (\f -> alterSettings (\s -> s { sOpt_F = f : sOpt_F s})))
, Flag "optc" (hasArg (\f -> alterSettings (\s -> s { sOpt_c = f : sOpt_c s})))
, Flag "optm" (HasArg (\_ -> addWarn "The -optm flag does nothing; it will be removed in a future GHC release"))
, Flag "opta" (hasArg (\f -> alterSettings (\s -> s { sOpt_a = f : sOpt_a s})))
, Flag "optl" (hasArg addOptl)
, Flag "optwindres" (hasArg (\f -> alterSettings (\s -> s { sOpt_windres = f : sOpt_windres s})))
, Flag "split-objs"
(NoArg (if can_split
then setDynFlag Opt_SplitObjs
else addWarn "ignoring -fsplit-objs"))
-------- ghc -M -----------------------------------------------------
, Flag "dep-suffix" (hasArg addDepSuffix)
, Flag "optdep-s" (hasArgDF addDepSuffix "Use -dep-suffix instead")
, Flag "dep-makefile" (hasArg setDepMakefile)
, Flag "optdep-f" (hasArgDF setDepMakefile "Use -dep-makefile instead")
, Flag "optdep-w" (NoArg (deprecate "doesn't do anything"))
, Flag "include-pkg-deps" (noArg (setDepIncludePkgDeps True))
, Flag "optdep--include-prelude" (noArgDF (setDepIncludePkgDeps True) "Use -include-pkg-deps instead")
, Flag "optdep--include-pkg-deps" (noArgDF (setDepIncludePkgDeps True) "Use -include-pkg-deps instead")
, Flag "exclude-module" (hasArg addDepExcludeMod)
, Flag "optdep--exclude-module" (hasArgDF addDepExcludeMod "Use -exclude-module instead")
, Flag "optdep-x" (hasArgDF addDepExcludeMod "Use -exclude-module instead")
-------- Linking ----------------------------------------------------
, Flag "no-link" (noArg (\d -> d{ ghcLink=NoLink }))
, Flag "shared" (noArg (\d -> d{ ghcLink=LinkDynLib }))
, Flag "dynload" (hasArg parseDynLibLoaderMode)
, Flag "dylib-install-name" (hasArg setDylibInstallName)
------- Libraries ---------------------------------------------------
, Flag "L" (Prefix addLibraryPath)
, Flag "l" (hasArg (addOptl . ("-l" ++)))
------- Frameworks --------------------------------------------------
-- -framework-path should really be -F ...
, Flag "framework-path" (HasArg addFrameworkPath)
, Flag "framework" (hasArg addCmdlineFramework)
------- Output Redirection ------------------------------------------
, Flag "odir" (hasArg setObjectDir)
, Flag "o" (sepArg (setOutputFile . Just))
, Flag "ohi" (hasArg (setOutputHi . Just ))
, Flag "osuf" (hasArg setObjectSuf)
, Flag "hcsuf" (hasArg setHcSuf)
, Flag "hisuf" (hasArg setHiSuf)
, Flag "hidir" (hasArg setHiDir)
, Flag "tmpdir" (hasArg setTmpDir)
, Flag "stubdir" (hasArg setStubDir)
, Flag "dumpdir" (hasArg setDumpDir)
, Flag "outputdir" (hasArg setOutputDir)
, Flag "ddump-file-prefix" (hasArg (setDumpPrefixForce . Just))
------- Keeping temporary files -------------------------------------
-- These can be singular (think ghc -c) or plural (think ghc --make)
, Flag "keep-hc-file" (NoArg (setDynFlag Opt_KeepHcFiles))
, Flag "keep-hc-files" (NoArg (setDynFlag Opt_KeepHcFiles))
, Flag "keep-s-file" (NoArg (setDynFlag Opt_KeepSFiles))
, Flag "keep-s-files" (NoArg (setDynFlag Opt_KeepSFiles))
, Flag "keep-raw-s-file" (NoArg (addWarn "The -keep-raw-s-file flag does nothing; it will be removed in a future GHC release"))
, Flag "keep-raw-s-files" (NoArg (addWarn "The -keep-raw-s-files flag does nothing; it will be removed in a future GHC release"))
, Flag "keep-llvm-file" (NoArg (do setObjTarget HscLlvm
setDynFlag Opt_KeepLlvmFiles))
, Flag "keep-llvm-files" (NoArg (do setObjTarget HscLlvm
setDynFlag Opt_KeepLlvmFiles))
-- This only makes sense as plural
, Flag "keep-tmp-files" (NoArg (setDynFlag Opt_KeepTmpFiles))
------- Miscellaneous ----------------------------------------------
, Flag "no-auto-link-packages" (NoArg (unSetDynFlag Opt_AutoLinkPackages))
, Flag "no-hs-main" (NoArg (setDynFlag Opt_NoHsMain))
, Flag "with-rtsopts" (HasArg setRtsOpts)
, Flag "rtsopts" (NoArg (setRtsOptsEnabled RtsOptsAll))
, Flag "rtsopts=all" (NoArg (setRtsOptsEnabled RtsOptsAll))
, Flag "rtsopts=some" (NoArg (setRtsOptsEnabled RtsOptsSafeOnly))
, Flag "rtsopts=none" (NoArg (setRtsOptsEnabled RtsOptsNone))
, Flag "no-rtsopts" (NoArg (setRtsOptsEnabled RtsOptsNone))
, Flag "main-is" (SepArg setMainIs)
, Flag "haddock" (NoArg (setDynFlag Opt_Haddock))
, Flag "haddock-opts" (hasArg addHaddockOpts)
, Flag "hpcdir" (SepArg setOptHpcDir)
------- recompilation checker --------------------------------------
, Flag "recomp" (NoArg (do unSetDynFlag Opt_ForceRecomp
deprecate "Use -fno-force-recomp instead"))
, Flag "no-recomp" (NoArg (do setDynFlag Opt_ForceRecomp
deprecate "Use -fforce-recomp instead"))
------ HsCpp opts ---------------------------------------------------
, Flag "D" (AnySuffix (upd . addOptP))
, Flag "U" (AnySuffix (upd . addOptP))
------- Include/Import Paths ----------------------------------------
, Flag "I" (Prefix addIncludePath)
, Flag "i" (OptPrefix addImportPath)
------ Debugging ----------------------------------------------------
, Flag "dstg-stats" (NoArg (setDynFlag Opt_StgStats))
, Flag "ddump-cmm" (setDumpFlag Opt_D_dump_cmm)
, Flag "ddump-raw-cmm" (setDumpFlag Opt_D_dump_raw_cmm)
, Flag "ddump-cmmz" (setDumpFlag Opt_D_dump_cmmz)
, Flag "ddump-cmmz-pretty" (setDumpFlag Opt_D_dump_cmmz_pretty)
, Flag "ddump-cmmz-cbe" (setDumpFlag Opt_D_dump_cmmz_cbe)
, Flag "ddump-cmmz-spills" (setDumpFlag Opt_D_dump_cmmz_spills)
, Flag "ddump-cmmz-proc" (setDumpFlag Opt_D_dump_cmmz_proc)
, Flag "ddump-cmmz-rewrite" (setDumpFlag Opt_D_dump_cmmz_rewrite)
, Flag "ddump-cmmz-dead" (setDumpFlag Opt_D_dump_cmmz_dead)
, Flag "ddump-cmmz-stub" (setDumpFlag Opt_D_dump_cmmz_stub)
, Flag "ddump-cmmz-sp" (setDumpFlag Opt_D_dump_cmmz_sp)
, Flag "ddump-cmmz-procmap" (setDumpFlag Opt_D_dump_cmmz_procmap)
, Flag "ddump-cmmz-split" (setDumpFlag Opt_D_dump_cmmz_split)
, Flag "ddump-cmmz-lower" (setDumpFlag Opt_D_dump_cmmz_lower)
, Flag "ddump-cmmz-info" (setDumpFlag Opt_D_dump_cmmz_info)
, Flag "ddump-cmmz-cafs" (setDumpFlag Opt_D_dump_cmmz_cafs)
, Flag "ddump-core-stats" (setDumpFlag Opt_D_dump_core_stats)
, Flag "ddump-cps-cmm" (setDumpFlag Opt_D_dump_cps_cmm)
, Flag "ddump-cvt-cmm" (setDumpFlag Opt_D_dump_cvt_cmm)
, Flag "ddump-asm" (setDumpFlag Opt_D_dump_asm)
, Flag "ddump-asm-native" (setDumpFlag Opt_D_dump_asm_native)
, Flag "ddump-asm-liveness" (setDumpFlag Opt_D_dump_asm_liveness)
, Flag "ddump-asm-coalesce" (setDumpFlag Opt_D_dump_asm_coalesce)
, Flag "ddump-asm-regalloc" (setDumpFlag Opt_D_dump_asm_regalloc)
, Flag "ddump-asm-conflicts" (setDumpFlag Opt_D_dump_asm_conflicts)
, Flag "ddump-asm-regalloc-stages" (setDumpFlag Opt_D_dump_asm_regalloc_stages)
, Flag "ddump-asm-stats" (setDumpFlag Opt_D_dump_asm_stats)
, Flag "ddump-asm-expanded" (setDumpFlag Opt_D_dump_asm_expanded)
, Flag "ddump-llvm" (NoArg (do setObjTarget HscLlvm
setDumpFlag' Opt_D_dump_llvm))
, Flag "ddump-cpranal" (setDumpFlag Opt_D_dump_cpranal)
, Flag "ddump-deriv" (setDumpFlag Opt_D_dump_deriv)
, Flag "ddump-ds" (setDumpFlag Opt_D_dump_ds)
, Flag "ddump-flatC" (setDumpFlag Opt_D_dump_flatC)
, Flag "ddump-foreign" (setDumpFlag Opt_D_dump_foreign)
, Flag "ddump-inlinings" (setDumpFlag Opt_D_dump_inlinings)
, Flag "ddump-rule-firings" (setDumpFlag Opt_D_dump_rule_firings)
, Flag "ddump-rule-rewrites" (setDumpFlag Opt_D_dump_rule_rewrites)
, Flag "ddump-occur-anal" (setDumpFlag Opt_D_dump_occur_anal)
, Flag "ddump-parsed" (setDumpFlag Opt_D_dump_parsed)
, Flag "ddump-rn" (setDumpFlag Opt_D_dump_rn)
, Flag "ddump-core-pipeline" (setDumpFlag Opt_D_dump_core_pipeline)
, Flag "ddump-simpl" (setDumpFlag Opt_D_dump_simpl)
, Flag "ddump-simpl-iterations" (setDumpFlag Opt_D_dump_simpl_iterations)
, Flag "ddump-simpl-phases" (OptPrefix setDumpSimplPhases)
, Flag "ddump-spec" (setDumpFlag Opt_D_dump_spec)
, Flag "ddump-prep" (setDumpFlag Opt_D_dump_prep)
, Flag "ddump-stg" (setDumpFlag Opt_D_dump_stg)
, Flag "ddump-stranal" (setDumpFlag Opt_D_dump_stranal)
, Flag "ddump-tc" (setDumpFlag Opt_D_dump_tc)
, Flag "ddump-types" (setDumpFlag Opt_D_dump_types)
, Flag "ddump-rules" (setDumpFlag Opt_D_dump_rules)
, Flag "ddump-cse" (setDumpFlag Opt_D_dump_cse)
, Flag "ddump-worker-wrapper" (setDumpFlag Opt_D_dump_worker_wrapper)
, Flag "ddump-rn-trace" (setDumpFlag Opt_D_dump_rn_trace)
, Flag "ddump-if-trace" (setDumpFlag Opt_D_dump_if_trace)
, Flag "ddump-cs-trace" (setDumpFlag Opt_D_dump_cs_trace)
, Flag "ddump-tc-trace" (setDumpFlag Opt_D_dump_tc_trace)
, Flag "ddump-vt-trace" (setDumpFlag Opt_D_dump_vt_trace)
, Flag "ddump-splices" (setDumpFlag Opt_D_dump_splices)
, Flag "ddump-rn-stats" (setDumpFlag Opt_D_dump_rn_stats)
, Flag "ddump-opt-cmm" (setDumpFlag Opt_D_dump_opt_cmm)
, Flag "ddump-simpl-stats" (setDumpFlag Opt_D_dump_simpl_stats)
, Flag "ddump-bcos" (setDumpFlag Opt_D_dump_BCOs)
, Flag "dsource-stats" (setDumpFlag Opt_D_source_stats)
, Flag "dverbose-core2core" (NoArg (do setVerbosity (Just 2)
setVerboseCore2Core))
, Flag "dverbose-stg2stg" (setDumpFlag Opt_D_verbose_stg2stg)
, Flag "ddump-hi" (setDumpFlag Opt_D_dump_hi)
, Flag "ddump-minimal-imports" (setDumpFlag Opt_D_dump_minimal_imports)
, Flag "ddump-vect" (setDumpFlag Opt_D_dump_vect)
, Flag "ddump-hpc" (setDumpFlag Opt_D_dump_ticked) -- back compat
, Flag "ddump-ticked" (setDumpFlag Opt_D_dump_ticked)
, Flag "ddump-mod-cycles" (setDumpFlag Opt_D_dump_mod_cycles)
, Flag "ddump-view-pattern-commoning" (setDumpFlag Opt_D_dump_view_pattern_commoning)
, Flag "ddump-to-file" (setDumpFlag Opt_DumpToFile)
, Flag "ddump-hi-diffs" (setDumpFlag Opt_D_dump_hi_diffs)
, Flag "ddump-rtti" (setDumpFlag Opt_D_dump_rtti)
, Flag "dcore-lint" (NoArg (setDynFlag Opt_DoCoreLinting))
, Flag "dstg-lint" (NoArg (setDynFlag Opt_DoStgLinting))
, Flag "dcmm-lint" (NoArg (setDynFlag Opt_DoCmmLinting))
, Flag "dasm-lint" (NoArg (setDynFlag Opt_DoAsmLinting))
, Flag "dshow-passes" (NoArg (do forceRecompile
setVerbosity $ Just 2))
, Flag "dfaststring-stats" (NoArg (setDynFlag Opt_D_faststring_stats))
, Flag "dno-llvm-mangler" (NoArg (setDynFlag Opt_NoLlvmMangler))
        ------ Machine dependent (-m<blah>) stuff ---------------------------
, Flag "monly-2-regs" (NoArg (addWarn "The -monly-2-regs flag does nothing; it will be removed in a future GHC release"))
, Flag "monly-3-regs" (NoArg (addWarn "The -monly-3-regs flag does nothing; it will be removed in a future GHC release"))
, Flag "monly-4-regs" (NoArg (addWarn "The -monly-4-regs flag does nothing; it will be removed in a future GHC release"))
, Flag "msse2" (NoArg (setDynFlag Opt_SSE2))
, Flag "msse4.2" (NoArg (setDynFlag Opt_SSE4_2))
------ Warning opts -------------------------------------------------
, Flag "W" (NoArg (mapM_ setWarningFlag minusWOpts))
, Flag "Werror" (NoArg (setDynFlag Opt_WarnIsError))
, Flag "Wwarn" (NoArg (unSetDynFlag Opt_WarnIsError))
, Flag "Wall" (NoArg (mapM_ setWarningFlag minusWallOpts))
, Flag "Wnot" (NoArg (do upd (\dfs -> dfs {warningFlags = IntSet.empty})
deprecate "Use -w instead"))
, Flag "w" (NoArg (upd (\dfs -> dfs {warningFlags = IntSet.empty})))
------ Plugin flags ------------------------------------------------
, Flag "fplugin-opt" (hasArg addPluginModuleNameOption)
, Flag "fplugin" (hasArg addPluginModuleName)
------ Optimisation flags ------------------------------------------
, Flag "O" (noArgM (setOptLevel 1))
, Flag "Onot" (noArgM (\dflags -> do deprecate "Use -O0 instead"
setOptLevel 0 dflags))
, Flag "Odph" (noArgM setDPHOpt)
, Flag "O" (optIntSuffixM (\mb_n -> setOptLevel (mb_n `orElse` 1)))
-- If the number is missing, use 1
, Flag "fsimplifier-phases" (intSuffix (\n d -> d{ simplPhases = n }))
, Flag "fmax-simplifier-iterations" (intSuffix (\n d -> d{ maxSimplIterations = n }))
, Flag "fsimpl-tick-factor" (intSuffix (\n d -> d{ simplTickFactor = n }))
, Flag "fspec-constr-threshold" (intSuffix (\n d -> d{ specConstrThreshold = Just n }))
, Flag "fno-spec-constr-threshold" (noArg (\d -> d{ specConstrThreshold = Nothing }))
, Flag "fspec-constr-count" (intSuffix (\n d -> d{ specConstrCount = Just n }))
, Flag "fno-spec-constr-count" (noArg (\d -> d{ specConstrCount = Nothing }))
, Flag "fliberate-case-threshold" (intSuffix (\n d -> d{ liberateCaseThreshold = Just n }))
, Flag "fno-liberate-case-threshold" (noArg (\d -> d{ liberateCaseThreshold = Nothing }))
, Flag "frule-check" (sepArg (\s d -> d{ ruleCheck = Just s }))
, Flag "fcontext-stack" (intSuffix (\n d -> d{ ctxtStkDepth = n }))
, Flag "fstrictness-before" (intSuffix (\n d -> d{ strictnessBefore = n : strictnessBefore d }))
, Flag "ffloat-lam-args" (intSuffix (\n d -> d{ floatLamArgs = Just n }))
, Flag "ffloat-all-lams" (noArg (\d -> d{ floatLamArgs = Nothing }))
------ Profiling ----------------------------------------------------
-- OLD profiling flags
, Flag "auto-all" (noArg (\d -> d { profAuto = ProfAutoAll } ))
, Flag "no-auto-all" (noArg (\d -> d { profAuto = NoProfAuto } ))
, Flag "auto" (noArg (\d -> d { profAuto = ProfAutoExports } ))
, Flag "no-auto" (noArg (\d -> d { profAuto = NoProfAuto } ))
, Flag "caf-all" (NoArg (setDynFlag Opt_AutoSccsOnIndividualCafs))
, Flag "no-caf-all" (NoArg (unSetDynFlag Opt_AutoSccsOnIndividualCafs))
-- NEW profiling flags
, Flag "fprof-auto" (noArg (\d -> d { profAuto = ProfAutoAll } ))
, Flag "fprof-auto-top" (noArg (\d -> d { profAuto = ProfAutoTop } ))
, Flag "fprof-auto-exported" (noArg (\d -> d { profAuto = ProfAutoExports } ))
, Flag "fprof-auto-calls" (noArg (\d -> d { profAuto = ProfAutoCalls } ))
, Flag "fno-prof-auto" (noArg (\d -> d { profAuto = NoProfAuto } ))
------ Compiler flags -----------------------------------------------
, Flag "fasm" (NoArg (setObjTarget HscAsm))
, Flag "fvia-c" (NoArg
(addWarn "The -fvia-c flag does nothing; it will be removed in a future GHC release"))
, Flag "fvia-C" (NoArg
(addWarn "The -fvia-C flag does nothing; it will be removed in a future GHC release"))
, Flag "fllvm" (NoArg (setObjTarget HscLlvm))
, Flag "fno-code" (NoArg (do upd $ \d -> d{ ghcLink=NoLink }
setTarget HscNothing))
, Flag "fbyte-code" (NoArg (setTarget HscInterpreted))
, Flag "fobject-code" (NoArg (setTarget defaultHscTarget))
, Flag "fglasgow-exts" (NoArg (enableGlasgowExts >> deprecate "Use individual extensions instead"))
, Flag "fno-glasgow-exts" (NoArg (disableGlasgowExts >> deprecate "Use individual extensions instead"))
------ Safe Haskell flags -------------------------------------------
, Flag "fpackage-trust" (NoArg setPackageTrust)
, Flag "fno-safe-infer" (NoArg (setSafeHaskell Sf_None))
]
++ map (mkFlag turnOn "f" setDynFlag ) fFlags
++ map (mkFlag turnOff "fno-" unSetDynFlag) fFlags
++ map (mkFlag turnOn "f" setWarningFlag ) fWarningFlags
++ map (mkFlag turnOff "fno-" unSetWarningFlag) fWarningFlags
++ map (mkFlag turnOn "f" setExtensionFlag ) fLangFlags
++ map (mkFlag turnOff "fno-" unSetExtensionFlag) fLangFlags
++ map (mkFlag turnOn "X" setExtensionFlag ) xFlags
++ map (mkFlag turnOff "XNo" unSetExtensionFlag) xFlags
++ map (mkFlag turnOn "X" setLanguage) languageFlags
++ map (mkFlag turnOn "X" setSafeHaskell) safeHaskellFlags
++ [ Flag "XGenerics" (NoArg (deprecate "it does nothing; look into -XDefaultSignatures and -XDeriveGeneric for generic programming support."))
, Flag "XNoGenerics" (NoArg (deprecate "it does nothing; look into -XDefaultSignatures and -XDeriveGeneric for generic programming support.")) ]
package_flags :: [Flag (CmdLineP DynFlags)]
package_flags = [
------- Packages ----------------------------------------------------
Flag "package-conf" (HasArg extraPkgConf_)
, Flag "no-user-package-conf" (NoArg (unSetDynFlag Opt_ReadUserPackageConf))
, Flag "package-name" (hasArg setPackageName)
, Flag "package-id" (HasArg exposePackageId)
, Flag "package" (HasArg exposePackage)
, Flag "hide-package" (HasArg hidePackage)
, Flag "hide-all-packages" (NoArg (setDynFlag Opt_HideAllPackages))
, Flag "ignore-package" (HasArg ignorePackage)
, Flag "syslib" (HasArg (\s -> do exposePackage s
deprecate "Use -package instead"))
, Flag "trust" (HasArg trustPackage)
, Flag "distrust" (HasArg distrustPackage)
, Flag "distrust-all-packages" (NoArg (setDynFlag Opt_DistrustAllPackages))
]
type TurnOnFlag = Bool -- True <=> we are turning the flag on
-- False <=> we are turning the flag off
turnOn :: TurnOnFlag; turnOn = True
turnOff :: TurnOnFlag; turnOff = False
type FlagSpec flag
= ( String -- Flag in string form
, flag -- Flag in internal form
, TurnOnFlag -> DynP ()) -- Extra action to run when the flag is found
-- Typically, emit a warning or error
mkFlag :: TurnOnFlag -- ^ True <=> it should be turned on
-> String -- ^ The flag prefix
-> (flag -> DynP ()) -- ^ What to do when the flag is found
-> FlagSpec flag -- ^ Specification of this particular flag
-> Flag (CmdLineP DynFlags)
mkFlag turn_on flagPrefix f (name, flag, extra_action)
= Flag (flagPrefix ++ name) (NoArg (f flag >> extra_action turn_on))
deprecatedForExtension :: String -> TurnOnFlag -> DynP ()
deprecatedForExtension lang turn_on
= deprecate ("use -X" ++ flag ++ " or pragma {-# LANGUAGE " ++ flag ++ " #-} instead")
where
flag | turn_on = lang
| otherwise = "No"++lang
useInstead :: String -> TurnOnFlag -> DynP ()
useInstead flag turn_on
= deprecate ("Use -f" ++ no ++ flag ++ " instead")
where
no = if turn_on then "" else "no-"
nop :: TurnOnFlag -> DynP ()
nop _ = return ()
-- | These @-f\<blah\>@ flags can all be reversed with @-fno-\<blah\>@
fWarningFlags :: [FlagSpec WarningFlag]
fWarningFlags = [
( "warn-dodgy-foreign-imports", Opt_WarnDodgyForeignImports, nop ),
( "warn-dodgy-exports", Opt_WarnDodgyExports, nop ),
( "warn-dodgy-imports", Opt_WarnDodgyImports, nop ),
( "warn-duplicate-exports", Opt_WarnDuplicateExports, nop ),
( "warn-hi-shadowing", Opt_WarnHiShadows, nop ),
( "warn-implicit-prelude", Opt_WarnImplicitPrelude, nop ),
( "warn-incomplete-patterns", Opt_WarnIncompletePatterns, nop ),
( "warn-incomplete-uni-patterns", Opt_WarnIncompleteUniPatterns, nop ),
( "warn-incomplete-record-updates", Opt_WarnIncompletePatternsRecUpd, nop ),
( "warn-missing-fields", Opt_WarnMissingFields, nop ),
( "warn-missing-import-lists", Opt_WarnMissingImportList, nop ),
( "warn-missing-methods", Opt_WarnMissingMethods, nop ),
( "warn-missing-signatures", Opt_WarnMissingSigs, nop ),
( "warn-missing-local-sigs", Opt_WarnMissingLocalSigs, nop ),
( "warn-name-shadowing", Opt_WarnNameShadowing, nop ),
( "warn-overlapping-patterns", Opt_WarnOverlappingPatterns, nop ),
( "warn-type-defaults", Opt_WarnTypeDefaults, nop ),
( "warn-monomorphism-restriction", Opt_WarnMonomorphism, nop ),
( "warn-unused-binds", Opt_WarnUnusedBinds, nop ),
( "warn-unused-imports", Opt_WarnUnusedImports, nop ),
( "warn-unused-matches", Opt_WarnUnusedMatches, nop ),
( "warn-warnings-deprecations", Opt_WarnWarningsDeprecations, nop ),
( "warn-deprecations", Opt_WarnWarningsDeprecations, nop ),
( "warn-deprecated-flags", Opt_WarnDeprecatedFlags, nop ),
( "warn-orphans", Opt_WarnOrphans, nop ),
( "warn-identities", Opt_WarnIdentities, nop ),
( "warn-auto-orphans", Opt_WarnAutoOrphans, nop ),
( "warn-tabs", Opt_WarnTabs, nop ),
( "warn-unrecognised-pragmas", Opt_WarnUnrecognisedPragmas, nop ),
( "warn-lazy-unlifted-bindings", Opt_WarnLazyUnliftedBindings, nop ),
( "warn-unused-do-bind", Opt_WarnUnusedDoBind, nop ),
( "warn-wrong-do-bind", Opt_WarnWrongDoBind, nop ),
( "warn-alternative-layout-rule-transitional", Opt_WarnAlternativeLayoutRuleTransitional, nop ),
( "warn-unsafe", Opt_WarnUnsafe, setWarnUnsafe ),
( "warn-safe", Opt_WarnSafe, setWarnSafe ) ]
-- | These @-f\<blah\>@ flags can all be reversed with @-fno-\<blah\>@
fFlags :: [FlagSpec DynFlag]
fFlags = [
( "print-explicit-foralls", Opt_PrintExplicitForalls, nop ),
( "strictness", Opt_Strictness, nop ),
( "specialise", Opt_Specialise, nop ),
( "float-in", Opt_FloatIn, nop ),
( "static-argument-transformation", Opt_StaticArgumentTransformation, nop ),
( "full-laziness", Opt_FullLaziness, nop ),
( "liberate-case", Opt_LiberateCase, nop ),
( "spec-constr", Opt_SpecConstr, nop ),
( "cse", Opt_CSE, nop ),
( "pedantic-bottoms", Opt_PedanticBottoms, nop ),
( "ignore-interface-pragmas", Opt_IgnoreInterfacePragmas, nop ),
( "omit-interface-pragmas", Opt_OmitInterfacePragmas, nop ),
( "expose-all-unfoldings", Opt_ExposeAllUnfoldings, nop ),
( "do-lambda-eta-expansion", Opt_DoLambdaEtaExpansion, nop ),
( "ignore-asserts", Opt_IgnoreAsserts, nop ),
( "do-eta-reduction", Opt_DoEtaReduction, nop ),
( "case-merge", Opt_CaseMerge, nop ),
( "unbox-strict-fields", Opt_UnboxStrictFields, nop ),
( "dicts-cheap", Opt_DictsCheap, nop ),
( "excess-precision", Opt_ExcessPrecision, nop ),
( "eager-blackholing", Opt_EagerBlackHoling, nop ),
( "print-bind-result", Opt_PrintBindResult, nop ),
( "force-recomp", Opt_ForceRecomp, nop ),
( "hpc-no-auto", Opt_Hpc_No_Auto, nop ),
( "rewrite-rules", Opt_EnableRewriteRules, useInstead "enable-rewrite-rules" ),
( "enable-rewrite-rules", Opt_EnableRewriteRules, nop ),
( "break-on-exception", Opt_BreakOnException, nop ),
( "break-on-error", Opt_BreakOnError, nop ),
( "print-evld-with-show", Opt_PrintEvldWithShow, nop ),
( "print-bind-contents", Opt_PrintBindContents, nop ),
( "run-cps", Opt_RunCPS, nop ),
( "run-cpsz", Opt_RunCPSZ, nop ),
( "new-codegen", Opt_TryNewCodeGen, nop ),
( "vectorise", Opt_Vectorise, nop ),
( "regs-graph", Opt_RegsGraph, nop ),
( "regs-iterative", Opt_RegsIterative, nop ),
( "gen-manifest", Opt_GenManifest, nop ),
( "embed-manifest", Opt_EmbedManifest, nop ),
( "ext-core", Opt_EmitExternalCore, nop ),
( "shared-implib", Opt_SharedImplib, nop ),
( "ghci-sandbox", Opt_GhciSandbox, nop ),
( "ghci-history", Opt_GhciHistory, nop ),
( "helpful-errors", Opt_HelpfulErrors, nop ),
( "building-cabal-package", Opt_BuildingCabalPackage, nop ),
( "implicit-import-qualified", Opt_ImplicitImportQualified, nop ),
( "prof-count-entries", Opt_ProfCountEntries, nop ),
( "prof-cafs", Opt_AutoSccsOnIndividualCafs, nop )
]
-- | These @-f\<blah\>@ flags can all be reversed with @-fno-\<blah\>@
fLangFlags :: [FlagSpec ExtensionFlag]
fLangFlags = [
( "th", Opt_TemplateHaskell,
deprecatedForExtension "TemplateHaskell" >> checkTemplateHaskellOk ),
( "fi", Opt_ForeignFunctionInterface,
deprecatedForExtension "ForeignFunctionInterface" ),
( "ffi", Opt_ForeignFunctionInterface,
deprecatedForExtension "ForeignFunctionInterface" ),
( "arrows", Opt_Arrows,
deprecatedForExtension "Arrows" ),
( "implicit-prelude", Opt_ImplicitPrelude,
deprecatedForExtension "ImplicitPrelude" ),
( "bang-patterns", Opt_BangPatterns,
deprecatedForExtension "BangPatterns" ),
( "monomorphism-restriction", Opt_MonomorphismRestriction,
deprecatedForExtension "MonomorphismRestriction" ),
( "mono-pat-binds", Opt_MonoPatBinds,
deprecatedForExtension "MonoPatBinds" ),
( "extended-default-rules", Opt_ExtendedDefaultRules,
deprecatedForExtension "ExtendedDefaultRules" ),
( "implicit-params", Opt_ImplicitParams,
deprecatedForExtension "ImplicitParams" ),
( "scoped-type-variables", Opt_ScopedTypeVariables,
deprecatedForExtension "ScopedTypeVariables" ),
( "parr", Opt_ParallelArrays,
deprecatedForExtension "ParallelArrays" ),
( "PArr", Opt_ParallelArrays,
deprecatedForExtension "ParallelArrays" ),
( "allow-overlapping-instances", Opt_OverlappingInstances,
deprecatedForExtension "OverlappingInstances" ),
( "allow-undecidable-instances", Opt_UndecidableInstances,
deprecatedForExtension "UndecidableInstances" ),
( "allow-incoherent-instances", Opt_IncoherentInstances,
deprecatedForExtension "IncoherentInstances" )
]
supportedLanguages :: [String]
supportedLanguages = [ name | (name, _, _) <- languageFlags ]
supportedLanguageOverlays :: [String]
supportedLanguageOverlays = [ name | (name, _, _) <- safeHaskellFlags ]
supportedExtensions :: [String]
supportedExtensions = [ name' | (name, _, _) <- xFlags, name' <- [name, "No" ++ name] ]
supportedLanguagesAndExtensions :: [String]
supportedLanguagesAndExtensions =
supportedLanguages ++ supportedLanguageOverlays ++ supportedExtensions
-- | These -X<blah> flags cannot be reversed with -XNo<blah>
languageFlags :: [FlagSpec Language]
languageFlags = [
( "Haskell98", Haskell98, nop ),
( "Haskell2010", Haskell2010, nop )
]
-- | These -X<blah> flags cannot be reversed with -XNo<blah>
-- They are used to place hard requirements on what GHC Haskell language
-- features can be used.
safeHaskellFlags :: [FlagSpec SafeHaskellMode]
safeHaskellFlags = [mkF Sf_Unsafe, mkF Sf_Trustworthy, mkF Sf_Safe]
where mkF flag = (showPpr flag, flag, nop)
-- | These -X<blah> flags can all be reversed with -XNo<blah>
xFlags :: [FlagSpec ExtensionFlag]
xFlags = [
( "CPP", Opt_Cpp, nop ),
( "PostfixOperators", Opt_PostfixOperators, nop ),
( "TupleSections", Opt_TupleSections, nop ),
( "PatternGuards", Opt_PatternGuards, nop ),
( "UnicodeSyntax", Opt_UnicodeSyntax, nop ),
( "MagicHash", Opt_MagicHash, nop ),
( "PolymorphicComponents", Opt_PolymorphicComponents, nop ),
( "ExistentialQuantification", Opt_ExistentialQuantification, nop ),
( "KindSignatures", Opt_KindSignatures, nop ),
( "EmptyDataDecls", Opt_EmptyDataDecls, nop ),
( "ParallelListComp", Opt_ParallelListComp, nop ),
( "TransformListComp", Opt_TransformListComp, nop ),
( "MonadComprehensions", Opt_MonadComprehensions, nop),
( "ForeignFunctionInterface", Opt_ForeignFunctionInterface, nop ),
( "UnliftedFFITypes", Opt_UnliftedFFITypes, nop ),
( "InterruptibleFFI", Opt_InterruptibleFFI, nop ),
( "CApiFFI", Opt_CApiFFI, nop ),
( "GHCForeignImportPrim", Opt_GHCForeignImportPrim, nop ),
( "LiberalTypeSynonyms", Opt_LiberalTypeSynonyms, nop ),
( "Rank2Types", Opt_Rank2Types, nop ),
( "RankNTypes", Opt_RankNTypes, nop ),
( "ImpredicativeTypes", Opt_ImpredicativeTypes, nop),
( "TypeOperators", Opt_TypeOperators, nop ),
( "RecursiveDo", Opt_RecursiveDo, -- Enables 'mdo'
deprecatedForExtension "DoRec"),
( "DoRec", Opt_DoRec, nop ), -- Enables 'rec' keyword
( "Arrows", Opt_Arrows, nop ),
( "ParallelArrays", Opt_ParallelArrays, nop ),
( "TemplateHaskell", Opt_TemplateHaskell, checkTemplateHaskellOk ),
( "QuasiQuotes", Opt_QuasiQuotes, nop ),
( "ImplicitPrelude", Opt_ImplicitPrelude, nop ),
( "RecordWildCards", Opt_RecordWildCards, nop ),
( "NamedFieldPuns", Opt_RecordPuns, nop ),
( "RecordPuns", Opt_RecordPuns,
deprecatedForExtension "NamedFieldPuns" ),
( "DisambiguateRecordFields", Opt_DisambiguateRecordFields, nop ),
( "OverloadedStrings", Opt_OverloadedStrings, nop ),
( "GADTs", Opt_GADTs, nop ),
( "GADTSyntax", Opt_GADTSyntax, nop ),
( "ViewPatterns", Opt_ViewPatterns, nop ),
( "TypeFamilies", Opt_TypeFamilies, nop ),
( "BangPatterns", Opt_BangPatterns, nop ),
( "MonomorphismRestriction", Opt_MonomorphismRestriction, nop ),
( "NPlusKPatterns", Opt_NPlusKPatterns, nop ),
( "DoAndIfThenElse", Opt_DoAndIfThenElse, nop ),
( "RebindableSyntax", Opt_RebindableSyntax, nop ),
( "ConstraintKinds", Opt_ConstraintKinds, nop ),
( "PolyKinds", Opt_PolyKinds, nop ),
( "DataKinds", Opt_DataKinds, nop ),
( "MonoPatBinds", Opt_MonoPatBinds,
\ turn_on -> when turn_on $ deprecate "Experimental feature now removed; has no effect" ),
( "ExplicitForAll", Opt_ExplicitForAll, nop ),
( "AlternativeLayoutRule", Opt_AlternativeLayoutRule, nop ),
( "AlternativeLayoutRuleTransitional",Opt_AlternativeLayoutRuleTransitional, nop ),
( "DatatypeContexts", Opt_DatatypeContexts,
\ turn_on -> when turn_on $ deprecate "It was widely considered a misfeature, and has been removed from the Haskell language." ),
( "NondecreasingIndentation", Opt_NondecreasingIndentation, nop ),
( "RelaxedLayout", Opt_RelaxedLayout, nop ),
( "TraditionalRecordSyntax", Opt_TraditionalRecordSyntax, nop ),
( "MonoLocalBinds", Opt_MonoLocalBinds, nop ),
( "RelaxedPolyRec", Opt_RelaxedPolyRec,
\ turn_on -> if not turn_on
then deprecate "You can't turn off RelaxedPolyRec any more"
else return () ),
( "ExtendedDefaultRules", Opt_ExtendedDefaultRules, nop ),
( "ImplicitParams", Opt_ImplicitParams, nop ),
( "ScopedTypeVariables", Opt_ScopedTypeVariables, nop ),
( "PatternSignatures", Opt_ScopedTypeVariables,
deprecatedForExtension "ScopedTypeVariables" ),
( "UnboxedTuples", Opt_UnboxedTuples, nop ),
( "StandaloneDeriving", Opt_StandaloneDeriving, nop ),
( "DeriveDataTypeable", Opt_DeriveDataTypeable, nop ),
( "DeriveFunctor", Opt_DeriveFunctor, nop ),
( "DeriveTraversable", Opt_DeriveTraversable, nop ),
( "DeriveFoldable", Opt_DeriveFoldable, nop ),
( "DeriveGeneric", Opt_DeriveGeneric, nop ),
( "DefaultSignatures", Opt_DefaultSignatures, nop ),
( "TypeSynonymInstances", Opt_TypeSynonymInstances, nop ),
( "FlexibleContexts", Opt_FlexibleContexts, nop ),
( "FlexibleInstances", Opt_FlexibleInstances, nop ),
( "ConstrainedClassMethods", Opt_ConstrainedClassMethods, nop ),
( "MultiParamTypeClasses", Opt_MultiParamTypeClasses, nop ),
( "FunctionalDependencies", Opt_FunctionalDependencies, nop ),
( "GeneralizedNewtypeDeriving", Opt_GeneralizedNewtypeDeriving, setGenDeriving ),
( "OverlappingInstances", Opt_OverlappingInstances, nop ),
( "UndecidableInstances", Opt_UndecidableInstances, nop ),
( "IncoherentInstances", Opt_IncoherentInstances, nop ),
( "PackageImports", Opt_PackageImports, nop ),
( "ApplicativeFix", Opt_ApplicativeFix, nop )
]
defaultFlags :: [DynFlag]
defaultFlags
= [ Opt_AutoLinkPackages,
Opt_ReadUserPackageConf,
Opt_SharedImplib,
#if GHC_DEFAULT_NEW_CODEGEN
Opt_TryNewCodeGen,
#endif
Opt_GenManifest,
Opt_EmbedManifest,
Opt_PrintBindContents,
Opt_GhciSandbox,
Opt_GhciHistory,
Opt_HelpfulErrors,
Opt_ProfCountEntries
]
++ [f | (ns,f) <- optLevelFlags, 0 `elem` ns]
-- The default -O0 options
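-- | Each triple (f, state, g) means: switching f on also forces g into the
-- given state (see 'setExtensionFlag'); e.g. enabling RankNTypes enables
-- ExplicitForAll, while enabling RebindableSyntax switches ImplicitPrelude
-- off.  Switching f off leaves g untouched.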
impliedFlags :: [(ExtensionFlag, TurnOnFlag, ExtensionFlag)]
impliedFlags
= [ (Opt_RankNTypes, turnOn, Opt_ExplicitForAll)
, (Opt_Rank2Types, turnOn, Opt_ExplicitForAll)
, (Opt_ScopedTypeVariables, turnOn, Opt_ExplicitForAll)
, (Opt_LiberalTypeSynonyms, turnOn, Opt_ExplicitForAll)
, (Opt_ExistentialQuantification, turnOn, Opt_ExplicitForAll)
, (Opt_PolymorphicComponents, turnOn, Opt_ExplicitForAll)
, (Opt_FlexibleInstances, turnOn, Opt_TypeSynonymInstances)
, (Opt_FunctionalDependencies, turnOn, Opt_MultiParamTypeClasses)
, (Opt_RebindableSyntax, turnOff, Opt_ImplicitPrelude) -- NB: turn off!
, (Opt_GADTs, turnOn, Opt_GADTSyntax)
, (Opt_GADTs, turnOn, Opt_MonoLocalBinds)
, (Opt_TypeFamilies, turnOn, Opt_MonoLocalBinds)
, (Opt_TypeFamilies, turnOn, Opt_KindSignatures) -- Type families use kind signatures
-- all over the place
, (Opt_ImpredicativeTypes, turnOn, Opt_RankNTypes)
-- Record wild-cards implies field disambiguation
-- Otherwise if you write (C {..}) you may well get
-- stuff like " 'a' not in scope ", which is a bit silly
-- if the compiler has just filled in field 'a' of constructor 'C'
, (Opt_RecordWildCards, turnOn, Opt_DisambiguateRecordFields)
, (Opt_ParallelArrays, turnOn, Opt_ParallelListComp)
]
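-- | For each flag, the -O levels at which it is enabled by default
-- (the level-0 entries feed into 'defaultFlags').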
optLevelFlags :: [([Int], DynFlag)]
optLevelFlags
= [ ([0], Opt_IgnoreInterfacePragmas)
, ([0], Opt_OmitInterfacePragmas)
, ([1,2], Opt_IgnoreAsserts)
, ([1,2], Opt_EnableRewriteRules) -- Off for -O0; see Note [Scoping for Builtin rules]
-- in PrelRules
, ([1,2], Opt_DoEtaReduction)
, ([1,2], Opt_CaseMerge)
, ([1,2], Opt_Strictness)
, ([1,2], Opt_CSE)
, ([1,2], Opt_FullLaziness)
, ([1,2], Opt_Specialise)
, ([1,2], Opt_FloatIn)
, ([2], Opt_LiberateCase)
, ([2], Opt_SpecConstr)
, ([2], Opt_RegsGraph)
-- , ([2], Opt_StaticArgumentTransformation)
-- Max writes: I think it's probably best not to enable SAT with -O2 for the
-- 6.10 release. The version of SAT in HEAD at the moment doesn't incorporate
-- several improvements to the heuristics, and I'm concerned that without
-- those changes SAT will interfere with some attempts to write "high
-- performance Haskell", as we saw in some posts on Haskell-Cafe earlier
-- this year. In particular, the version in HEAD lacks the tail call
-- criterion, so many things that look like reasonable loops will be
-- turned into functions with extra (unnecessary) thunk creation.
, ([0,1,2], Opt_DoLambdaEtaExpansion)
-- This one is important for a tiresome reason:
-- we want to make sure that the bindings for data
-- constructors are eta-expanded. This is probably
-- a good thing anyway, but it seems fragile.
]
-- -----------------------------------------------------------------------------
-- Standard sets of warning options
standardWarnings :: [WarningFlag]
standardWarnings
= [ Opt_WarnWarningsDeprecations,
Opt_WarnDeprecatedFlags,
Opt_WarnUnrecognisedPragmas,
Opt_WarnOverlappingPatterns,
Opt_WarnMissingFields,
Opt_WarnMissingMethods,
Opt_WarnDuplicateExports,
Opt_WarnLazyUnliftedBindings,
Opt_WarnDodgyForeignImports,
Opt_WarnWrongDoBind,
Opt_WarnAlternativeLayoutRuleTransitional
]
minusWOpts :: [WarningFlag]
-- Things you get with -W
minusWOpts
= standardWarnings ++
[ Opt_WarnUnusedBinds,
Opt_WarnUnusedMatches,
Opt_WarnUnusedImports,
Opt_WarnIncompletePatterns,
Opt_WarnDodgyExports,
Opt_WarnDodgyImports
]
minusWallOpts :: [WarningFlag]
-- Things you get with -Wall
minusWallOpts
= minusWOpts ++
[ Opt_WarnTypeDefaults,
Opt_WarnNameShadowing,
Opt_WarnMissingSigs,
Opt_WarnHiShadows,
Opt_WarnOrphans,
Opt_WarnUnusedDoBind
]
enableGlasgowExts :: DynP ()
enableGlasgowExts = do setDynFlag Opt_PrintExplicitForalls
mapM_ setExtensionFlag glasgowExtsFlags
disableGlasgowExts :: DynP ()
disableGlasgowExts = do unSetDynFlag Opt_PrintExplicitForalls
mapM_ unSetExtensionFlag glasgowExtsFlags
glasgowExtsFlags :: [ExtensionFlag]
glasgowExtsFlags = [
Opt_ForeignFunctionInterface
, Opt_UnliftedFFITypes
, Opt_ImplicitParams
, Opt_ScopedTypeVariables
, Opt_UnboxedTuples
, Opt_TypeSynonymInstances
, Opt_StandaloneDeriving
, Opt_DeriveDataTypeable
, Opt_DeriveFunctor
, Opt_DeriveFoldable
, Opt_DeriveTraversable
, Opt_DeriveGeneric
, Opt_FlexibleContexts
, Opt_FlexibleInstances
, Opt_ConstrainedClassMethods
, Opt_MultiParamTypeClasses
, Opt_FunctionalDependencies
, Opt_MagicHash
, Opt_PolymorphicComponents
, Opt_ExistentialQuantification
, Opt_UnicodeSyntax
, Opt_PostfixOperators
, Opt_PatternGuards
, Opt_LiberalTypeSynonyms
, Opt_RankNTypes
, Opt_TypeOperators
, Opt_DoRec
, Opt_ParallelListComp
, Opt_EmptyDataDecls
, Opt_KindSignatures
, Opt_GeneralizedNewtypeDeriving ]
#ifdef GHCI
-- Consult the RTS to find whether GHC itself has been built profiled
-- If so, you can't use Template Haskell
foreign import ccall unsafe "rts_isProfiled" rtsIsProfiledIO :: IO CInt
rtsIsProfiled :: Bool
rtsIsProfiled = unsafePerformIO rtsIsProfiledIO /= 0
#endif
setWarnSafe :: Bool -> DynP ()
setWarnSafe True = getCurLoc >>= \l -> upd (\d -> d { warnSafeOnLoc = l })
setWarnSafe False = return ()
setWarnUnsafe :: Bool -> DynP ()
setWarnUnsafe True = getCurLoc >>= \l -> upd (\d -> d { warnUnsafeOnLoc = l })
setWarnUnsafe False = return ()
setPackageTrust :: DynP ()
setPackageTrust = do
setDynFlag Opt_PackageTrust
l <- getCurLoc
upd $ \d -> d { pkgTrustOnLoc = l }
setGenDeriving :: Bool -> DynP ()
setGenDeriving True = getCurLoc >>= \l -> upd (\d -> d { newDerivOnLoc = l })
setGenDeriving False = return ()
checkTemplateHaskellOk :: Bool -> DynP ()
#ifdef GHCI
checkTemplateHaskellOk turn_on
| turn_on && rtsIsProfiled
= addErr "You can't use Template Haskell with a profiled compiler"
| otherwise
= getCurLoc >>= \l -> upd (\d -> d { thOnLoc = l })
#else
-- In stage 1 we don't know that the RTS has rts_isProfiled,
-- so we simply say "ok". It doesn't matter because TH isn't
-- available in stage 1 anyway.
checkTemplateHaskellOk _ = return ()
#endif
{- **********************************************************************
%* *
DynFlags constructors
%* *
%********************************************************************* -}
type DynP = EwM (CmdLineP DynFlags)
upd :: (DynFlags -> DynFlags) -> DynP ()
upd f = liftEwM (do dflags <- getCmdLineState
putCmdLineState $! f dflags)
updM :: (DynFlags -> DynP DynFlags) -> DynP ()
updM f = do dflags <- liftEwM getCmdLineState
dflags' <- f dflags
liftEwM $ putCmdLineState $! dflags'
--------------- Constructor functions for OptKind -----------------
noArg :: (DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
noArg fn = NoArg (upd fn)
noArgM :: (DynFlags -> DynP DynFlags) -> OptKind (CmdLineP DynFlags)
noArgM fn = NoArg (updM fn)
noArgDF :: (DynFlags -> DynFlags) -> String -> OptKind (CmdLineP DynFlags)
noArgDF fn deprec = NoArg (upd fn >> deprecate deprec)
hasArg :: (String -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
hasArg fn = HasArg (upd . fn)
hasArgDF :: (String -> DynFlags -> DynFlags) -> String -> OptKind (CmdLineP DynFlags)
hasArgDF fn deprec = HasArg (\s -> do upd (fn s)
deprecate deprec)
sepArg :: (String -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
sepArg fn = SepArg (upd . fn)
intSuffix :: (Int -> DynFlags -> DynFlags) -> OptKind (CmdLineP DynFlags)
intSuffix fn = IntSuffix (\n -> upd (fn n))
optIntSuffixM :: (Maybe Int -> DynFlags -> DynP DynFlags)
-> OptKind (CmdLineP DynFlags)
optIntSuffixM fn = OptIntSuffix (\mi -> updM (fn mi))
setDumpFlag :: DynFlag -> OptKind (CmdLineP DynFlags)
setDumpFlag dump_flag = NoArg (setDumpFlag' dump_flag)
--------------------------
setDynFlag, unSetDynFlag :: DynFlag -> DynP ()
setDynFlag f = upd (\dfs -> dopt_set dfs f)
unSetDynFlag f = upd (\dfs -> dopt_unset dfs f)
--------------------------
setWarningFlag, unSetWarningFlag :: WarningFlag -> DynP ()
setWarningFlag f = upd (\dfs -> wopt_set dfs f)
unSetWarningFlag f = upd (\dfs -> wopt_unset dfs f)
--------------------------
setExtensionFlag, unSetExtensionFlag :: ExtensionFlag -> DynP ()
setExtensionFlag f = do upd (\dfs -> xopt_set dfs f)
sequence_ deps
where
deps = [ if turn_on then setExtensionFlag d
else unSetExtensionFlag d
| (f', turn_on, d) <- impliedFlags, f' == f ]
-- When you set f, set the ones it implies
-- NB: use setExtensionFlag recursively, in case the implied flags
-- implies further flags
unSetExtensionFlag f = upd (\dfs -> xopt_unset dfs f)
-- When you un-set f, however, we don't un-set the things it implies
-- (except for -fno-glasgow-exts, which is treated specially)
--------------------------
alterSettings :: (Settings -> Settings) -> DynFlags -> DynFlags
alterSettings f dflags = dflags { settings = f (settings dflags) }
--------------------------
setDumpFlag' :: DynFlag -> DynP ()
setDumpFlag' dump_flag
= do setDynFlag dump_flag
when want_recomp forceRecompile
where
-- Certain dumpy-things are really interested in what's going
-- on during recompilation checking, so in those cases we
-- don't want to turn it off.
want_recomp = dump_flag `notElem` [Opt_D_dump_if_trace,
Opt_D_dump_hi_diffs]
forceRecompile :: DynP ()
-- Whenever we -ddump, force recompilation (by switching off the
-- recompilation checker), else you don't see the dump! However,
-- don't switch it off in --make mode, else *everything* gets
-- recompiled which probably isn't what you want
forceRecompile = do dfs <- liftEwM getCmdLineState
when (force_recomp dfs) (setDynFlag Opt_ForceRecomp)
where
force_recomp dfs = isOneShot (ghcMode dfs)
setVerboseCore2Core :: DynP ()
setVerboseCore2Core = do forceRecompile
setDynFlag Opt_D_verbose_core2core
upd (\dfs -> dfs { shouldDumpSimplPhase = Nothing })
setDumpSimplPhases :: String -> DynP ()
setDumpSimplPhases s = do forceRecompile
upd (\dfs -> dfs { shouldDumpSimplPhase = Just spec })
where
spec = case s of { ('=' : s') -> s'; _ -> s }
setVerbosity :: Maybe Int -> DynP ()
setVerbosity mb_n = upd (\dfs -> dfs{ verbosity = mb_n `orElse` 3 })
addCmdlineHCInclude :: String -> DynP ()
addCmdlineHCInclude a = upd (\s -> s{cmdlineHcIncludes = a : cmdlineHcIncludes s})
extraPkgConf_ :: FilePath -> DynP ()
extraPkgConf_ p = upd (\s -> s{ extraPkgConfs = p : extraPkgConfs s })
exposePackage, exposePackageId, hidePackage, ignorePackage,
trustPackage, distrustPackage :: String -> DynP ()
exposePackage p =
upd (\s -> s{ packageFlags = ExposePackage p : packageFlags s })
exposePackageId p =
upd (\s -> s{ packageFlags = ExposePackageId p : packageFlags s })
hidePackage p =
upd (\s -> s{ packageFlags = HidePackage p : packageFlags s })
ignorePackage p =
upd (\s -> s{ packageFlags = IgnorePackage p : packageFlags s })
trustPackage p = exposePackage p >> -- both trust and distrust also expose a package
upd (\s -> s{ packageFlags = TrustPackage p : packageFlags s })
distrustPackage p = exposePackage p >>
upd (\s -> s{ packageFlags = DistrustPackage p : packageFlags s })
setPackageName :: String -> DynFlags -> DynFlags
setPackageName p s = s{ thisPackage = stringToPackageId p }
-- If we're linking a binary, then only targets that produce object
-- code are allowed (requests for other target types are ignored).
setTarget :: HscTarget -> DynP ()
setTarget l = upd set
where
set dfs
| ghcLink dfs /= LinkBinary || isObjectTarget l = dfs{ hscTarget = l }
| otherwise = dfs
-- Changes the target only if we're compiling object code. This is
-- used by -fasm and -fllvm, which switch from one to the other, but
-- not from bytecode to object-code. The idea is that -fasm/-fllvm
-- can be safely used in an OPTIONS_GHC pragma.
setObjTarget :: HscTarget -> DynP ()
setObjTarget l = updM set
where
set dflags
| isObjectTarget (hscTarget dflags)
= case l of
HscC
| cGhcUnregisterised /= "YES" ->
do addWarn ("Compiler not unregisterised, so ignoring " ++ flag)
return dflags
HscAsm
| cGhcWithNativeCodeGen /= "YES" ->
do addWarn ("Compiler has no native codegen, so ignoring " ++
flag)
return dflags
HscLlvm
| not ((arch == ArchX86_64) && (os == OSLinux || os == OSDarwin)) &&
(not opt_Static || opt_PIC)
->
do addWarn ("Ignoring " ++ flag ++ " as it is incompatible with -fPIC and -dynamic on this platform")
return dflags
_ -> return $ dflags { hscTarget = l }
| otherwise = return dflags
where platform = targetPlatform dflags
arch = platformArch platform
os = platformOS platform
flag = showHscTargetFlag l
setOptLevel :: Int -> DynFlags -> DynP DynFlags
setOptLevel n dflags
| hscTarget dflags == HscInterpreted && n > 0
= do addWarn "-O conflicts with --interactive; -O ignored."
return dflags
| otherwise
= return (updOptLevel n dflags)
-- -Odph is equivalent to
--
-- -O2 optimise as much as possible
-- -fmax-simplifier-iterations20 this is necessary sometimes
-- -fsimplifier-phases=3 we use an additional simplifier phase for fusion
--
setDPHOpt :: DynFlags -> DynP DynFlags
setDPHOpt dflags = setOptLevel 2 (dflags { maxSimplIterations = 20
, simplPhases = 3
})
setMainIs :: String -> DynP ()
setMainIs arg
| not (null main_fn) && isLower (head main_fn)
-- The arg looked like "Foo.Bar.baz"
= upd $ \d -> d{ mainFunIs = Just main_fn,
mainModIs = mkModule mainPackageId (mkModuleName main_mod) }
| isUpper (head arg) -- The arg looked like "Foo" or "Foo.Bar"
= upd $ \d -> d{ mainModIs = mkModule mainPackageId (mkModuleName arg) }
| otherwise -- The arg looked like "baz"
= upd $ \d -> d{ mainFunIs = Just arg }
where
(main_mod, main_fn) = splitLongestPrefix arg (== '.')
-----------------------------------------------------------------------------
-- Paths & Libraries
addImportPath, addLibraryPath, addIncludePath, addFrameworkPath :: FilePath -> DynP ()
-- -i on its own deletes the import paths
addImportPath "" = upd (\s -> s{importPaths = []})
addImportPath p = upd (\s -> s{importPaths = importPaths s ++ splitPathList p})
addLibraryPath p =
upd (\s -> s{libraryPaths = libraryPaths s ++ splitPathList p})
addIncludePath p =
upd (\s -> s{includePaths = includePaths s ++ splitPathList p})
addFrameworkPath p =
upd (\s -> s{frameworkPaths = frameworkPaths s ++ splitPathList p})
#ifndef mingw32_TARGET_OS
split_marker :: Char
split_marker = ':' -- not configurable (ToDo)
#endif
splitPathList :: String -> [String]
splitPathList s = filter notNull (splitUp s)
-- empty paths are ignored: there might be a trailing
-- ':' in the initial list, for example. Empty paths can
-- cause confusion when they are translated into -I options
-- for passing to gcc.
where
#ifndef mingw32_TARGET_OS
splitUp xs = split split_marker xs
#else
-- Windows: 'hybrid' support for DOS-style paths in directory lists.
--
-- That is, if "foo:bar:baz" is used, this interpreted as
-- consisting of three entries, 'foo', 'bar', 'baz'.
-- However, with "c:/foo:c:\\foo;x:/bar", this is interpreted
-- as 3 elts, "c:/foo", "c:\\foo", "x:/bar"
--
-- Notice that no attempt is made to fully replace the 'standard'
-- split marker ':' with the Windows / DOS one, ';'. The reason being
-- that this will cause too much breakage for users & ':' will
-- work fine even with DOS paths, if you're not insisting on being silly.
-- So, use either.
splitUp [] = []
splitUp (x:':':div:xs) | div `elem` dir_markers
= ((x:':':div:p): splitUp rs)
where
(p,rs) = findNextPath xs
-- we used to check for existence of the path here, but that
-- required the IO monad to be threaded through the command-line
-- parser which is quite inconvenient. The
splitUp xs = cons p (splitUp rs)
where
(p,rs) = findNextPath xs
cons "" xs = xs
cons x xs = x:xs
-- will be called either when we've consumed nought or the
-- "<Drive>:/" part of a DOS path, so splitting is just a Q of
-- finding the next split marker.
findNextPath xs =
case break (`elem` split_markers) xs of
(p, _:ds) -> (p, ds)
(p, xs) -> (p, xs)
split_markers :: [Char]
split_markers = [':', ';']
dir_markers :: [Char]
dir_markers = ['/', '\\']
#endif
-- -----------------------------------------------------------------------------
-- tmpDir, where we store temporary files.
setTmpDir :: FilePath -> DynFlags -> DynFlags
setTmpDir dir = alterSettings (\s -> s { sTmpDir = normalise dir })
-- we used to fix /cygdrive/c/.. on Windows, but this doesn't
-- seem necessary now --SDM 7/2/2008
-----------------------------------------------------------------------------
-- RTS opts
setRtsOpts :: String -> DynP ()
setRtsOpts arg = upd $ \ d -> d {rtsOpts = Just arg}
setRtsOptsEnabled :: RtsOptsEnabled -> DynP ()
setRtsOptsEnabled arg = upd $ \ d -> d {rtsOptsEnabled = arg}
-----------------------------------------------------------------------------
-- Hpc stuff
setOptHpcDir :: String -> DynP ()
setOptHpcDir arg = upd $ \ d -> d{hpcDir = arg}
-----------------------------------------------------------------------------
-- Via-C compilation stuff
-- There are some options that we need to pass to gcc when compiling
-- Haskell code via C, but are only supported by recent versions of
-- gcc. The configure script decides which of these options we need,
-- and puts them in the "settings" file in $topdir. The advantage of
-- having these in a separate file is that the file can be created at
-- install-time depending on the available gcc version, and even
-- re-generated later if gcc is upgraded.
--
-- The options below are not dependent on the version of gcc, only the
-- platform.
picCCOpts :: DynFlags -> [String]
picCCOpts dflags
= case platformOS (targetPlatform dflags) of
OSDarwin
-- Apple prefers to do things the other way round.
-- PIC is on by default.
-- -mdynamic-no-pic:
-- Turn off PIC code generation.
-- -fno-common:
-- Don't generate "common" symbols - these are unwanted
-- in dynamic libraries.
| opt_PIC -> ["-fno-common", "-U __PIC__", "-D__PIC__"]
| otherwise -> ["-mdynamic-no-pic"]
OSMinGW32 -- no -fPIC for Windows
| opt_PIC -> ["-U __PIC__", "-D__PIC__"]
| otherwise -> []
_
-- we need -fPIC for C files when we are compiling with -dynamic,
-- otherwise things like stub.c files don't get compiled
-- correctly. They need to reference data in the Haskell
-- objects, but can't without -fPIC. See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/PositionIndependentCode
| opt_PIC || not opt_Static -> ["-fPIC", "-U __PIC__", "-D__PIC__"]
| otherwise -> []
-- -----------------------------------------------------------------------------
-- Splitting
can_split :: Bool
can_split = cSupportsSplitObjs == "YES"
-- -----------------------------------------------------------------------------
-- Compiler Info
compilerInfo :: DynFlags -> [(String, String)]
compilerInfo dflags
= -- We always make "Project name" be first to keep parsing in
-- other languages simple, i.e. when looking for other fields,
-- you don't have to worry whether there is a leading '[' or not
("Project name", cProjectName)
-- Next come the settings, so anything else can be overridden
-- in the settings file (as "lookup" uses the first match for the
-- key)
: rawSettings dflags
++ [("Project version", cProjectVersion),
("Booter version", cBooterVersion),
("Stage", cStage),
("Build platform", cBuildPlatformString),
("Host platform", cHostPlatformString),
("Target platform", cTargetPlatformString),
("Have interpreter", cGhcWithInterpreter),
("Object splitting supported", cSupportsSplitObjs),
("Have native code generator", cGhcWithNativeCodeGen),
("Support SMP", cGhcWithSMP),
("Unregisterised", cGhcUnregisterised),
("Tables next to code", cGhcEnableTablesNextToCode),
("RTS ways", cGhcRTSWays),
("Leading underscore", cLeadingUnderscore),
("Debug on", show debugIsOn),
("LibDir", topDir dflags),
("Global Package DB", systemPackageConfig dflags),
("Gcc Linker flags", show cGccLinkerOpts),
("Ld Linker flags", show cLdLinkerOpts)
]
| ilyasergey/GHC-XAppFix | compiler/main/DynFlags.hs | bsd-3-clause | 111,305 | 0 | 28 | 30,676 | 21,070 | 11,946 | 9,124 | 1,804 | 4 |
module Test.Utils (
testProvider
, (@=~?)
, (?=~@)
, (=~)
) where
import qualified Data.Array.Repa as R
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Unboxed as VU
import qualified Test.Framework as TF
import qualified Test.Framework.Providers.HUnit as TFH
import qualified Test.HUnit as HU
infix 4 =~
class AEq a where
(=~) :: a -> a -> Bool
instance AEq Double where
x =~ y = abs ( x - y ) < (1.0e-8 :: Double)
instance AEq Int where
x =~ y = x == y
instance (AEq a) => AEq [a] where
xs =~ ys = (length xs == length ys) &&
(all (\(x,y) -> x =~ y) $ zip xs ys)
instance (AEq a) => AEq (Maybe a) where
Nothing =~ Nothing = True
Just x =~ Just y = x =~ y
_ =~ _ = False
instance (AEq a, AEq b) => AEq (a, b) where
(a, b) =~ (aa, bb) = (a =~ aa) && (b =~ bb)
instance (AEq a, AEq b, AEq c) => AEq (a, b, c) where
(a, b, c) =~ (aa, bb, cc) = (a =~ aa) && (b =~ bb) && (c =~ cc)
instance (AEq a, AEq b, AEq c, AEq d) => AEq (a, b, c, d) where
(a, b, c, d) =~ (aa, bb, cc, dd) = (a =~ aa) && (b =~ bb) &&
(c =~ cc) && (d =~ dd)
instance (AEq e, R.Shape sh, R.Source r e) => AEq (R.Array r sh e) where
xs =~ ys = (R.extent xs == R.extent ys) &&
(R.foldAllS (&&) True $ R.zipWith (=~) xs ys)
instance (AEq a, VG.Vector VU.Vector a) => AEq (VU.Vector a) where
xs =~ ys = (VG.toList xs) =~ (VG.toList ys)
instance (AEq a, VG.Vector VS.Vector a) => AEq (VS.Vector a) where
xs =~ ys = (VG.toList xs) =~ (VG.toList ys)
-- This function takes a name for the test group, a testing function, and a
-- data provider, and creates a testGroup.
testProvider :: String -> (a -> HU.Assertion) -> [a] -> TF.Test
testProvider testGroupName testFunction =
TF.testGroup testGroupName . map createTest . zipWith assignName [1::Int ..]
where
createTest (name, dataSet) = TFH.testCase name $ testFunction dataSet
assignName setNumber dataSet = ("Data set " ++ show setNumber, dataSet)
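-- A hypothetical usage sketch (test name and data sets made up):
--
-- > additionTests :: TF.Test
-- > additionTests = testProvider "addition" (\(a, b, c) -> a + b @=~? c)
-- >                              [(1, 2, 3), (0.1, 0.2, 0.3 :: Double)]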
-- "Almost equal" assertions for HUnit
infix 4 @=~?
(@=~?) :: (Show a, AEq a) => a -> a -> HU.Assertion
(@=~?) expected actual = expected =~ actual HU.@? assertionMsg
where
assertionMsg = "Expected : " ++ show expected ++
"\nActual : " ++ show actual
infix 4 ?=~@
(?=~@) :: (Show a, AEq a) => a -> a -> HU.Assertion
(?=~@) actual expected = actual =~ expected HU.@? assertionMsg
where
assertionMsg = "Actual : " ++ show actual ++
"\nExpected : " ++ show expected
| jstolarek/lattice-structure-hs | tests/Test/Utils.hs | bsd-3-clause | 2,737 | 0 | 12 | 820 | 1,147 | 626 | 521 | -1 | -1 |
module Main where
import Data.Monoid
input = ".^^.^^^..^.^..^.^^.^^^^.^^.^^...^..^...^^^..^^...^..^^^^^^..^.^^^..^.^^^^.^^^.^...^^^.^^.^^^.^.^^.^."
isTrap "^^." = '^' -- Its left and center tiles are traps, but its right tile is not.
isTrap ".^^" = '^' -- Its center and right tiles are traps, but its left tile is not.
isTrap "^.." = '^' -- Only its left tile is a trap.
isTrap "..^" = '^' -- Only its right tile is a trap.
isTrap _ = '.' -- None of the above, tile is safe
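-- Slide a three-tile window across the padded row; each window decides the
-- safety of the tile below its centre in the next row.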
walk (a:b:c:ds) = isTrap [a,b,c] : walk (b:c:ds)
walk _ = []
inflate input = "." <> input <> "." -- add safe tiles on either end
nextRow thisRow = walk $ inflate thisRow
main = do print $ length $ filter (== '.') (concat $ take 40 rows)
print $ length $ filter (== '.') (concat $ take 400000 rows)
where rows = iterate nextRow input
| shapr/adventofcode2016 | src/Eighteen/Main.hs | bsd-3-clause | 850 | 0 | 11 | 189 | 235 | 124 | 111 | 15 | 1 |
module PrefAttachment (runPrefAttachment) where
import Graph
import Data.List
import System.Random
import Test.QuickCheck
import qualified Data.IntMap.Strict as Map
runPrefAttachment :: Int -> IO ((Graph Int),Int)
runPrefAttachment 1 = return ((createGraph 1),0)
runPrefAttachment num =
let
graph = createGraph num
in loop graph 0
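-- Repeatedly add edges by preferential attachment: one endpoint is picked
-- uniformly at random, the other with probability proportional to its degree.
-- Stops once the graph becomes connected, returning it together with the
-- number of edges added along the way.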
loop :: (Graph Int) -> Int -> IO ((Graph Int),Int)
loop g edge_nr = do
v1 <- randomRIO (0,(length $ Map.keys g)-1)
v2 <- generate $ frequency $ map (\(a,b) -> (b,return a)) $ degrees g
if v1==v2
then loop g edge_nr
else do
let new_graph = addEdge g (v1,v2)
if verifyConnected new_graph
then return (new_graph,edge_nr+1)
else loop new_graph (edge_nr+1)
| jbddc/sdc-graph | src/PrefAttachment.hs | bsd-3-clause | 726 | 0 | 14 | 145 | 313 | 165 | 148 | 23 | 3 |
{-
Euler discovered the remarkable quadratic formula:
n^2 + n + 41
It turns out that the formula will produce 40 primes for the consecutive values n = 0 to 39. However, when n = 40, 40^2 + 40 + 41 = 40(40 + 1) + 41 is divisible by 41, and certainly when n = 41, 41^2 + 41 + 41 is clearly divisible by 41.
The incredible formula n^2 − 79n + 1601 was discovered, which produces 80 primes for the consecutive values n = 0 to 79. The product of the coefficients, −79 and 1601, is −126479.
Considering quadratics of the form:
n^2 + an + b, where |a| < 1000 and |b| < 1000
where |n| is the modulus/absolute value of n
e.g. |11| = 11 and |−4| = 4
Find the product of the coefficients, a and b, for the quadratic expression that produces the maximum number of primes for consecutive values of n, starting with n = 0.
-}
import qualified Data.List as List
import qualified Data.Ord as Ord
import qualified Zora.List as ZList
import qualified Zora.Math as ZMath
import Control.Applicative
num_primes_produced :: (Integer -> Integer) -> Int
num_primes_produced f =
length . takeWhile (ZMath.prime . f) $ [0..]
k :: Integer
k = 999
as :: [Integer]
as = [-k..k]
positive_small_primes :: [Integer]
positive_small_primes = takeWhile (<= k) ZMath.primes
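-- Since f(0) = b, the constant term b must itself be prime, so only the
-- (positive) primes below 1000 are paired with each coefficient a.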
quadratics :: [(Integer -> Integer, (Integer, Integer))]
quadratics = f <$> as <*> positive_small_primes
where
f :: Integer -> Integer -> (Integer -> Integer, (Integer, Integer))
f a b = ((\n -> n^2 + (a * n) + b), (a, b))
fs_with_results :: [(Int, (Integer, Integer))]
fs_with_results = map (ZList.map_fst (($) num_primes_produced)) quadratics
max_with_index :: ((Int, (Integer, Integer)), Integer)
max_with_index = ZList.maximum_with_index fs_with_results
ab :: (Integer, Integer)
ab = snd . fst $ max_with_index
main :: IO ()
main = do
putStrLn . show $ fst ab * snd ab | bgwines/project-euler | src/solved/problem27.hs | bsd-3-clause | 1,843 | 2 | 13 | 340 | 399 | 232 | 167 | 27 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE DeriveGeneric #-}
module Main ( main ) where
import GHC.Generics ( Generic )
import Criterion.Main
import qualified Data.Binary as B
import qualified Data.Serialize as S
data SerializeMe a =
SerializeMe [[(a,[a])]] [[(a,[a])]] [[(a,[a])]] [[(a,[a])]] [[(a,[a])]] [[(a,[a])]] [[(a,[a])]]
deriving Generic
instance S.Serialize a => S.Serialize (SerializeMe a)
instance B.Binary a => B.Binary (SerializeMe a)
exampleData :: SerializeMe Double
exampleData = SerializeMe x x x x x x x
where
x :: [[(Double, [Double])]]
x = replicate 200 (replicate 4 (pi, replicate 40 pi))
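-- Benchmark encoding the same nested structure with cereal (strict and lazy
-- output) and with binary, to compare serialisation performance.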
main :: IO ()
main =
defaultMain
[ bench "cereal-encode-nf" $ nf S.encode exampleData
, bench "cereal-encodeLazy-nf" $ nf S.encodeLazy exampleData
, bench "binary-encode-nf" $ nf B.encode exampleData
]
| ghorn/binary-counterexample | src/Main.hs | bsd-3-clause | 855 | 0 | 11 | 163 | 368 | 211 | 157 | 22 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
------------------------------------------------------------------------------
module Snap.Internal.Http.Server.Session
( httpSession
) where
------------------------------------------------------------------------------
import Blaze.ByteString.Builder (Builder, flush,
fromByteString)
import Blaze.ByteString.Builder.Char8 (fromChar, fromShow)
import Control.Applicative ((<$>), (<|>))
import Control.Arrow (first, second)
import Control.Exception (Exception, Handler (..),
SomeException (..), catches,
throwIO)
import Control.Monad (when)
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as S
import qualified Data.CaseInsensitive as CI
import Data.Int (Int64)
import Data.IORef (writeIORef)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Monoid (mconcat)
import Data.Typeable (Typeable)
import System.IO.Streams (InputStream)
import qualified System.IO.Streams as Streams
------------------------------------------------------------------------------
import Snap.Core (EscapeSnap (..))
import Snap.Internal.Http.Server.Parser (IRequest (..), parseCookie,
parseRequest,
parseUrlEncoded,
readChunkedTransferEncoding)
import Snap.Internal.Http.Server.Types (AcceptHook, DataFinishedHook,
ParseHook,
PerSessionData (..),
ServerConfig (..),
ServerHandler,
SessionFinishedHook,
SessionHandler,
UserHandlerFinishedHook)
import Snap.Internal.Http.Types (Method (..), Request (..),
getHeader)
import Snap.Internal.Parsing (unsafeFromInt)
import qualified Snap.Types.Headers as H
------------------------------------------------------------------------------
------------------------------------------------------------------------------
data TerminateSessionException = TerminateSessionException SomeException
deriving (Typeable, Show)
instance Exception TerminateSessionException
data BadRequestException = BadRequestException
deriving (Typeable, Show)
instance Exception BadRequestException
data LengthRequiredException = LengthRequiredException
deriving (Typeable, Show)
instance Exception LengthRequiredException
------------------------------------------------------------------------------
mAX_HEADERS_SIZE :: Int64
mAX_HEADERS_SIZE = 256 * 1024
------------------------------------------------------------------------------
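-- | Run an HTTP session over the given connection: parse the next request
-- from the read end and hand it to the server handler, falling back to the
-- session-finished hook when no request can be parsed.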
httpSession :: hookState
-> ServerHandler hookState
-> ServerConfig hookState
-> PerSessionData
-> IO ()
httpSession !hookState !serverHandler !config !sessionData = begin
where
--------------------------------------------------------------------------
defaultTimeout = _defaultTimeout config
localAddress = _localAddress sessionData
localPort = _localPort config
localHostname = _localHostname config
readEnd = _readEnd sessionData
remoteAddress = _remoteAddress sessionData
remotePort = _remotePort sessionData
tickle = _twiddleTimeout sessionData
writeEnd = _writeEnd sessionData
isSecure = _isSecure config
forceConnectionClose = _forceConnectionClose sessionData
logError = _logError config
--------------------------------------------------------------------------
{-# INLINE begin #-}
begin :: IO ()
begin = do
-- parse HTTP request
receiveRequest >>= maybe (_onSessionFinished config hookState)
processRequest
--------------------------------------------------------------------------
{-# INLINE receiveRequest #-}
receiveRequest :: IO (Maybe Request)
receiveRequest = do
readEnd' <- Streams.throwIfProducesMoreThan mAX_HEADERS_SIZE readEnd
parseRequest readEnd' >>= maybe (return Nothing)
((Just <$>) . toRequest)
--------------------------------------------------------------------------
toRequest :: IRequest -> IO Request
toRequest !ireq = do
-- HTTP spec section 14.23: "All Internet-based HTTP/1.1 servers MUST
-- respond with a 400 (Bad Request) status code to any HTTP/1.1 request
-- message which lacks a Host header field."
--
-- Here we interpret this slightly more liberally: if an absolute URI
-- including a hostname is given in the request line, we'll take that
-- if there's no Host header.
--
-- For HTTP/1.0 requests, we pick the configured local hostname by
-- default.
host <- maybe (if isHttp11
then badRequestWithNoHost
else return localHostname)
return mbHost
-- Handle transfer-encoding: chunked, etc
readEnd' <- setupReadEnd
(readEnd'', postParams) <- parseForm readEnd'
let allParams = Map.unionWith (++) queryParams postParams
checkConnectionClose version hdrs
return $! Request host
remoteAddress
remotePort
localAddress
localPort
localHostname
isSecure
hdrs
readEnd''
mbCL
method
version
cookies
pathInfo
contextPath
uri
queryString
allParams
queryParams
postParams
where
----------------------------------------------------------------------
method = iMethod ireq
version = iHttpVersion ireq
isHttp11 = version >= (1, 1)
mbHost = H.lookup "host" hdrs <|> iHost ireq
localHost = fromMaybe localHostname $! iHost ireq
hdrs = toHeaders $! iRequestHeaders ireq
isChunked = (CI.mk <$> H.lookup "transfer-encoding" hdrs)
== Just "chunked"
mbCL = unsafeFromInt <$> H.lookup "content-length" hdrs
cookies = fromMaybe [] (H.lookup "cookie" hdrs >>= parseCookie)
contextPath = "/"
uri = iRequestUri ireq
queryParams = parseUrlEncoded queryString
emptyParams = Map.empty
----------------------------------------------------------------------
(pathInfo, queryString) = first dropLeadingSlash . second (S.drop 1)
$ S.break (== '?') uri
----------------------------------------------------------------------
{-# INLINE dropLeadingSlash #-}
dropLeadingSlash s = let f (a, s') = if a == '/' then s' else s
mbS = S.uncons s
in maybe s f mbS
----------------------------------------------------------------------
{-# INLINE setupReadEnd #-}
setupReadEnd = do
readEnd' <- if isChunked
then readChunkedTransferEncoding readEnd
else return readEnd
maybe noContentLength Streams.takeBytes mbCL readEnd'
----------------------------------------------------------------------
noContentLength :: InputStream ByteString
-> IO (InputStream ByteString)
noContentLength readEnd' = do
when (method `elem` [POST, PUT]) return411
Streams.takeBytes 0 readEnd'
----------------------------------------------------------------------
return411 = do
let (major, minor) = version
let resp = mconcat [ fromByteString "HTTP/"
, fromShow major
, fromChar '.'
, fromShow minor
, fromByteString " 411 Length Required\r\n\r\n"
, fromByteString "411 Length Required\r\n"
, flush
]
Streams.write (Just resp) writeEnd
Streams.write Nothing writeEnd
terminateSession LengthRequiredException
----------------------------------------------------------------------
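    -- If the body is application/x-www-form-urlencoded, read it (at most
    -- 1MB), parse the POST parameters, and hand back a fresh stream that
    -- replays the body; other content types pass through untouched.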
parseForm readEnd' = if doIt
then getIt
else return (readEnd', emptyParams)
where
trimIt = fst . S.spanEnd (== ' ') . S.takeWhile (/= ';')
. S.dropWhile (== ' ')
mbCT = trimIt <$> H.lookup "content-type" hdrs
doIt = mbCT == Just "application/x-www-form-urlencoded"
mAX_POST_BODY_SIZE = 1024 * 1024
getIt = do
readEnd'' <- Streams.throwIfProducesMoreThan
mAX_POST_BODY_SIZE readEnd'
contents <- S.concat <$> Streams.toList readEnd''
let postParams = parseUrlEncoded contents
finalReadEnd <- Streams.fromList [contents]
return (finalReadEnd, postParams)
----------------------------------------------------------------------
checkConnectionClose version hdrs = do
-- For HTTP/1.1: if there is an explicit Connection: close, close
-- the socket.
--
-- For HTTP/1.0: if there is no explicit Connection: Keep-Alive,
-- close the socket.
let v = CI.mk <$> H.lookup "Connection" hdrs
when ((version == (1, 1) && v == Just "close") ||
(version == (1, 0) && v /= Just "keep-alive")) $
writeIORef forceConnectionClose True
--------------------------------------------------------------------------
{-# INLINE badRequestWithNoHost #-}
badRequestWithNoHost :: IO a
badRequestWithNoHost = do
let msg = mconcat [
fromByteString "HTTP/1.1 400 Bad Request\r\n\r\n"
, fromByteString "400 Bad Request: HTTP/1.1 request with no "
, fromByteString "Host header\r\n"
, flush
]
Streams.write (Just msg) writeEnd
Streams.write Nothing writeEnd
terminateSession BadRequestException
--------------------------------------------------------------------------
{-# INLINE checkExpect100Continue #-}
checkExpect100Continue req =
when (getHeader "Expect" req == Just "100-continue") $ do
let (major, minor) = rqVersion req
let hl = mconcat [ fromByteString "HTTP/"
, fromShow major
, fromChar '.'
, fromShow minor
, fromByteString " 100 Continue\r\n\r\n"
, flush
]
Streams.write (Just hl) writeEnd
--------------------------------------------------------------------------
{-# INLINE processRequest #-}
processRequest !req = do
-- successfully parsed a request, so restart the timer
tickle $ max defaultTimeout
-- check for Expect: 100-continue
checkExpect100Continue req
runServerHandler req
`catches` [ Handler escapeSnapHandler
, Handler $ catchUserException "user handler" req
]
undefined
--------------------------------------------------------------------------
{-# INLINE runServerHandler #-}
runServerHandler !req = do
undefined
--------------------------------------------------------------------------
escapeSnapHandler (EscapeHttp escapeHandler) = escapeHandler tickle
readEnd
writeEnd
escapeSnapHandler (TerminateConnection e) = terminateSession e
--------------------------------------------------------------------------
catchUserException :: ByteString -> Request -> SomeException -> IO ()
catchUserException phase req e = do
logError $ mconcat [
fromByteString "Exception leaked to httpSession during phase '"
, fromByteString phase
, fromByteString "': \n"
, requestErrorMessage req e
]
terminateSession e
------------------------------------------------------------------------------
toHeaders :: [(ByteString, ByteString)] -> H.Headers
toHeaders = H.fromList . map (first CI.mk)
------------------------------------------------------------------------------
terminateSession :: Exception e => e -> IO a
terminateSession = throwIO . TerminateSessionException . SomeException
------------------------------------------------------------------------------
requestErrorMessage :: Request -> SomeException -> Builder
requestErrorMessage req e =
mconcat [ fromByteString "During processing of request from "
, fromByteString $ rqRemoteAddr req
, fromByteString ":"
, fromShow $ rqRemotePort req
, fromByteString "\nrequest:\n"
, fromShow $ show req
, fromByteString "\n"
, msgB
]
where
msgB = mconcat [
fromByteString "A web handler threw an exception. Details:\n"
, fromShow e
]
| afcowie/new-snap-server | src/Snap/Internal/Http/Server/Session.hs | bsd-3-clause | 14,889 | 0 | 17 | 5,493 | 2,223 | 1,186 | 1,037 | 237 | 6 |
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS -w #-}
module Prelude (print, Base.Int) where
import Fay.Types (Fay)
import Fay.FFI
import qualified "base" Prelude as Base
import "base" Prelude (Bool(True,False)
,(||),(&&),seq,Eq,(==),(/=))
--------------------------------------------------------------------------------
-- Fixities
infixr 9 .
infixl 1 >>, >>=
infixr 0 $
--------------------------------------------------------------------------------
-- Aliases of base
type String = Base.String
type Double = Base.Double
type Char = Base.Char
--------------------------------------------------------------------------------
-- Standard data types
-- | Maybe type.
data Maybe a = Just a | Nothing
instance Base.Read a => Base.Read (Maybe a)
instance Base.Show a => Base.Show (Maybe a)
--------------------------------------------------------------------------------
-- Monads
-- | Monomorphic bind for Fay.
(>>=) :: Fay a -> (a -> Fay b) -> Fay b
(>>=) = ffi "Fay$$bind(%1)(%2)"
-- | Monomorphic then for Fay.
(>>) :: Ptr (Fay a) -> Ptr (Fay b) -> Ptr (Fay b)
(>>) = ffi "Fay$$then(%1)(%2)"
-- | Monomorphic return for Fay.
return :: a -> Fay a
return = ffi "Fay$$return(%1)"
--------------------------------------------------------------------------------
-- Show
-- | Uses JSON.stringify.
show :: Automatic a -> String
show = ffi "JSON.stringify(%1)"
--------------------------------------------------------------------------------
-- Functions
(.) :: (t1 -> t) -> (t2 -> t1) -> t2 -> t
(f . g) x = f (g x)
($) :: (t1 -> t) -> t1 -> t
f $ x = f x
--------------------------------------------------------------------------------
-- IO
print :: Automatic a -> Fay ()
print = ffi "(function(x) { if (console && console.log) console.log(x) })(%1)"
putStrLn :: String -> Fay ()
putStrLn = ffi "(function(x) { if (console && console.log) console.log(x) })(%1)"
| faylang/fay-prim | src/Prelude.hs | bsd-3-clause | 2,030 | 0 | 11 | 340 | 479 | 277 | 202 | 36 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Main where
import qualified Data.Map as M
import Network
import NetworkedGame.Packet
import NetworkedGame.Server
import Control.Concurrent
import Control.Monad
import Control.Lens
import Data.Maybe
import System.IO
import System.Exit
import AnimatedDangerzone.Types
import Graphics.Gloss
import Graphics.Gloss.Juicy
import Graphics.Gloss.Interface.IO.Game
import AnimatedDangerzone.Client.Config as C
data ClientState =
ClientState { _clientCid :: ConnectionId
, _clientWorld :: World
, _clientHandle :: Handle
}
data Tile =
TEnv Block
| TPlayer
deriving (Eq, Read, Show, Ord)
makeLenses ''ClientState
tileFiles :: [(Tile, FilePath)]
tileFiles =
[ (TEnv Air, "images/tiles/dc-dngn/dngn_unseen.png")
, (TEnv Rubble, "images/tiles/dc-dngn/floor/cobble_blood5.png")
, (TEnv Rock, "images/tiles/dc-dngn/floor/cobble_blood1.png")
, (TEnv Stones, "images/tiles/dc-dngn/floor/floor_vines0.png")
, (TEnv Lava, "images/tiles/dc-dngn/floor/lava0.png")
, (TEnv Ice, "images/tiles/dc-dngn/floor/ice0.png")
, (TPlayer, "images/tiles/player/base/human_m.png")
]
loadTileMap :: IO (M.Map Tile Picture)
loadTileMap = do
pairs <- forM tileFiles $ \(t, path) ->
do Just p <- loadJuicy path
return (t, p)
return $ M.fromList pairs
clientState :: Handle -> ConnectionId -> World -> ClientState
clientState h cid w =
ClientState { _clientCid = cid
, _clientWorld = w
, _clientHandle = h
}
getPlayer :: MVar ClientState -> IO Player
getPlayer mv = do
cs <- readMVar mv
return $ fromJust $ cs^.clientWorld.worldPlayers.at (cs^.clientCid)
main :: IO ()
main = do
tileMap <- loadTileMap
withClientArgs $ \opts _ -> do
h <- connectTo (opts^.optServerName)
(PortNumber (opts^.optPortNumber))
hPutPacket h $ mkPacket $ ClientHello (opts^.optPlayerName)
resp <- hGetPacketed h
myCid <- case resp of
Hello myCid -> return myCid
UsernameConflict -> do
putStrLn "User already connected; please choose a different username."
exitFailure
_ -> do putStrLn "Protocol error: got unexpected message"
exitFailure
NewPlayer _ _ <- hGetPacketed h
SetWorld initialWorld <- hGetPacketed h
wmvar <- newMVar $ clientState h myCid initialWorld
_ <- forkIO $ forever $ networkThread h wmvar
let dpy = InWindow "game" (700, 700) (0, 0)
playIO
dpy
white
60
()
(const $ worldPicture wmvar tileMap)
(handleEvent wmvar)
stepWorld
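-- Movement helpers. A 'Coord' is treated here as a (row, column) pair
-- (inferred from the rendering code below), so '_1' adjusts the vertical
-- position and '_2' the horizontal one.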
moveUp :: MVar ClientState -> IO ()
moveUp mv = movePlayer mv (_1 +~ 1)
moveDown :: MVar ClientState -> IO ()
moveDown mv = movePlayer mv (_1 -~ 1)
moveLeft :: MVar ClientState -> IO ()
moveLeft mv = movePlayer mv (_2 -~ 1)
moveRight :: MVar ClientState -> IO ()
moveRight mv = movePlayer mv (_2 +~ 1)
movePlayer :: MVar ClientState -> (Coord -> Coord) -> IO ()
movePlayer mv f = do
p <- getPlayer mv
let old = p^.playerCoord
sendMessage mv $ ClientMove $ f old
sendMessage :: MVar ClientState -> ClientMsg -> IO ()
sendMessage mv msg = do
cs <- readMVar mv
hPutPacket (cs^.clientHandle) $ mkPacket msg
handleEvent :: MVar ClientState -> Event -> () -> IO ()
handleEvent mv e _st = do
case e of
EventKey (SpecialKey KeyUp) Down _ _ -> moveUp mv
EventKey (SpecialKey KeyDown) Down _ _ -> moveDown mv
EventKey (SpecialKey KeyLeft) Down _ _ -> moveLeft mv
EventKey (SpecialKey KeyRight) Down _ _ -> moveRight mv
_ -> return ()
return ()
-- Later, animation
stepWorld :: Float -> () -> IO ()
stepWorld = const return
networkThread :: Handle -> MVar ClientState -> IO ()
networkThread h mv = do
p <- hGetPacketed h
putStrLn $ "Got server packet: " ++ show p
case p of
SetWorld w -> modifyMVar_ mv $ \cs -> return $ cs & clientWorld .~ w
MovePlayer cid coord -> modifyMVar_ mv $ \cs ->
return $ cs & clientWorld.worldPlayers.ix cid.playerCoord .~ coord
un -> putStrLn $ "Unsupported message: " ++ show un
worldPicture :: MVar ClientState -> M.Map Tile Picture -> IO Picture
worldPicture mv tileMap = do
cs <- readMVar mv
let w = cs^.clientWorld
tilePicture (r,c) Nothing = trans (r,c) (TEnv Air)
tilePicture (r,c) (Just b) = trans (r,c) (TEnv b)
playerPicture p = trans (p^.playerCoord) TPlayer
trans (r,c) t = Translate (toEnum $ c*32) (toEnum $ r*32) $ tileMap M.! t
envTiles = [ tilePicture (r,c) (w^.worldBlocks.at(r,c))
| c <- [-20..20], r <- [-20..20] ]
playerTiles = [ playerPicture p | p <- M.elems (w^.worldPlayers) ]
return $ Pictures $ envTiles ++ playerTiles
| glguy/animated-dangerzone | src/GlossClient.hs | bsd-3-clause | 4,792 | 6 | 17 | 1,168 | 1,693 | 850 | 843 | -1 | -1 |
{-# LANGUAGE CPP, ForeignFunctionInterface, BangPatterns #-}
#if __GLASGOW_HASKELL__
{-# LANGUAGE UnliftedFFITypes, MagicHash,
UnboxedTuples, DeriveDataTypeable #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Unsafe #-}
#endif
#endif
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.ByteString.Internal
-- Copyright : (c) Don Stewart 2006-2008
-- (c) Duncan Coutts 2006-2012
-- License : BSD-style
-- Maintainer : [email protected], [email protected]
-- Stability : unstable
-- Portability : non-portable
--
-- A module containing semi-public 'ByteString' internals. This exposes the
-- 'ByteString' representation and low level construction functions. As such
-- all the functions in this module are unsafe. The API is also not stable.
--
-- Where possible application should instead use the functions from the normal
-- public interface modules, such as "Data.ByteString.Unsafe". Packages that
-- extend the ByteString system at a low level will need to use this module.
--
module Data.ByteString.Internal (
-- * The @ByteString@ type and representation
ByteString(..), -- instances: Eq, Ord, Show, Read, Data, Typeable
-- * Conversion with lists: packing and unpacking
packBytes, packUptoLenBytes, unsafePackLenBytes,
packChars, packUptoLenChars, unsafePackLenChars,
unpackBytes, unpackAppendBytesLazy, unpackAppendBytesStrict,
unpackChars, unpackAppendCharsLazy, unpackAppendCharsStrict,
#if defined(__GLASGOW_HASKELL__)
unsafePackAddress,
#endif
-- * Low level imperative construction
create, -- :: Int -> (Ptr Word8 -> IO ()) -> IO ByteString
createUptoN, -- :: Int -> (Ptr Word8 -> IO Int) -> IO ByteString
createAndTrim, -- :: Int -> (Ptr Word8 -> IO Int) -> IO ByteString
createAndTrim', -- :: Int -> (Ptr Word8 -> IO (Int, Int, a)) -> IO (ByteString, a)
unsafeCreate, -- :: Int -> (Ptr Word8 -> IO ()) -> ByteString
unsafeCreateUptoN, -- :: Int -> (Ptr Word8 -> IO Int) -> ByteString
mallocByteString, -- :: Int -> IO (ForeignPtr a)
-- * Conversion to and from ForeignPtrs
fromForeignPtr, -- :: ForeignPtr Word8 -> Int -> Int -> ByteString
toForeignPtr, -- :: ByteString -> (ForeignPtr Word8, Int, Int)
-- * Utilities
inlinePerformIO, -- :: IO a -> a
nullForeignPtr, -- :: ForeignPtr Word8
-- * Standard C Functions
c_strlen, -- :: CString -> IO CInt
c_free_finalizer, -- :: FunPtr (Ptr Word8 -> IO ())
memchr, -- :: Ptr Word8 -> Word8 -> CSize -> IO Ptr Word8
memcmp, -- :: Ptr Word8 -> Ptr Word8 -> Int -> IO CInt
memcpy, -- :: Ptr Word8 -> Ptr Word8 -> Int -> IO ()
memset, -- :: Ptr Word8 -> Word8 -> CSize -> IO (Ptr Word8)
-- * cbits functions
c_reverse, -- :: Ptr Word8 -> Ptr Word8 -> CInt -> IO ()
c_intersperse, -- :: Ptr Word8 -> Ptr Word8 -> CInt -> Word8 -> IO ()
c_maximum, -- :: Ptr Word8 -> CInt -> IO Word8
c_minimum, -- :: Ptr Word8 -> CInt -> IO Word8
c_count, -- :: Ptr Word8 -> CInt -> Word8 -> IO CInt
-- * Chars
w2c, c2w, isSpaceWord8, isSpaceChar8
) where
import Prelude hiding (concat)
import qualified Data.List as List
import Foreign.ForeignPtr (ForeignPtr, withForeignPtr)
import Foreign.Ptr (Ptr, FunPtr, plusPtr)
import Foreign.Storable (Storable(..))
#if MIN_VERSION_base(4,5,0) || __GLASGOW_HASKELL__ >= 703
import Foreign.C.Types (CInt(..), CSize(..), CULong(..))
#else
import Foreign.C.Types (CInt, CSize, CULong)
#endif
import Foreign.C.String (CString)
import Data.Monoid (Monoid(..))
import Control.DeepSeq (NFData)
#if MIN_VERSION_base(3,0,0)
import Data.String (IsString(..))
#endif
#ifndef __NHC__
import Control.Exception (assert)
#endif
import Data.Char (ord)
import Data.Word (Word8)
import Data.Typeable (Typeable)
#if MIN_VERSION_base(4,1,0)
import Data.Data (Data(..))
#if MIN_VERSION_base(4,2,0)
import Data.Data (mkNoRepType)
#else
import Data.Data (mkNorepType)
#endif
#else
import Data.Generics (Data(..), mkNorepType)
#endif
#ifdef __GLASGOW_HASKELL__
import GHC.Base (realWorld#,unsafeChr)
#if MIN_VERSION_base(4,4,0)
import GHC.CString (unpackCString#)
#else
import GHC.Base (unpackCString#)
#endif
import GHC.Prim (Addr#)
#if __GLASGOW_HASKELL__ >= 611
import GHC.IO (IO(IO))
#else
import GHC.IOBase (IO(IO),RawBuffer)
#endif
#if __GLASGOW_HASKELL__ >= 611
import GHC.IO (unsafeDupablePerformIO)
#else
import GHC.IOBase (unsafeDupablePerformIO)
#endif
#else
import Data.Char (chr)
import System.IO.Unsafe (unsafePerformIO)
#endif
#ifdef __GLASGOW_HASKELL__
import GHC.ForeignPtr (newForeignPtr_, mallocPlainForeignPtrBytes)
import GHC.Ptr (Ptr(..), castPtr)
#else
import Foreign.ForeignPtr (mallocForeignPtrBytes)
#endif
#ifdef __GLASGOW_HASKELL__
import GHC.ForeignPtr (ForeignPtr(ForeignPtr))
import GHC.Base (nullAddr#)
#else
import Foreign.Ptr (nullPtr)
#endif
#if __HUGS__
import Hugs.ForeignPtr (newForeignPtr_)
#elif __GLASGOW_HASKELL__<=604
import Foreign.ForeignPtr (newForeignPtr_)
#endif
-- CFILES stuff is Hugs only
{-# CFILES cbits/fpstring.c #-}
-- An alternative to Control.Exception (assert) for nhc98
#ifdef __NHC__
#define assert assertS "__FILE__ : __LINE__"
assertS :: String -> Bool -> a -> a
assertS _ True = id
assertS s False = error ("assertion failed at "++s)
#endif
-- -----------------------------------------------------------------------------
--
-- Useful macros, until we have bang patterns
--
#define STRICT1(f) f a | a `seq` False = undefined
#define STRICT2(f) f a b | a `seq` b `seq` False = undefined
#define STRICT3(f) f a b c | a `seq` b `seq` c `seq` False = undefined
#define STRICT4(f) f a b c d | a `seq` b `seq` c `seq` d `seq` False = undefined
#define STRICT5(f) f a b c d e | a `seq` b `seq` c `seq` d `seq` e `seq` False = undefined
-- -----------------------------------------------------------------------------
-- | A space-efficient representation of a 'Word8' vector, supporting many
-- efficient operations.
--
-- A 'ByteString' contains 8-bit bytes, or by using the operations from
-- "Data.ByteString.Char8" it can be interpreted as containing 8-bit
-- characters.
--
data ByteString = PS {-# UNPACK #-} !(ForeignPtr Word8) -- payload
{-# UNPACK #-} !Int -- offset
{-# UNPACK #-} !Int -- length
#if defined(__GLASGOW_HASKELL__)
deriving (Typeable)
#endif
instance Eq ByteString where
(==) = eq
instance Ord ByteString where
compare = compareBytes
instance Monoid ByteString where
mempty = PS nullForeignPtr 0 0
mappend = append
mconcat = concat
instance NFData ByteString
instance Show ByteString where
showsPrec p ps r = showsPrec p (unpackChars ps) r
instance Read ByteString where
readsPrec p str = [ (packChars x, y) | (x, y) <- readsPrec p str ]
#if MIN_VERSION_base(3,0,0)
instance IsString ByteString where
fromString = packChars
#endif
instance Data ByteString where
gfoldl f z txt = z packBytes `f` (unpackBytes txt)
toConstr _ = error "Data.ByteString.ByteString.toConstr"
gunfold _ _ = error "Data.ByteString.ByteString.gunfold"
#if MIN_VERSION_base(4,2,0)
dataTypeOf _ = mkNoRepType "Data.ByteString.ByteString"
#else
dataTypeOf _ = mkNorepType "Data.ByteString.ByteString"
#endif
------------------------------------------------------------------------
-- Packing and unpacking from lists
packBytes :: [Word8] -> ByteString
packBytes ws = unsafePackLenBytes (List.length ws) ws
packChars :: [Char] -> ByteString
packChars cs = unsafePackLenChars (List.length cs) cs
#if defined(__GLASGOW_HASKELL__)
{-# INLINE [0] packChars #-}
{-# RULES
"ByteString packChars/packAddress" forall s .
packChars (unpackCString# s) = inlinePerformIO (unsafePackAddress s)
#-}
#endif
unsafePackLenBytes :: Int -> [Word8] -> ByteString
unsafePackLenBytes len xs0 =
unsafeCreate len $ \p -> go p xs0
where
go !_ [] = return ()
go !p (x:xs) = poke p x >> go (p `plusPtr` 1) xs
unsafePackLenChars :: Int -> [Char] -> ByteString
unsafePackLenChars len cs0 =
unsafeCreate len $ \p -> go p cs0
where
go !_ [] = return ()
go !p (c:cs) = poke p (c2w c) >> go (p `plusPtr` 1) cs
#if defined(__GLASGOW_HASKELL__)
-- | /O(n)/ Pack a null-terminated sequence of bytes, pointed to by an
-- Addr\# (an arbitrary machine address assumed to point outside the
-- garbage-collected heap) into a @ByteString@. Creating an Addr\# from an
-- unboxed string literal is much faster than packing a boxed string. An
-- unboxed string literal is compiled to a static @char
-- []@ by GHC. Establishing the length of the string requires a call to
-- @strlen(3)@, so the Addr# must point to a null-terminated buffer (as
-- is the case with "string"# literals in GHC). Use 'unsafePackAddressLen'
-- if you know the length of the string statically.
--
-- An example:
--
-- > literalFS = unsafePackAddress "literal"#
--
-- This function is /unsafe/. If you modify the buffer pointed to by the
-- original Addr# this modification will be reflected in the resulting
-- @ByteString@, breaking referential transparency.
--
-- Note this also won't work if your Addr# has embedded '\0' characters in
-- the string, as @strlen@ will return too short a length.
--
unsafePackAddress :: Addr# -> IO ByteString
unsafePackAddress addr# = do
p <- newForeignPtr_ (castPtr cstr)
l <- c_strlen cstr
return $ PS p 0 (fromIntegral l)
where
cstr :: CString
cstr = Ptr addr#
{-# INLINE unsafePackAddress #-}
#endif
packUptoLenBytes :: Int -> [Word8] -> (ByteString, [Word8])
packUptoLenBytes len xs0 =
unsafeCreateUptoN' len $ \p -> go p len xs0
where
go !_ !n [] = return (len-n, [])
go !_ !0 xs = return (len, xs)
go !p !n (x:xs) = poke p x >> go (p `plusPtr` 1) (n-1) xs
packUptoLenChars :: Int -> [Char] -> (ByteString, [Char])
packUptoLenChars len cs0 =
unsafeCreateUptoN' len $ \p -> go p len cs0
where
go !_ !n [] = return (len-n, [])
go !_ !0 cs = return (len, cs)
go !p !n (c:cs) = poke p (c2w c) >> go (p `plusPtr` 1) (n-1) cs
-- Unpacking bytestrings into lists efficiently is a tradeoff: on the one hand
-- we would like to write a tight loop that just blats the list into memory, on
-- the other hand we want it to be unpacked lazily so we don't end up with a
-- massive list data structure in memory.
--
-- Our strategy is to combine both: we will unpack lazily in reasonable sized
-- chunks, where each chunk is unpacked strictly.
--
-- unpackBytes and unpackChars do the lazy loop, while unpackAppendBytes and
-- unpackAppendChars do the chunks strictly.
unpackBytes :: ByteString -> [Word8]
unpackBytes bs = unpackAppendBytesLazy bs []
unpackChars :: ByteString -> [Char]
unpackChars bs = unpackAppendCharsLazy bs []
unpackAppendBytesLazy :: ByteString -> [Word8] -> [Word8]
unpackAppendBytesLazy (PS fp off len) xs
| len <= 100 = unpackAppendBytesStrict (PS fp off len) xs
| otherwise = unpackAppendBytesStrict (PS fp off 100) remainder
where
remainder = unpackAppendBytesLazy (PS fp (off+100) (len-100)) xs
-- Why 100 bytes you ask? Because on a 64bit machine the list we allocate
-- takes just shy of 4k which seems like a reasonable amount.
-- (5 words per list element, 8 bytes per word, 100 elements = 4000 bytes)
unpackAppendCharsLazy :: ByteString -> [Char] -> [Char]
unpackAppendCharsLazy (PS fp off len) cs
| len <= 100 = unpackAppendCharsStrict (PS fp off len) cs
| otherwise = unpackAppendCharsStrict (PS fp off 100) remainder
where
remainder = unpackAppendCharsLazy (PS fp (off+100) (len-100)) cs
-- For these unpack functions, since we're unpacking the whole list strictly we
-- build up the result list in an accumulator. This means we have to build up
-- the list starting at the end. So our traversal starts at the end of the
-- buffer and loops down until we hit the sentinel:
unpackAppendBytesStrict :: ByteString -> [Word8] -> [Word8]
unpackAppendBytesStrict (PS fp off len) xs =
inlinePerformIO $ withForeignPtr fp $ \base -> do
loop (base `plusPtr` (off-1)) (base `plusPtr` (off-1+len)) xs
where
loop !sentinal !p acc
| p == sentinal = return acc
| otherwise = do x <- peek p
loop sentinal (p `plusPtr` (-1)) (x:acc)
unpackAppendCharsStrict :: ByteString -> [Char] -> [Char]
unpackAppendCharsStrict (PS fp off len) xs =
inlinePerformIO $ withForeignPtr fp $ \base ->
loop (base `plusPtr` (off-1)) (base `plusPtr` (off-1+len)) xs
where
loop !sentinal !p acc
| p == sentinal = return acc
| otherwise = do x <- peek p
loop sentinal (p `plusPtr` (-1)) (w2c x:acc)
------------------------------------------------------------------------
-- | The 0 pointer. Used to indicate the empty ByteString.
nullForeignPtr :: ForeignPtr Word8
#ifdef __GLASGOW_HASKELL__
nullForeignPtr = ForeignPtr nullAddr# (error "nullForeignPtr") --TODO: should ForeignPtrContents be strict?
#else
nullForeignPtr = unsafePerformIO $ newForeignPtr_ nullPtr
{-# NOINLINE nullForeignPtr #-}
#endif
-- ---------------------------------------------------------------------
-- Low level constructors
-- | /O(1)/ Build a ByteString from a ForeignPtr.
--
-- If you do not need the offset parameter then you should be using
-- 'Data.ByteString.Unsafe.unsafePackCStringLen' or
-- 'Data.ByteString.Unsafe.unsafePackCStringFinalizer' instead.
--
fromForeignPtr :: ForeignPtr Word8
-> Int -- ^ Offset
-> Int -- ^ Length
-> ByteString
fromForeignPtr fp s l = PS fp s l
{-# INLINE fromForeignPtr #-}
-- | /O(1)/ Deconstruct a ForeignPtr from a ByteString
toForeignPtr :: ByteString -> (ForeignPtr Word8, Int, Int) -- ^ (ptr, offset, length)
toForeignPtr (PS ps s l) = (ps, s, l)
{-# INLINE toForeignPtr #-}
-- | A way of creating ByteStrings outside the IO monad. The @Int@
-- argument gives the final size of the ByteString.
unsafeCreate :: Int -> (Ptr Word8 -> IO ()) -> ByteString
unsafeCreate l f = unsafeDupablePerformIO (create l f)
{-# INLINE unsafeCreate #-}
-- | Like 'unsafeCreate' but instead of giving the final size of the
-- ByteString, it is just an upper bound. The inner action returns
-- the actual size. Unlike 'createAndTrim' the ByteString is not
-- reallocated if the final size is less than the estimated size.
unsafeCreateUptoN :: Int -> (Ptr Word8 -> IO Int) -> ByteString
unsafeCreateUptoN l f = unsafeDupablePerformIO (createUptoN l f)
{-# INLINE unsafeCreateUptoN #-}
unsafeCreateUptoN' :: Int -> (Ptr Word8 -> IO (Int, a)) -> (ByteString, a)
unsafeCreateUptoN' l f = unsafeDupablePerformIO (createUptoN' l f)
{-# INLINE unsafeCreateUptoN' #-}
#ifndef __GLASGOW_HASKELL__
-- for Hugs, NHC etc
unsafeDupablePerformIO :: IO a -> a
unsafeDupablePerformIO = unsafePerformIO
#endif
-- | Create a ByteString of size @l@ and use action @f@ to fill its contents.
create :: Int -> (Ptr Word8 -> IO ()) -> IO ByteString
create l f = do
fp <- mallocByteString l
withForeignPtr fp $ \p -> f p
return $! PS fp 0 l
{-# INLINE create #-}
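-- For example (illustrative only), a four-byte string of zero bytes can be
-- built by filling the freshly allocated buffer with 'memset':
--
-- > fourZeros :: IO ByteString
-- > fourZeros = create 4 $ \p -> memset p 0 4 >> return ()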
-- | Create a ByteString of up to size @l@ and use action @f@ to fill its
-- contents; the action returns the true size.
createUptoN :: Int -> (Ptr Word8 -> IO Int) -> IO ByteString
createUptoN l f = do
fp <- mallocByteString l
l' <- withForeignPtr fp $ \p -> f p
assert (l' <= l) $ return $! PS fp 0 l'
{-# INLINE createUptoN #-}
-- | Create a ByteString of up to size @l@ and use action @f@ to fill its contents; the action returns the true size.
createUptoN' :: Int -> (Ptr Word8 -> IO (Int, a)) -> IO (ByteString, a)
createUptoN' l f = do
fp <- mallocByteString l
(l', res) <- withForeignPtr fp $ \p -> f p
assert (l' <= l) $ return (PS fp 0 l', res)
{-# INLINE createUptoN' #-}
-- | Given the maximum size needed and a function to make the contents
-- of a ByteString, createAndTrim makes the 'ByteString'. The generating
-- function is required to return the actual final size (<= the maximum
-- size), and the resulting byte array is reallocated to this size.
--
-- createAndTrim is the main mechanism for creating custom, efficient
-- ByteString functions, using Haskell or C functions to fill the space.
--
createAndTrim :: Int -> (Ptr Word8 -> IO Int) -> IO ByteString
createAndTrim l f = do
fp <- mallocByteString l
withForeignPtr fp $ \p -> do
l' <- f p
if assert (l' <= l) $ l' >= l
then return $! PS fp 0 l
else create l' $ \p' -> memcpy p' p l'
{-# INLINE createAndTrim #-}
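-- A usage sketch, not part of the original API: build a new ByteString from an
-- existing one, keeping only the bytes below a cut-off. The name
-- 'filterBelowSketch' is illustrative; the point is that the filler action
-- reports how many bytes it actually wrote and 'createAndTrim' shrinks the
-- result to that length.
filterBelowSketch :: Word8 -> ByteString -> IO ByteString
filterBelowSketch cutoff (PS sfp soff slen) =
    withForeignPtr sfp $ \sbase ->
      createAndTrim slen $ \dst ->
        let src = sbase `plusPtr` soff
            go !si !di
              | si >= slen = return di
              | otherwise  = do
                  w <- peekByteOff src si :: IO Word8
                  if w < cutoff
                    then pokeByteOff dst di w >> go (si+1) (di+1)
                    else go (si+1) di
        in go 0 0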
createAndTrim' :: Int -> (Ptr Word8 -> IO (Int, Int, a)) -> IO (ByteString, a)
createAndTrim' l f = do
fp <- mallocByteString l
withForeignPtr fp $ \p -> do
(off, l', res) <- f p
if assert (l' <= l) $ l' >= l
then return $! (PS fp 0 l, res)
else do ps <- create l' $ \p' ->
memcpy p' (p `plusPtr` off) l'
return $! (ps, res)
-- | Wrapper of 'mallocForeignPtrBytes' with faster implementation for GHC
--
mallocByteString :: Int -> IO (ForeignPtr a)
mallocByteString l = do
#ifdef __GLASGOW_HASKELL__
mallocPlainForeignPtrBytes l
#else
mallocForeignPtrBytes l
#endif
{-# INLINE mallocByteString #-}
------------------------------------------------------------------------
-- Implementations for Eq, Ord and Monoid instances
eq :: ByteString -> ByteString -> Bool
eq a@(PS fp off len) b@(PS fp' off' len')
| len /= len' = False -- short cut on length
| fp == fp' && off == off' = True -- short cut for the same string
| otherwise = compareBytes a b == EQ
{-# INLINE eq #-}
-- ^ still needed
compareBytes :: ByteString -> ByteString -> Ordering
compareBytes (PS _ _ 0) (PS _ _ 0) = EQ -- short cut for empty strings
compareBytes (PS fp1 off1 len1) (PS fp2 off2 len2) =
inlinePerformIO $
withForeignPtr fp1 $ \p1 ->
withForeignPtr fp2 $ \p2 -> do
i <- memcmp (p1 `plusPtr` off1) (p2 `plusPtr` off2) (min len1 len2)
return $! case i `compare` 0 of
EQ -> len1 `compare` len2
x -> x
append :: ByteString -> ByteString -> ByteString
append (PS _ _ 0) b = b
append a (PS _ _ 0) = a
append (PS fp1 off1 len1) (PS fp2 off2 len2) =
unsafeCreate (len1+len2) $ \destptr1 -> do
let destptr2 = destptr1 `plusPtr` len1
withForeignPtr fp1 $ \p1 -> memcpy destptr1 (p1 `plusPtr` off1) len1
withForeignPtr fp2 $ \p2 -> memcpy destptr2 (p2 `plusPtr` off2) len2
concat :: [ByteString] -> ByteString
concat [] = mempty
concat [bs] = bs
concat bss0 = unsafeCreate totalLen $ \ptr -> go bss0 ptr
where
totalLen = List.sum [ len | (PS _ _ len) <- bss0 ]
go [] !_ = return ()
go (PS fp off len:bss) !ptr = do
withForeignPtr fp $ \p -> memcpy ptr (p `plusPtr` off) len
go bss (ptr `plusPtr` len)
------------------------------------------------------------------------
-- | Conversion between 'Word8' and 'Char'. Should compile to a no-op.
w2c :: Word8 -> Char
#if !defined(__GLASGOW_HASKELL__)
w2c = chr . fromIntegral
#else
w2c = unsafeChr . fromIntegral
#endif
{-# INLINE w2c #-}
-- | Unsafe conversion between 'Char' and 'Word8'. This is a no-op and
-- silently truncates to 8 bits Chars > '\255'. It is provided as
-- convenience for ByteString construction.
c2w :: Char -> Word8
c2w = fromIntegral . ord
{-# INLINE c2w #-}
-- | Selects words corresponding to white-space characters in the Latin-1 range
-- ordered by frequency.
isSpaceWord8 :: Word8 -> Bool
isSpaceWord8 w =
w == 0x20 ||
w == 0x0A || -- LF, \n
w == 0x09 || -- HT, \t
w == 0x0C || -- FF, \f
w == 0x0D || -- CR, \r
w == 0x0B || -- VT, \v
w == 0xA0 -- spotted by QC..
{-# INLINE isSpaceWord8 #-}
-- | Selects white-space characters in the Latin-1 range
isSpaceChar8 :: Char -> Bool
isSpaceChar8 c =
c == ' ' ||
c == '\t' ||
c == '\n' ||
c == '\r' ||
c == '\f' ||
c == '\v' ||
c == '\xa0'
{-# INLINE isSpaceChar8 #-}
------------------------------------------------------------------------
-- | Just like unsafePerformIO, but we inline it. Big performance gains as
-- it exposes lots of things to further inlining. /Very unsafe/. In
-- particular, you should do no memory allocation inside an
-- 'inlinePerformIO' block. On Hugs this is just @unsafePerformIO@.
--
{-# INLINE inlinePerformIO #-}
inlinePerformIO :: IO a -> a
#if defined(__GLASGOW_HASKELL__)
inlinePerformIO (IO m) = case m realWorld# of (# _, r #) -> r
#else
inlinePerformIO = unsafePerformIO
#endif
-- ---------------------------------------------------------------------
--
-- Standard C functions
--
foreign import ccall unsafe "string.h strlen" c_strlen
:: CString -> IO CSize
foreign import ccall unsafe "static stdlib.h &free" c_free_finalizer
:: FunPtr (Ptr Word8 -> IO ())
foreign import ccall unsafe "string.h memchr" c_memchr
:: Ptr Word8 -> CInt -> CSize -> IO (Ptr Word8)
memchr :: Ptr Word8 -> Word8 -> CSize -> IO (Ptr Word8)
memchr p w s = c_memchr p (fromIntegral w) s
foreign import ccall unsafe "string.h memcmp" c_memcmp
:: Ptr Word8 -> Ptr Word8 -> CSize -> IO CInt
memcmp :: Ptr Word8 -> Ptr Word8 -> Int -> IO CInt
memcmp p q s = c_memcmp p q (fromIntegral s)
foreign import ccall unsafe "string.h memcpy" c_memcpy
:: Ptr Word8 -> Ptr Word8 -> CSize -> IO (Ptr Word8)
memcpy :: Ptr Word8 -> Ptr Word8 -> Int -> IO ()
memcpy p q s = c_memcpy p q (fromIntegral s) >> return ()
{-
foreign import ccall unsafe "string.h memmove" c_memmove
:: Ptr Word8 -> Ptr Word8 -> CSize -> IO (Ptr Word8)
memmove :: Ptr Word8 -> Ptr Word8 -> CSize -> IO ()
memmove p q s = do c_memmove p q s
return ()
-}
foreign import ccall unsafe "string.h memset" c_memset
:: Ptr Word8 -> CInt -> CSize -> IO (Ptr Word8)
memset :: Ptr Word8 -> Word8 -> CSize -> IO (Ptr Word8)
memset p w s = c_memset p (fromIntegral w) s
-- ---------------------------------------------------------------------
--
-- Uses our C code
--
foreign import ccall unsafe "static fpstring.h fps_reverse" c_reverse
:: Ptr Word8 -> Ptr Word8 -> CULong -> IO ()
foreign import ccall unsafe "static fpstring.h fps_intersperse" c_intersperse
:: Ptr Word8 -> Ptr Word8 -> CULong -> Word8 -> IO ()
foreign import ccall unsafe "static fpstring.h fps_maximum" c_maximum
:: Ptr Word8 -> CULong -> IO Word8
foreign import ccall unsafe "static fpstring.h fps_minimum" c_minimum
:: Ptr Word8 -> CULong -> IO Word8
foreign import ccall unsafe "static fpstring.h fps_count" c_count
:: Ptr Word8 -> CULong -> Word8 -> IO CULong
| jwiegley/ghc-release | libraries/bytestring/Data/ByteString/Internal.hs | gpl-3.0 | 23,651 | 0 | 19 | 5,633 | 4,782 | 2,601 | 2,181 | 283 | 3 |
main :: Bool -> Bool -> ()
main = \True False -> ()
| roberth/uu-helium | test/staticwarnings/Lambda2.hs | gpl-3.0 | 53 | 0 | 7 | 14 | 30 | 16 | 14 | 2 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SWF.RespondDecisionTaskCompleted
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Used by deciders to tell the service that the 'DecisionTask' identified by the 'taskToken' has successfully completed. The 'decisions' argument specifies the list of
-- decisions made while processing the task.
--
-- A 'DecisionTaskCompleted' event is added to the workflow history. The 'executionContext' specified is attached to the event in the workflow execution history.
--
-- Access Control
--
-- If an IAM policy grants permission to use 'RespondDecisionTaskCompleted', it
-- can express permissions for the list of decisions in the 'decisions' parameter.
-- Each of the decisions has one or more parameters, much like a regular API
-- call. To allow for policies to be as readable as possible, you can express
-- permissions on decisions as if they were actual API calls, including applying
-- conditions to some parameters. For more information, see <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAM to ManageAccess to Amazon SWF Workflows>.
--
-- <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_RespondDecisionTaskCompleted.html>
module Network.AWS.SWF.RespondDecisionTaskCompleted
(
-- * Request
RespondDecisionTaskCompleted
-- ** Request constructor
, respondDecisionTaskCompleted
-- ** Request lenses
, rdtcDecisions
, rdtcExecutionContext
, rdtcTaskToken
-- * Response
, RespondDecisionTaskCompletedResponse
-- ** Response constructor
, respondDecisionTaskCompletedResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SWF.Types
import qualified GHC.Exts
data RespondDecisionTaskCompleted = RespondDecisionTaskCompleted
{ _rdtcDecisions :: List "decisions" Decision
, _rdtcExecutionContext :: Maybe Text
, _rdtcTaskToken :: Text
} deriving (Eq, Read, Show)
-- | 'RespondDecisionTaskCompleted' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rdtcDecisions' @::@ ['Decision']
--
-- * 'rdtcExecutionContext' @::@ 'Maybe' 'Text'
--
-- * 'rdtcTaskToken' @::@ 'Text'
--
respondDecisionTaskCompleted :: Text -- ^ 'rdtcTaskToken'
-> RespondDecisionTaskCompleted
respondDecisionTaskCompleted p1 = RespondDecisionTaskCompleted
{ _rdtcTaskToken = p1
, _rdtcDecisions = mempty
, _rdtcExecutionContext = Nothing
}
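-- A usage sketch (illustrative only): assuming the usual lens operators (e.g.
-- from "Control.Lens") are in scope, a completion for a placeholder task token
-- with no decisions and some execution context could be built as:
--
-- > respondDecisionTaskCompleted "task-token-from-PollForDecisionTask"
-- >     & rdtcDecisions .~ []
-- >     & rdtcExecutionContext .~ Just "processed step 3"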
-- | The list of decisions (possibly empty) made by the decider while processing
-- this decision task. See the docs for the 'Decision' structure for details.
rdtcDecisions :: Lens' RespondDecisionTaskCompleted [Decision]
rdtcDecisions = lens _rdtcDecisions (\s a -> s { _rdtcDecisions = a }) . _List
-- | User defined context to add to workflow execution.
rdtcExecutionContext :: Lens' RespondDecisionTaskCompleted (Maybe Text)
rdtcExecutionContext =
lens _rdtcExecutionContext (\s a -> s { _rdtcExecutionContext = a })
-- | The 'taskToken' from the 'DecisionTask'.
--
-- 'taskToken' is generated by the service and should be treated as an opaque
-- value. If the task is passed to another process, its 'taskToken' must also be
-- passed. This enables it to provide its progress and respond with results.
rdtcTaskToken :: Lens' RespondDecisionTaskCompleted Text
rdtcTaskToken = lens _rdtcTaskToken (\s a -> s { _rdtcTaskToken = a })
data RespondDecisionTaskCompletedResponse = RespondDecisionTaskCompletedResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'RespondDecisionTaskCompletedResponse' constructor.
respondDecisionTaskCompletedResponse :: RespondDecisionTaskCompletedResponse
respondDecisionTaskCompletedResponse = RespondDecisionTaskCompletedResponse
instance ToPath RespondDecisionTaskCompleted where
toPath = const "/"
instance ToQuery RespondDecisionTaskCompleted where
toQuery = const mempty
instance ToHeaders RespondDecisionTaskCompleted
instance ToJSON RespondDecisionTaskCompleted where
toJSON RespondDecisionTaskCompleted{..} = object
[ "taskToken" .= _rdtcTaskToken
, "decisions" .= _rdtcDecisions
, "executionContext" .= _rdtcExecutionContext
]
instance AWSRequest RespondDecisionTaskCompleted where
type Sv RespondDecisionTaskCompleted = SWF
type Rs RespondDecisionTaskCompleted = RespondDecisionTaskCompletedResponse
request = post "RespondDecisionTaskCompleted"
response = nullResponse RespondDecisionTaskCompletedResponse
| kim/amazonka | amazonka-swf/gen/Network/AWS/SWF/RespondDecisionTaskCompleted.hs | mpl-2.0 | 5,522 | 0 | 10 | 1,037 | 510 | 314 | 196 | 61 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Route53Domains.GetDomainDetail
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- This operation returns detailed information about the domain. The
-- domain\'s contact information is also returned as part of the output.
--
-- /See:/ <http://docs.aws.amazon.com/Route53/latest/APIReference/api-GetDomainDetail.html AWS API Reference> for GetDomainDetail.
module Network.AWS.Route53Domains.GetDomainDetail
(
-- * Creating a Request
getDomainDetail
, GetDomainDetail
-- * Request Lenses
, gddDomainName
-- * Destructuring the Response
, getDomainDetailResponse
, GetDomainDetailResponse
-- * Response Lenses
, gddrsTechPrivacy
, gddrsDNSSec
, gddrsWhoIsServer
, gddrsRegistryDomainId
, gddrsRegistrantPrivacy
, gddrsUpdatedDate
, gddrsAdminPrivacy
, gddrsAutoRenew
, gddrsAbuseContactPhone
, gddrsRegistrarURL
, gddrsAbuseContactEmail
, gddrsExpirationDate
, gddrsCreationDate
, gddrsRegistrarName
, gddrsReseller
, gddrsStatusList
, gddrsResponseStatus
, gddrsDomainName
, gddrsNameservers
, gddrsAdminContact
, gddrsRegistrantContact
, gddrsTechContact
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.Route53Domains.Types
import Network.AWS.Route53Domains.Types.Product
-- | The GetDomainDetail request includes the following element.
--
-- /See:/ 'getDomainDetail' smart constructor.
newtype GetDomainDetail = GetDomainDetail'
{ _gddDomainName :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetDomainDetail' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gddDomainName'
getDomainDetail
:: Text -- ^ 'gddDomainName'
-> GetDomainDetail
getDomainDetail pDomainName_ =
GetDomainDetail'
{ _gddDomainName = pDomainName_
}
-- | The name of a domain.
--
-- Type: String
--
-- Default: None
--
-- Constraints: The domain name can contain only the letters a through z,
-- the numbers 0 through 9, and hyphen (-). Internationalized Domain Names
-- are not supported.
--
-- Required: Yes
gddDomainName :: Lens' GetDomainDetail Text
gddDomainName = lens _gddDomainName (\ s a -> s{_gddDomainName = a});
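-- A usage sketch (illustrative only): assuming an AWS environment and the
-- usual lens operators are in scope, the request can be sent and a response
-- field inspected like this (the domain is a placeholder):
--
-- > do rs <- send (getDomainDetail "example.com")
-- >    print (rs ^. gddrsRegistrarName)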
instance AWSRequest GetDomainDetail where
type Rs GetDomainDetail = GetDomainDetailResponse
request = postJSON route53Domains
response
= receiveJSON
(\ s h x ->
GetDomainDetailResponse' <$>
(x .?> "TechPrivacy") <*> (x .?> "DnsSec") <*>
(x .?> "WhoIsServer")
<*> (x .?> "RegistryDomainId")
<*> (x .?> "RegistrantPrivacy")
<*> (x .?> "UpdatedDate")
<*> (x .?> "AdminPrivacy")
<*> (x .?> "AutoRenew")
<*> (x .?> "AbuseContactPhone")
<*> (x .?> "RegistrarUrl")
<*> (x .?> "AbuseContactEmail")
<*> (x .?> "ExpirationDate")
<*> (x .?> "CreationDate")
<*> (x .?> "RegistrarName")
<*> (x .?> "Reseller")
<*> (x .?> "StatusList" .!@ mempty)
<*> (pure (fromEnum s))
<*> (x .:> "DomainName")
<*> (x .?> "Nameservers" .!@ mempty)
<*> (x .:> "AdminContact")
<*> (x .:> "RegistrantContact")
<*> (x .:> "TechContact"))
instance ToHeaders GetDomainDetail where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("Route53Domains_v20140515.GetDomainDetail" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON GetDomainDetail where
toJSON GetDomainDetail'{..}
= object
(catMaybes [Just ("DomainName" .= _gddDomainName)])
instance ToPath GetDomainDetail where
toPath = const "/"
instance ToQuery GetDomainDetail where
toQuery = const mempty
-- | The GetDomainDetail response includes the following elements.
--
-- /See:/ 'getDomainDetailResponse' smart constructor.
data GetDomainDetailResponse = GetDomainDetailResponse'
{ _gddrsTechPrivacy :: !(Maybe Bool)
, _gddrsDNSSec :: !(Maybe Text)
, _gddrsWhoIsServer :: !(Maybe Text)
, _gddrsRegistryDomainId :: !(Maybe Text)
, _gddrsRegistrantPrivacy :: !(Maybe Bool)
, _gddrsUpdatedDate :: !(Maybe POSIX)
, _gddrsAdminPrivacy :: !(Maybe Bool)
, _gddrsAutoRenew :: !(Maybe Bool)
, _gddrsAbuseContactPhone :: !(Maybe Text)
, _gddrsRegistrarURL :: !(Maybe Text)
, _gddrsAbuseContactEmail :: !(Maybe Text)
, _gddrsExpirationDate :: !(Maybe POSIX)
, _gddrsCreationDate :: !(Maybe POSIX)
, _gddrsRegistrarName :: !(Maybe Text)
, _gddrsReseller :: !(Maybe Text)
, _gddrsStatusList :: !(Maybe [Text])
, _gddrsResponseStatus :: !Int
, _gddrsDomainName :: !Text
, _gddrsNameservers :: ![Nameserver]
, _gddrsAdminContact :: !(Sensitive ContactDetail)
, _gddrsRegistrantContact :: !(Sensitive ContactDetail)
, _gddrsTechContact :: !(Sensitive ContactDetail)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetDomainDetailResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gddrsTechPrivacy'
--
-- * 'gddrsDNSSec'
--
-- * 'gddrsWhoIsServer'
--
-- * 'gddrsRegistryDomainId'
--
-- * 'gddrsRegistrantPrivacy'
--
-- * 'gddrsUpdatedDate'
--
-- * 'gddrsAdminPrivacy'
--
-- * 'gddrsAutoRenew'
--
-- * 'gddrsAbuseContactPhone'
--
-- * 'gddrsRegistrarURL'
--
-- * 'gddrsAbuseContactEmail'
--
-- * 'gddrsExpirationDate'
--
-- * 'gddrsCreationDate'
--
-- * 'gddrsRegistrarName'
--
-- * 'gddrsReseller'
--
-- * 'gddrsStatusList'
--
-- * 'gddrsResponseStatus'
--
-- * 'gddrsDomainName'
--
-- * 'gddrsNameservers'
--
-- * 'gddrsAdminContact'
--
-- * 'gddrsRegistrantContact'
--
-- * 'gddrsTechContact'
getDomainDetailResponse
:: Int -- ^ 'gddrsResponseStatus'
-> Text -- ^ 'gddrsDomainName'
-> ContactDetail -- ^ 'gddrsAdminContact'
-> ContactDetail -- ^ 'gddrsRegistrantContact'
-> ContactDetail -- ^ 'gddrsTechContact'
-> GetDomainDetailResponse
getDomainDetailResponse pResponseStatus_ pDomainName_ pAdminContact_ pRegistrantContact_ pTechContact_ =
GetDomainDetailResponse'
{ _gddrsTechPrivacy = Nothing
, _gddrsDNSSec = Nothing
, _gddrsWhoIsServer = Nothing
, _gddrsRegistryDomainId = Nothing
, _gddrsRegistrantPrivacy = Nothing
, _gddrsUpdatedDate = Nothing
, _gddrsAdminPrivacy = Nothing
, _gddrsAutoRenew = Nothing
, _gddrsAbuseContactPhone = Nothing
, _gddrsRegistrarURL = Nothing
, _gddrsAbuseContactEmail = Nothing
, _gddrsExpirationDate = Nothing
, _gddrsCreationDate = Nothing
, _gddrsRegistrarName = Nothing
, _gddrsReseller = Nothing
, _gddrsStatusList = Nothing
, _gddrsResponseStatus = pResponseStatus_
, _gddrsDomainName = pDomainName_
, _gddrsNameservers = mempty
, _gddrsAdminContact = _Sensitive # pAdminContact_
, _gddrsRegistrantContact = _Sensitive # pRegistrantContact_
, _gddrsTechContact = _Sensitive # pTechContact_
}
-- | Specifies whether contact information for the tech contact is concealed
-- from WHOIS queries. If the value is 'true', WHOIS (\"who is\") queries
-- will return contact information for our registrar partner, Gandi,
-- instead of the contact information that you enter.
--
-- Type: Boolean
gddrsTechPrivacy :: Lens' GetDomainDetailResponse (Maybe Bool)
gddrsTechPrivacy = lens _gddrsTechPrivacy (\ s a -> s{_gddrsTechPrivacy = a});
-- | Reserved for future use.
gddrsDNSSec :: Lens' GetDomainDetailResponse (Maybe Text)
gddrsDNSSec = lens _gddrsDNSSec (\ s a -> s{_gddrsDNSSec = a});
-- | The fully qualified name of the WHOIS server that can answer the WHOIS
-- query for the domain.
--
-- Type: String
gddrsWhoIsServer :: Lens' GetDomainDetailResponse (Maybe Text)
gddrsWhoIsServer = lens _gddrsWhoIsServer (\ s a -> s{_gddrsWhoIsServer = a});
-- | Reserved for future use.
gddrsRegistryDomainId :: Lens' GetDomainDetailResponse (Maybe Text)
gddrsRegistryDomainId = lens _gddrsRegistryDomainId (\ s a -> s{_gddrsRegistryDomainId = a});
-- | Specifies whether contact information for the registrant contact is
-- concealed from WHOIS queries. If the value is 'true', WHOIS (\"who is\")
-- queries will return contact information for our registrar partner,
-- Gandi, instead of the contact information that you enter.
--
-- Type: Boolean
gddrsRegistrantPrivacy :: Lens' GetDomainDetailResponse (Maybe Bool)
gddrsRegistrantPrivacy = lens _gddrsRegistrantPrivacy (\ s a -> s{_gddrsRegistrantPrivacy = a});
-- | The last updated date of the domain as found in the response to a WHOIS
-- query. The date format is Unix time.
gddrsUpdatedDate :: Lens' GetDomainDetailResponse (Maybe UTCTime)
gddrsUpdatedDate = lens _gddrsUpdatedDate (\ s a -> s{_gddrsUpdatedDate = a}) . mapping _Time;
-- | Specifies whether contact information for the admin contact is concealed
-- from WHOIS queries. If the value is 'true', WHOIS (\"who is\") queries
-- will return contact information for our registrar partner, Gandi,
-- instead of the contact information that you enter.
--
-- Type: Boolean
gddrsAdminPrivacy :: Lens' GetDomainDetailResponse (Maybe Bool)
gddrsAdminPrivacy = lens _gddrsAdminPrivacy (\ s a -> s{_gddrsAdminPrivacy = a});
-- | Specifies whether the domain registration is set to renew automatically.
--
-- Type: Boolean
gddrsAutoRenew :: Lens' GetDomainDetailResponse (Maybe Bool)
gddrsAutoRenew = lens _gddrsAutoRenew (\ s a -> s{_gddrsAutoRenew = a});
-- | Phone number for reporting abuse.
--
-- Type: String
gddrsAbuseContactPhone :: Lens' GetDomainDetailResponse (Maybe Text)
gddrsAbuseContactPhone = lens _gddrsAbuseContactPhone (\ s a -> s{_gddrsAbuseContactPhone = a});
-- | Web address of the registrar.
--
-- Type: String
gddrsRegistrarURL :: Lens' GetDomainDetailResponse (Maybe Text)
gddrsRegistrarURL = lens _gddrsRegistrarURL (\ s a -> s{_gddrsRegistrarURL = a});
-- | Email address to contact to report incorrect contact information for a
-- domain, to report that the domain is being used to send spam, to report
-- that someone is cybersquatting on a domain name, or report some other
-- type of abuse.
--
-- Type: String
gddrsAbuseContactEmail :: Lens' GetDomainDetailResponse (Maybe Text)
gddrsAbuseContactEmail = lens _gddrsAbuseContactEmail (\ s a -> s{_gddrsAbuseContactEmail = a});
-- | The date when the registration for the domain is set to expire. The date
-- format is Unix time.
gddrsExpirationDate :: Lens' GetDomainDetailResponse (Maybe UTCTime)
gddrsExpirationDate = lens _gddrsExpirationDate (\ s a -> s{_gddrsExpirationDate = a}) . mapping _Time;
-- | The date when the domain was created as found in the response to a WHOIS
-- query. The date format is Unix time.
gddrsCreationDate :: Lens' GetDomainDetailResponse (Maybe UTCTime)
gddrsCreationDate = lens _gddrsCreationDate (\ s a -> s{_gddrsCreationDate = a}) . mapping _Time;
-- | Name of the registrar of the domain as identified in the registry.
-- Amazon Route 53 domains are registered by registrar Gandi. The value is
-- '\"GANDI SAS\"'.
--
-- Type: String
gddrsRegistrarName :: Lens' GetDomainDetailResponse (Maybe Text)
gddrsRegistrarName = lens _gddrsRegistrarName (\ s a -> s{_gddrsRegistrarName = a});
-- | Reseller of the domain. Domains registered or transferred using Amazon
-- Route 53 domains will have '\"Amazon\"' as the reseller.
--
-- Type: String
gddrsReseller :: Lens' GetDomainDetailResponse (Maybe Text)
gddrsReseller = lens _gddrsReseller (\ s a -> s{_gddrsReseller = a});
-- | An array of domain name status codes, also known as Extensible
-- Provisioning Protocol (EPP) status codes.
--
-- ICANN, the organization that maintains a central database of domain
-- names, has developed a set of domain name status codes that tell you the
-- status of a variety of operations on a domain name, for example,
-- registering a domain name, transferring a domain name to another
-- registrar, renewing the registration for a domain name, and so on. All
-- registrars use this same set of status codes.
--
-- For a current list of domain name status codes and an explanation of
-- what each code means, go to the <https://www.icann.org/ ICANN website>
-- and search for 'epp status codes'. (Search on the ICANN website; web
-- searches sometimes return an old version of the document.)
--
-- Type: Array of String
gddrsStatusList :: Lens' GetDomainDetailResponse [Text]
gddrsStatusList = lens _gddrsStatusList (\ s a -> s{_gddrsStatusList = a}) . _Default . _Coerce;
-- | The response status code.
gddrsResponseStatus :: Lens' GetDomainDetailResponse Int
gddrsResponseStatus = lens _gddrsResponseStatus (\ s a -> s{_gddrsResponseStatus = a});
-- | The name of a domain.
--
-- Type: String
gddrsDomainName :: Lens' GetDomainDetailResponse Text
gddrsDomainName = lens _gddrsDomainName (\ s a -> s{_gddrsDomainName = a});
-- | The name servers of the domain.
--
-- Type: Array of Nameserver
gddrsNameservers :: Lens' GetDomainDetailResponse [Nameserver]
gddrsNameservers = lens _gddrsNameservers (\ s a -> s{_gddrsNameservers = a}) . _Coerce;
-- | Provides details about the domain administrative contact.
--
-- Type: Complex
--
-- Children: 'FirstName', 'MiddleName', 'LastName', 'ContactType',
-- 'OrganizationName', 'AddressLine1', 'AddressLine2', 'City', 'State',
-- 'CountryCode', 'ZipCode', 'PhoneNumber', 'Email', 'Fax', 'ExtraParams'
gddrsAdminContact :: Lens' GetDomainDetailResponse ContactDetail
gddrsAdminContact = lens _gddrsAdminContact (\ s a -> s{_gddrsAdminContact = a}) . _Sensitive;
-- | Provides details about the domain registrant.
--
-- Type: Complex
--
-- Children: 'FirstName', 'MiddleName', 'LastName', 'ContactType',
-- 'OrganizationName', 'AddressLine1', 'AddressLine2', 'City', 'State',
-- 'CountryCode', 'ZipCode', 'PhoneNumber', 'Email', 'Fax', 'ExtraParams'
gddrsRegistrantContact :: Lens' GetDomainDetailResponse ContactDetail
gddrsRegistrantContact = lens _gddrsRegistrantContact (\ s a -> s{_gddrsRegistrantContact = a}) . _Sensitive;
-- | Provides details about the domain technical contact.
--
-- Type: Complex
--
-- Children: 'FirstName', 'MiddleName', 'LastName', 'ContactType',
-- 'OrganizationName', 'AddressLine1', 'AddressLine2', 'City', 'State',
-- 'CountryCode', 'ZipCode', 'PhoneNumber', 'Email', 'Fax', 'ExtraParams'
gddrsTechContact :: Lens' GetDomainDetailResponse ContactDetail
gddrsTechContact = lens _gddrsTechContact (\ s a -> s{_gddrsTechContact = a}) . _Sensitive;
| fmapfmapfmap/amazonka | amazonka-route53-domains/gen/Network/AWS/Route53Domains/GetDomainDetail.hs | mpl-2.0 | 15,725 | 0 | 32 | 3,214 | 2,346 | 1,406 | 940 | 241 | 1 |
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
-- | Defines a class of monads representing access to source code.
-- This is useful for implementing lexers, as well as implementing a
-- diagnostic message printer.
module Control.Monad.SourceFiles.Class(
MonadSourceFiles(..)
) where
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.List
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Journal
import Control.Monad.Writer
import Data.Array
import Data.ByteString hiding (map)
import Data.Position.Filename
-- | Class of monads that have access to source code.
class Monad m => MonadSourceFiles m where
-- | Get all lines from the source file.
sourceFile :: Filename
-- ^ The path to the source file.
-> m (Array Word ByteString)
-- ^ An array of all lines in the source file.
sourceFileSpan :: Filename
-- ^ The path to the source file.
-> Word
-- ^ The starting line
-> Word
-- ^ The ending line
-> m [ByteString]
sourceFileSpan fpath start end =
do
fdata <- sourceFile fpath
return $! map (fdata !) [start..end]
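-- A usage sketch (illustrative only): a diagnostic printer could fetch the
-- lines surrounding an error location like this; 'contextLines' is a
-- hypothetical helper, not part of this class, and clamping to the file
-- length is omitted:
--
-- > contextLines :: MonadSourceFiles m => Filename -> Word -> m [ByteString]
-- > contextLines fname line =
-- >   sourceFileSpan fname (if line >= 2 then line - 2 else 0) (line + 2)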
instance MonadSourceFiles m => MonadSourceFiles (ContT r m) where
sourceFile = lift . sourceFile
instance (MonadSourceFiles m) => MonadSourceFiles (ExceptT e m) where
sourceFile = lift . sourceFile
instance (MonadSourceFiles m) => MonadSourceFiles (JournalT e m) where
sourceFile = lift . sourceFile
instance MonadSourceFiles m => MonadSourceFiles (ListT m) where
sourceFile = lift . sourceFile
instance MonadSourceFiles m => MonadSourceFiles (ReaderT r m) where
sourceFile = lift . sourceFile
instance MonadSourceFiles m => MonadSourceFiles (StateT s m) where
sourceFile = lift . sourceFile
instance (MonadSourceFiles m, Monoid w) => MonadSourceFiles (WriterT w m) where
sourceFile = lift . sourceFile
| emc2/compiler-misc | src/Control/Monad/SourceFiles/Class.hs | bsd-3-clause | 3,514 | 0 | 11 | 725 | 455 | 262 | 193 | 38 | 0 |
-- | Elliptic Curve Arithmetic.
--
-- /WARNING:/ These functions are vulnerable to timing attacks.
{-# LANGUAGE ScopedTypeVariables #-}
module Crypto.ECC.Simple.Prim
( scalarGenerate
, scalarFromInteger
, pointAdd
, pointDouble
, pointBaseMul
, pointMul
, pointAddTwoMuls
, pointFromIntegers
, isPointAtInfinity
, isPointValid
) where
import Data.Maybe
import Crypto.Internal.Imports
import Crypto.Internal.Proxy
import Crypto.Number.ModArithmetic
import Crypto.Number.F2m
import Crypto.Number.Generate (generateBetween)
import Crypto.ECC.Simple.Types
import Crypto.Error
import Crypto.Random
-- | Generate a valid scalar for a specific Curve
scalarGenerate :: forall randomly curve . (MonadRandom randomly, Curve curve) => randomly (Scalar curve)
scalarGenerate =
Scalar <$> generateBetween 1 (n - 1)
where
n = curveEccN $ curveParameters (Proxy :: Proxy curve)
scalarFromInteger :: forall curve . Curve curve => Integer -> CryptoFailable (Scalar curve)
scalarFromInteger n
| n < 0 || n >= mx = CryptoFailed $ CryptoError_EcScalarOutOfBounds
| otherwise = CryptoPassed $ Scalar n
where
mx = case curveType (Proxy :: Proxy curve) of
CurveBinary (CurveBinaryParam b) -> b
CurvePrime (CurvePrimeParam p) -> p
--TODO: Extract helper function for `fromMaybe PointO...`
-- | Elliptic Curve point negation:
-- @pointNegate p@ returns point @q@ such that @pointAdd p q == PointO@.
pointNegate :: Curve curve => Point curve -> Point curve
pointNegate PointO = PointO
pointNegate point@(Point x y) =
case curveType point of
CurvePrime {} -> Point x (-y)
CurveBinary {} -> Point x (x `addF2m` y)
-- | Elliptic Curve point addition.
--
-- /WARNING:/ Vulnerable to timing attacks.
pointAdd :: Curve curve => Point curve -> Point curve -> Point curve
pointAdd PointO PointO = PointO
pointAdd PointO q = q
pointAdd p PointO = p
pointAdd p q
| p == q = pointDouble p
| p == pointNegate q = PointO
pointAdd point@(Point xp yp) (Point xq yq) =
case ty of
CurvePrime (CurvePrimeParam pr) -> fromMaybe PointO $ do
s <- divmod (yp - yq) (xp - xq) pr
let xr = (s ^ (2::Int) - xp - xq) `mod` pr
yr = (s * (xp - xr) - yp) `mod` pr
return $ Point xr yr
CurveBinary (CurveBinaryParam fx) -> fromMaybe PointO $ do
s <- divF2m fx (yp `addF2m` yq) (xp `addF2m` xq)
let xr = mulF2m fx s s `addF2m` s `addF2m` xp `addF2m` xq `addF2m` a
yr = mulF2m fx s (xp `addF2m` xr) `addF2m` xr `addF2m` yp
return $ Point xr yr
where
ty = curveType point
cc = curveParameters point
a = curveEccA cc
-- | Elliptic Curve point doubling.
--
-- /WARNING:/ Vulnerable to timing attacks.
--
-- This perform the following calculation:
-- > lambda = (3 * xp ^ 2 + a) / (2 * yp)
-- > xr = lambda ^ 2 - 2 xp
-- > yr = lambda (xp - xr) - yp
--
-- With binary curve:
-- > xp == 0 => P = O
-- > otherwise =>
-- > s = xp + (yp / xp)
-- > xr = s ^ 2 + s + a
-- > yr = xp ^ 2 + (s+1) * xr
--
pointDouble :: Curve curve => Point curve -> Point curve
pointDouble PointO = PointO
pointDouble point@(Point xp yp) =
case ty of
CurvePrime (CurvePrimeParam pr) -> fromMaybe PointO $ do
lambda <- divmod (3 * xp ^ (2::Int) + a) (2 * yp) pr
let xr = (lambda ^ (2::Int) - 2 * xp) `mod` pr
yr = (lambda * (xp - xr) - yp) `mod` pr
return $ Point xr yr
CurveBinary (CurveBinaryParam fx)
| xp == 0 -> PointO
| otherwise -> fromMaybe PointO $ do
s <- return . addF2m xp =<< divF2m fx yp xp
let xr = mulF2m fx s s `addF2m` s `addF2m` a
yr = mulF2m fx xp xp `addF2m` mulF2m fx xr (s `addF2m` 1)
return $ Point xr yr
where
ty = curveType point
cc = curveParameters point
a = curveEccA cc
-- | Elliptic curve point multiplication using the base
--
-- /WARNING:/ Vulnerable to timing attacks.
pointBaseMul :: Curve curve => Scalar curve -> Point curve
pointBaseMul n = pointMul n (curveEccG $ curveParameters (Proxy :: Proxy curve))
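-- A small sketch (illustrative only, not part of the original module): derive
-- a key pair by generating a random scalar and multiplying the curve's base
-- point by it. 'genKeyPairSketch' is a hypothetical helper name.
genKeyPairSketch :: (MonadRandom m, Curve c) => m (Scalar c, Point c)
genKeyPairSketch = do
    s <- scalarGenerate
    return (s, pointBaseMul s)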
-- | Elliptic curve point multiplication (double and add algorithm).
--
-- /WARNING:/ Vulnerable to timing attacks.
pointMul :: Curve curve => Scalar curve -> Point curve -> Point curve
pointMul _ PointO = PointO
pointMul (Scalar n) p
| n == 0 = PointO
| n == 1 = p
| odd n = pointAdd p (pointMul (Scalar (n - 1)) p)
| otherwise = pointMul (Scalar (n `div` 2)) (pointDouble p)
-- | Elliptic curve double-scalar multiplication (uses Shamir's trick).
--
-- > pointAddTwoMuls n1 p1 n2 p2 == pointAdd (pointMul n1 p1)
-- > (pointMul n2 p2)
--
-- /WARNING:/ Vulnerable to timing attacks.
pointAddTwoMuls :: Curve curve => Scalar curve -> Point curve -> Scalar curve -> Point curve -> Point curve
pointAddTwoMuls _ PointO _ PointO = PointO
pointAddTwoMuls _ PointO n2 p2 = pointMul n2 p2
pointAddTwoMuls n1 p1 _ PointO = pointMul n1 p1
pointAddTwoMuls (Scalar n1) p1 (Scalar n2) p2 = go (n1, n2)
where
p0 = pointAdd p1 p2
go (0, 0 ) = PointO
go (k1, k2) =
let q = pointDouble $ go (k1 `div` 2, k2 `div` 2)
in case (odd k1, odd k2) of
(True , True ) -> pointAdd p0 q
(True , False ) -> pointAdd p1 q
(False , True ) -> pointAdd p2 q
(False , False ) -> q
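-- A quick sanity check (illustrative only), restating the property from the
-- documentation above as a testable predicate:
--
-- > prop_addTwoMuls n1 p1 n2 p2 =
-- >     pointAddTwoMuls n1 p1 n2 p2 == pointAdd (pointMul n1 p1) (pointMul n2 p2)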
-- | Check if a point is the point at infinity.
isPointAtInfinity :: Point curve -> Bool
isPointAtInfinity PointO = True
isPointAtInfinity _ = False
-- | Make a point on a curve from integer (x,y) coordinate
--
-- if the point is not valid related to the curve then an error is
-- returned instead of a point
pointFromIntegers :: forall curve . Curve curve => (Integer, Integer) -> CryptoFailable (Point curve)
pointFromIntegers (x,y)
| isPointValid (Proxy :: Proxy curve) x y = CryptoPassed $ Point x y
| otherwise = CryptoFailed $ CryptoError_PointCoordinatesInvalid
-- | check if a point is on specific curve
--
-- This perform three checks:
--
-- * x is not out of range
-- * y is not out of range
-- * the equation @y^2 = x^3 + a*x + b (mod p)@ holds
isPointValid :: Curve curve => proxy curve -> Integer -> Integer -> Bool
isPointValid proxy x y =
case ty of
CurvePrime (CurvePrimeParam p) ->
let a = curveEccA cc
b = curveEccB cc
eqModP z1 z2 = (z1 `mod` p) == (z2 `mod` p)
isValid e = e >= 0 && e < p
in isValid x && isValid y && (y ^ (2 :: Int)) `eqModP` (x ^ (3 :: Int) + a * x + b)
CurveBinary (CurveBinaryParam fx) ->
let a = curveEccA cc
b = curveEccB cc
add = addF2m
mul = mulF2m fx
isValid e = modF2m fx e == e
in and [ isValid x
, isValid y
, ((((x `add` a) `mul` x `add` y) `mul` x) `add` b `add` (squareF2m fx y)) == 0
]
where
ty = curveType proxy
cc = curveParameters proxy
-- | div and mod
divmod :: Integer -> Integer -> Integer -> Maybe Integer
divmod y x m = do
i <- inverse (x `mod` m) m
return $ y * i `mod` m
| tekul/cryptonite | Crypto/ECC/Simple/Prim.hs | bsd-3-clause | 7,400 | 0 | 24 | 2,197 | 2,275 | 1,196 | 1,079 | 133 | 5 |
module Koan where
allEnrolled :: Bool
allEnrolled = False
| Kheldar/hw-koans | koan/Koan.hs | bsd-3-clause | 59 | 0 | 4 | 10 | 14 | 9 | 5 | 3 | 1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, DeriveDataTypeable, PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Mosaic
-- Copyright : (c) 2009 Adam Vogt, 2007 James Webb
-- License : BSD-style (see xmonad/LICENSE)
--
-- Maintainer : vogt.adam<at>gmail.com
-- Stability : unstable
-- Portability : unportable
--
-- Based on MosaicAlt, but aspect ratio messages always change the aspect
-- ratios, and rearranging the window stack changes the window sizes.
--
-----------------------------------------------------------------------------
module XMonad.Layout.Mosaic (
-- * Usage
-- $usage
Aspect(..)
,mosaic
,changeMaster
,changeFocused
,Mosaic
)
where
import Prelude hiding (sum)
import XMonad(Typeable,
LayoutClass(doLayout, handleMessage, pureMessage, description),
Message, X, fromMessage, withWindowSet, Resize(..),
splitHorizontallyBy, splitVerticallyBy, sendMessage, Rectangle)
import qualified XMonad.StackSet as W
import Control.Arrow(second, first)
import Control.Monad(mplus)
import Data.Foldable(Foldable,foldMap, sum)
import Data.Function(on)
import Data.List(sortBy)
import Data.Monoid(Monoid,mempty, mappend)
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Layout.Mosaic
--
-- Then edit your @layoutHook@ by adding the Mosaic layout:
--
-- > myLayout = mosaic 2 [3,2] ||| Full ||| etc..
-- > main = xmonad $ defaultConfig { layoutHook = myLayout }
--
-- Unfortunately, infinite lists break serialization, so don't use them. And if
-- the list is too short, it is extended with @++ repeat 1@, which covers the
-- main use case.
--
-- To change the choice in aspect ratio and the relative sizes of windows, add
-- to your keybindings:
--
-- > , ((modm, xK_a), sendMessage Taller)
-- > , ((modm, xK_z), sendMessage Wider)
--
-- > , ((modm, xK_r), sendMessage Reset)
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
data Aspect
= Taller
| Wider
| Reset
| SlopeMod ([Rational] -> [Rational])
deriving (Typeable)
instance Message Aspect
-- | The relative magnitudes (the sign is ignored) of the rational numbers in
-- the second argument determine the relative areas that the windows receive.
-- The first number represents the size of the master window, the second is for
-- the next window in the stack, and so on.
--
-- The list is extended with @++ repeat 1@, so @mosaic 1.5 []@ is like a
-- resizable grid.
--
-- The first parameter is the multiplicative factor to use when responding to
-- the 'Expand' message.
mosaic :: Rational -> [Rational] -> Mosaic a
mosaic = Mosaic Nothing
data Mosaic a = -- | True to override the aspect, current index, maximum index
Mosaic (Maybe(Bool,Rational,Int)) Rational [Rational] deriving (Read,Show)
instance LayoutClass Mosaic a where
description = const "Mosaic"
pureMessage (Mosaic Nothing _ _) _ = Nothing
pureMessage (Mosaic (Just(_,ix,mix)) delta ss) ms = fromMessage ms >>= ixMod
where ixMod Taller | round ix >= mix = Nothing
| otherwise = Just $ Mosaic (Just(False,succ ix,mix)) delta ss
ixMod Wider | round ix <= (0::Integer) = Nothing
| otherwise = Just $ Mosaic (Just(False,pred ix,mix)) delta ss
ixMod Reset = Just $ Mosaic Nothing delta ss
ixMod (SlopeMod f) = Just $ Mosaic (Just(False,ix,mix)) delta (f ss)
handleMessage l@(Mosaic _ delta _) ms
| Just Expand <- fromMessage ms = changeFocused (*delta) >> return Nothing
| Just Shrink <- fromMessage ms = changeFocused (/delta) >> return Nothing
| otherwise = return $ pureMessage l ms
doLayout (Mosaic state delta ss) r st = let
ssExt = zipWith const (ss ++ repeat 1) $ W.integrate st
rects = splits r ssExt
nls = length rects
fi = fromIntegral
nextIx (ov,ix,mix)
| mix <= 0 || ov = fromIntegral $ nls `div` 2
| otherwise = max 0 $ (*fi (pred nls)) $ min 1 $ ix / fi mix
rect = rects !! maybe (nls `div` 2) round (nextIx `fmap` state)
state' = fmap (\x@(ov,_,_) -> (ov,nextIx x,pred nls)) state
`mplus` Just (True,fromIntegral nls / 2,pred nls)
ss' = maybe ss (const ss `either` const ssExt) $ zipRemain ss ssExt
in return (zip (W.integrate st) rect, Just $ Mosaic state' delta ss')
zipRemain :: [a] -> [b] -> Maybe (Either [a] [b])
zipRemain (_:xs) (_:ys) = zipRemain xs ys
zipRemain [] [] = Nothing
zipRemain [] y = Just (Right y)
zipRemain x [] = Just (Left x)
-- | These sample functions are meant to be applied to the list of window sizes
-- through the 'SlopeMod' message.
changeMaster :: (Rational -> Rational) -> X ()
changeMaster = sendMessage . SlopeMod . onHead
-- | Apply a function to the Rational that represents the currently focused
-- window.
--
-- 'Expand' and 'Shrink' messages are responded to with @changeFocused
-- (*delta)@ or @changeFocused (/delta)@ where @delta@ is the first argument to
-- 'mosaic'.
--
-- This is exported because other functions (ex. @const 1@, @(+1)@) may be
-- useful to apply to the current area.
changeFocused :: (Rational -> Rational) -> X ()
changeFocused f = withWindowSet $ sendMessage . SlopeMod
. maybe id (mulIx . length . W.up)
. W.stack . W.workspace . W.current
where mulIx i = uncurry (++) . second (onHead f) . splitAt i
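-- A possible pair of keybindings using these helpers (illustrative only; the
-- keys and factors are arbitrary and not part of this module):
--
-- > , ((modm, xK_u), changeMaster (*1.1))
-- > , ((modm, xK_i), changeFocused (const 1))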
onHead :: (a -> a) -> [a] -> [a]
onHead f = uncurry (++) . first (fmap f) . splitAt 1
splits :: Rectangle -> [Rational] -> [[Rectangle]]
splits rect = map (reverse . map snd . sortBy (compare `on` fst))
. splitsL rect . makeTree snd . zip [1..]
. normalize . reverse . map abs
splitsL :: Rectangle -> Tree (Int,Rational) -> [[(Int,Rectangle)]]
splitsL _rect Empty = []
splitsL rect (Leaf (x,_)) = [[(x,rect)]]
splitsL rect (Branch l r) = do
let mkSplit f = f ((sumSnd l /) $ sumSnd l + sumSnd r) rect
sumSnd = sum . fmap snd
(rl,rr) <- map mkSplit [splitVerticallyBy,splitHorizontallyBy]
splitsL rl l `interleave` splitsL rr r
-- like zipWith (++), but when one list is shorter, its elements are duplicated
-- so that they match
interleave :: [[a]] -> [[a]] -> [[a]]
interleave xs ys | lx > ly = zc xs (extend lx ys)
| otherwise = zc (extend ly xs) ys
where lx = length xs
ly = length ys
zc = zipWith (++)
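-- For example: interleave [[1],[2]] [[10]] == [[1,10],[2,10]], since the shorter
-- list is first stretched (here to [[10],[10]]) before zipping with (++).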
extend :: Int -> [a] -> [a]
extend n pat = do
(p,e) <- zip pat $ replicate m True ++ repeat False
[p | e] ++ replicate d p
where (d,m) = n `divMod` length pat
normalize :: Fractional a => [a] -> [a]
normalize x = let s = sum x in map (/s) x
data Tree a = Branch (Tree a) (Tree a) | Leaf a | Empty
instance Foldable Tree where
foldMap _f Empty = mempty
foldMap f (Leaf x) = f x
foldMap f (Branch l r) = foldMap f l `mappend` foldMap f r
instance Functor Tree where
fmap f (Leaf x) = Leaf $ f x
fmap f (Branch l r) = Branch (fmap f l) (fmap f r)
fmap _ Empty = Empty
instance Monoid (Tree a) where
mempty = Empty
mappend Empty x = x
mappend x Empty = x
mappend x y = Branch x y
makeTree :: (Num a1, Ord a1) => (a -> a1) -> [a] -> Tree a
makeTree _ [] = Empty
makeTree _ [x] = Leaf x
makeTree f xs = Branch (makeTree f a) (makeTree f b)
where ((a,b),_) = foldr go (([],[]),(0,0)) xs
go n ((ls,rs),(l,r))
| l > r = ((ls,n:rs),(l,f n+r))
| otherwise = ((n:ls,rs),(f n+l,r))
| adinapoli/xmonad-contrib | XMonad/Layout/Mosaic.hs | bsd-3-clause | 7,814 | 0 | 18 | 1,934 | 2,449 | 1,321 | 1,128 | 118 | 1 |
module Options.OptimizationLevels where
import Types
optimizationLevelsOptions :: [Flag]
optimizationLevelsOptions =
[ flag { flagName = "-O0"
, flagDescription = "Disable optimisations (default)"
, flagType = DynamicFlag
, flagReverse = "-O"
}
, flag { flagName = "-O, -O1"
, flagDescription = "Enable level 1 optimisations"
, flagType = DynamicFlag
, flagReverse = "-O0"
}
, flag { flagName = "-O2"
, flagDescription = "Enable level 2 optimisations"
, flagType = DynamicFlag
, flagReverse = "-O0"
}
, flag { flagName = "-Odph"
, flagDescription =
"Enable level 2 optimisations, set "++
"``-fmax-simplifier-iterations=20`` "++
"and ``-fsimplifier-phases=3``."
, flagType = DynamicFlag
}
]
| siddhanathan/ghc | utils/mkUserGuidePart/Options/OptimizationLevels.hs | bsd-3-clause | 870 | 0 | 9 | 281 | 140 | 90 | 50 | 22 | 1 |
module T5385a where
data T = Int ::: Int
| urbanslug/ghc | testsuite/tests/rename/should_fail/T5385a.hs | bsd-3-clause | 42 | 0 | 6 | 10 | 15 | 9 | 6 | 2 | 0 |
digitSum :: (Integral a) => a -> a
digitSum 0 = 0
digitSum x = digit + (digitSum rest)
where (rest, digit) = x `divMod` 10
fac :: Integer -> Integer
fac n = product [1..n]
main :: IO ()
main = print $ digitSum $ fac 100
| derdon/euler-solutions | 20/20.hs | isc | 186 | 0 | 7 | 45 | 97 | 51 | 46 | 6 | 1 |
import Data.List
import Control.Applicative
import Control.Arrow
import Control.Monad
import RankSelection
type Matrix a = (Int->Int->a, Int, Int)
-- The input is a matrix sorted in both row and column order
-- This selects the kth smallest element. (0th is the smallest)
selectMatrixRank :: Ord a => Int -> Matrix a -> a
selectMatrixRank k (f,n,m)
| k >= n*m || k < 0 = error "rank doesn't exist"
| otherwise = fst $ fst $ biselect k k (f', min n (k+1), min m (k+1))
where f' x y= (f x y, (x, y))
biselect :: Ord a => Int -> Int -> Matrix a -> (a,a)
biselect lb ub (f',n',m') = join (***) (selectRank values) (lb-ra, ub-ra)
where mat@(f,n,m)
| n' > m' = (flip f', m', n')
| otherwise = (f', n', m')
(a, b)
| n < 3 = (f 0 0, f (n-1) (m-1))
| otherwise = biselect lb' ub' halfMat
(lb', ub') = (lb `div` 2, min ((ub `div` 2) + n) (n * hm - 1))
(ra, values) = (rankInMatrix mat a, selectRange mat a b)
halfMat
| even m = (\x y->f x (if y < hm - 1 then 2 * y else 2 * y - 1), n, hm)
            | otherwise = (\x y->f x (2*y), n, hm)
hm = m `div` 2 + 1
-- the rank of an element in the matrix
rankInMatrix :: Ord a => Matrix a -> a -> Int
rankInMatrix mat a = sum (map (\(_,y)->1+y) $ frontier mat a)-1
-- select all elements x in the matrix such that a <= x <= b
selectRange :: Ord a => Matrix a -> a -> a -> [a]
selectRange mat@(f,_,_) a b = concatMap search (frontier mat b)
where search (x,y) = takeWhile (>=a) $ map (f x) [y,y-1..0]
frontier :: Ord a => Matrix a -> a -> [(Int,Int)]
frontier (f,n,m) b = step 0 (m-1)
where step i j
| i > n-1 || j < 0 = []
| f i j <= b = (i,j):step (i+1) j
| otherwise = step i (j-1)
-- toString (f, n, m) = show (n,m) ++ show [[f i j|i<-[0..n-1]]|j<-[0..m-1]]
--listVersion (f,n,m) = [f i j|i<-[0..n-1],j<-[0..m-1]]
--trace ("bisecting call "++show lb'++" "++ show ub'++ " elements "++ show a ++ " "++show b ++ toString (half mat)) $
mapf f x y= (f x y, (x, y))
matrix :: Matrix Int
matrix = (fff,4,4)
fff:: Int->Int->Int
fff i j = ([[0,1,1,1],
[1,2,3,4],
[3,3,4,5],
[10,11,12,13]]!!i)!!j
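-- A small usage sketch for the sample 'matrix' above: rank 0 is the smallest
-- entry and rank 15 the largest, so this should evaluate to a sorted list of
-- all sixteen entries (the name 'exampleRanks' is introduced here for
-- illustration only).
exampleRanks :: [Int]
exampleRanks = map (\k -> selectMatrixRank k matrix) [0 .. 15]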
| chaoxu/haskell-algorithm | MatrixRankSelection.hs | mit | 2,220 | 0 | 14 | 631 | 1,121 | 607 | 514 | 44 | 2 |
-- ------------------------------------------------------ --
-- Copyright © 2014 AlephCloud Systems, Inc.
-- ------------------------------------------------------ --
{-# LANGUAGE CPP #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module BayHac2014.Cryptmail.Json.Instances ()
where
import Data.Monoid.Unicode
import Control.Applicative
import Data.Word
import qualified Data.Set as S
import Prelude.Unicode
import qualified BayHac2014.Cryptmail.Text as T
import BayHac2014.Cryptmail.Json.Types
-- ---------------------------------- --
-- Basic Values
instance ToJSON Value where
toJSON = id
{-# INLINE toJSON #-}
instance FromJSON Value where
parseJSON = pure
{-# INLINE parseJSON #-}
instance ToJSON () where
toJSON _ = Array []
{-# INLINE toJSON #-}
instance FromJSON () where
parseJSON = withArray "()" $ \case
[] → pure ()
_ → fail "Expected an empty array"
{-# INLINE parseJSON #-}
instance ToJSON String where
toJSON = String ∘ T.pack
{-# INLINE toJSON #-}
instance FromJSON String where
parseJSON = withString "String" pure
{-# INLINE parseJSON #-}
instance ToJSON T.Text where
toJSON = String
{-# INLINE toJSON #-}
instance FromJSON T.Text where
parseJSON = withText "Text" pure
{-# INLINE parseJSON #-}
instance ToJSON Bool where
toJSON = Bool
{-# INLINE toJSON #-}
instance FromJSON Bool where
parseJSON = withBool "Bool" pure
{-# INLINE parseJSON #-}
-- ---------------------------------- --
-- Functors (and the like)
instance ToJSON α ⇒ ToJSON (Maybe α) where
toJSON Nothing = Null
toJSON (Just a) = toJSON a
{-# INLINE toJSON #-}
instance FromJSON α ⇒ FromJSON (Maybe α) where
parseJSON Null = pure Nothing
parseJSON a = Just <$> parseJSON a
{-# INLINE parseJSON #-}
instance ToJSON α ⇒ ToJSON [α] where
toJSON = Array ∘ map toJSON
{-# INLINE toJSON #-}
instance FromJSON α ⇒ FromJSON [α] where
parseJSON = withArray "[α]" $ mapM parseJSON
{-# INLINE parseJSON #-}
instance (ToJSON α, ToJSON β) ⇒ ToJSON (α, β) where
toJSON (a, b) = Array $ [toJSON a, toJSON b]
{-# INLINE toJSON #-}
instance (FromJSON α, FromJSON β) ⇒ FromJSON (α, β) where
parseJSON = withArray "(a,b)" $ \case
[a,b] → (,) <$> parseJSON a <*> parseJSON b
l → fail $ "cannot unpack array of length " ⊕ show (length l) ⊕ " into a tupple"
{-# INLINE parseJSON #-}
instance (ToJSON α, ToJSON β, ToJSON γ) ⇒ ToJSON (α, β, γ) where
toJSON (a, b, c) = Array $ [toJSON a, toJSON b, toJSON c]
{-# INLINE toJSON #-}
instance (FromJSON α, FromJSON β, FromJSON γ) ⇒ FromJSON (α, β, γ) where
parseJSON = withArray "(α, β, γ)" $ \case
[a,b,c] → (,,) <$> parseJSON a <*> parseJSON b <*> parseJSON c
l → fail $ "cannot unpack array of length " ⊕ show (length l) ⊕ " into a tripple"
{-# INLINE parseJSON #-}
instance ToJSON α ⇒ ToJSON (S.Set α) where
toJSON = toJSON ∘ S.toList
{-# INLINE toJSON #-}
instance (Ord α, FromJSON α) ⇒ FromJSON (S.Set α) where
parseJSON = fmap S.fromList ∘ parseJSON
{-# INLINE parseJSON #-}
instance (ToJSON α, ToJSON β) ⇒ ToJSON (Either α β) where
toJSON (Left a) = object [jsleft .= a]
toJSON (Right b) = object [jsright .= b]
{-# INLINE toJSON #-}
instance (FromJSON α, FromJSON β) ⇒ FromJSON (Either α β) where
parseJSON (Object [(key, value)])
| key ≡ jsleft = Left <$> parseJSON value
| key ≡ jsright = Right <$> parseJSON value
parseJSON _ = fail ""
{-# INLINE parseJSON #-}
jsleft, jsright ∷ T.Text
jsleft = "Left"
jsright = "Right"
{-# INLINE jsleft #-}
{-# INLINE jsright #-}
-- ---------------------------------- --
-- Numbers
numToJson ∷ Integral α ⇒ α → Value
numToJson = Number ∘ fromInteger ∘ toInteger
{-# INLINE numToJson #-}
instance ToJSON Number where
toJSON = Number
{-# INLINE toJSON #-}
instance FromJSON Number where
parseJSON (Number n) = pure n
parseJSON Null = pure (D (0/0))
parseJSON v = typeMismatch "Number" v
{-# INLINE parseJSON #-}
instance ToJSON Integer where
toJSON = Number ∘ fromInteger
{-# INLINE toJSON #-}
instance FromJSON Integer where
parseJSON = withNumber "Integer" $ return ∘ floor
{-# INLINE parseJSON #-}
instance ToJSON Int where
toJSON = numToJson
{-# INLINE toJSON #-}
instance FromJSON Int where
parseJSON = withNumber "Integral" $ return ∘ floor
{-# INLINE parseJSON #-}
instance ToJSON Word8 where
toJSON = numToJson
{-# INLINE toJSON #-}
instance FromJSON Word8 where
parseJSON = withNumber "Word8" $ return ∘ floor
{-# INLINE parseJSON #-}
instance ToJSON Word16 where
toJSON = numToJson
{-# INLINE toJSON #-}
instance FromJSON Word16 where
parseJSON = withNumber "Word16" $ return ∘ floor
{-# INLINE parseJSON #-}
instance ToJSON Word32 where
toJSON = numToJson
{-# INLINE toJSON #-}
instance FromJSON Word32 where
parseJSON = withNumber "Word32" $ return ∘ floor
{-# INLINE parseJSON #-}
instance ToJSON Word64 where
toJSON = numToJson
{-# INLINE toJSON #-}
instance FromJSON Word64 where
parseJSON = withNumber "Word64" $ return ∘ floor
{-# INLINE parseJSON #-}
instance ToJSON Double where
toJSON = Number ∘ D
{-# INLINE toJSON #-}
instance FromJSON Double where
parseJSON (Number n) = case n of
D d → pure d
I i → pure (fromIntegral i)
parseJSON Null = pure (0/0)
parseJSON v = typeMismatch "Double" v
{-# INLINE parseJSON #-}
| alephcloud/bayhac2014 | src/BayHac2014/Cryptmail/Json/Instances.hs | mit | 6,101 | 0 | 14 | 1,352 | 1,541 | 825 | 716 | 162 | 1 |
{-# htermination min :: Int -> Int -> Int #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_min_5.hs | mit | 46 | 0 | 2 | 10 | 3 | 2 | 1 | 1 | 0 |
-- | Old compilers; these will not compile with newer language changes.
-- =======================================================================================
-- | The first compiler turns everything into a dynamic type. It works, but only by
-- | using a lot of unnecessary casting.
compileDyn :: DL.Exp -> StaticExp Dynamic
compileDyn (DL.Lam str exp) =
To $ Lam str (compileDyn exp)
-- No need for a To: we already return a Dynamic after function application.
compileDyn (f DL.:@ exp) =
From (compileDyn f) :@ (compileDyn exp)
compileDyn (DL.Bin op exp1 exp2) =
To $ IntBin op sExp1 sExp2
where sExp1 = From $ compileDyn exp1
sExp2 = From $ compileDyn exp2
compileDyn (DL.Lit n) = To (IntLit n)
compileDyn (DL.Var str) = Var str
-- the "compileDyn exps" are both of type Dynamic so the entire expression has
-- type Dynamic.
compileDyn (DL.If cond exp1 exp2) =
If (compileDyn cond) (compileDyn exp1) (compileDyn exp2)
-- =======================================================================================
data Result where
Result :: (Typeable t) => StaticExp t -> Result
printResults :: Result -> IO()
printResults (Result exp) = putStrLn (show exp)
-- | The issue with `compileDyn` is that the return type of all cases must be the same
-- | StaticExp t, so t must be Dynamic. Instead we wrap the results in a Result.
compileRes :: DL.Exp -> Result
-- The second argument of a Lam must be dynamic.
compileRes (DL.Lam str exp) = case (compileRes exp) of
Result exp' -> Result $ Lam str (ensureDyn exp')
compileRes (f DL.:@ exp) =
case (compileRes f, compileRes exp) of
(Result f', Result exp') ->
Result (castType f' :@ (ensureDyn exp') :: StaticExp Dynamic)
compileRes (DL.Bin op exp1 exp2) =
case (compileRes exp1, compileRes exp2) of
    -- Bin requires both operands to have the same type, and that is also the
    -- type of the whole expression. After compileRes there is no guarantee the
    -- types match, so we must explicitly cast them when they are the same.
(Result exp1', Result exp2') -> Result $ Bin op (castType exp1') (castType exp2')
compileRes (DL.Lit n) = Result (Lit n)
compileRes (DL.Var str) = Result (Var str)
compileRes (DL.If cond exp1 exp2) =
case (compileRes cond, compileRes exp1, compileRes exp2) of
    -- We expect the types of both branches of an If to be the same. If they differ
    -- it is not necessarily wrong, since one branch may return a Dynamic, so we wrap
    -- both sides in Dynamics.
(Result cond',
-- Get the type of both expressions to compare.
Result (exp1' :: StaticExp a),
Result (exp2' :: StaticExp b)) ->
case eqT (typeRep :: TypeRep a) (typeRep :: TypeRep b) of
Just Refl -> Result $ If (ensureDyn cond') exp1' exp2'
Nothing -> Result $ If (ensureDyn cond') (ensureDyn exp1') (ensureDyn exp2')
-- We want to cast from an 'a' to a 'b' but our types are wrapped in a
-- StaticExp so we use gcast!
-- gcast :: forall a b c. (Typeable a, Typeable b) => c a -> Maybe (c b)
-- | Given a StaticExp t1 we attempt to cast it to a t2 if the types are the same.
-- | If the input is a Dynamic we add a call to From and defer the check to run time.
-- | Otherwise we definitely have an error.
castType :: (Typeable t1, Typeable t2) => StaticExp t1 -> StaticExp t2
castType t = case gcast t of
Just i -> i
Nothing -> case gcast t of
Just i -> From i
    Nothing -> error $ "castType: The type of " ++ show t ++
" does not match t2 or Dynamic."
-- | Given a Typeable value, check whether it is already a Dynamic; if not, turn it
-- | into a Dynamic by wrapping it in a To.
ensureDyn :: forall t. Typeable t => StaticExp t -> StaticExp Dynamic
ensureDyn val = case (eqT (typeRep :: TypeRep t) (typeRep :: TypeRep Dynamic)) of
Just Refl -> val
Nothing -> To val
testCompileRes :: DL.Exp -> StaticExp Int
testCompileRes e = case compileRes e of
(Result e') -> castType e'
-- =======================================================================================
| plclub/cis670-16fa | projects/DynamicLang/src/OldCompilers.hs | mit | 4,098 | 0 | 14 | 949 | 986 | 497 | 489 | -1 | -1 |
module Main where
import Game.Poker.Simple
main :: IO ()
main = putStrLn "Hello World"
| tobynet/java-poker | app/Main.hs | mit | 88 | 0 | 6 | 15 | 28 | 16 | 12 | 4 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE QuasiQuotes #-}
{- |
Module : Language.Egison.Math.Expr
Licence : MIT
This module defines the internal representation of mathematical objects such as
polynomials, and some useful patterns.
-}
module Language.Egison.Math.Expr
( ScalarData (..)
, PolyExpr (..)
, TermExpr (..)
, Monomial
, SymbolExpr (..)
, Printable (..)
, pattern ZeroExpr
, pattern SingleSymbol
, pattern SingleTerm
, ScalarM (..)
, TermM (..)
, SymbolM (..)
, term
, termM
, symbol
, symbolM
, func
, funcM
, apply
, applyM
, quote
, negQuote
, negQuoteM
, equalMonomial
, equalMonomialM
, zero
, zeroM
, singleTerm
, singleTermM
, mathScalarMult
, mathNegate
) where
import Data.List (intercalate)
import Prelude hiding (foldr, mappend, mconcat)
import Control.Egison
import Control.Monad (MonadPlus (..))
import Language.Egison.IExpr (Index (..))
--
-- Data
--
data ScalarData
= Div PolyExpr PolyExpr
deriving Eq
newtype PolyExpr
= Plus [TermExpr]
data TermExpr
= Term Integer Monomial
-- We choose a definition of 'monomial' that excludes the coefficient.
-- e.g. 2 x^2 y^3 is *not* a monomial, but x^2 y^3 is a monomial.
type Monomial = [(SymbolExpr, Integer)]
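-- For example, the monomial x^2 y^3 could be represented (illustratively, with
-- empty 'Id's and no indices) as:
--   [(Symbol "" "x" [], 2), (Symbol "" "y" [], 3)]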
data SymbolExpr
= Symbol Id String [Index ScalarData]
| Apply ScalarData [ScalarData]
| Quote ScalarData
| FunctionData ScalarData [ScalarData] [ScalarData] -- fnname argnames args
deriving Eq
type Id = String
-- Matchers
data ScalarM = ScalarM
instance Matcher ScalarM ScalarData
data TermM = TermM
instance Matcher TermM TermExpr
data SymbolM = SymbolM
instance Matcher SymbolM SymbolExpr
term :: Pattern (PP Integer, PP Monomial) TermM TermExpr (Integer, Monomial)
term _ _ (Term a mono) = pure (a, mono)
termM :: TermM -> TermExpr -> (Eql, Multiset (Pair SymbolM Eql))
termM TermM _ = (Eql, Multiset (Pair SymbolM Eql))
symbol :: Pattern (PP String) SymbolM SymbolExpr String
symbol _ _ (Symbol _ name []) = pure name
symbol _ _ _ = mzero
symbolM :: SymbolM -> p -> Eql
symbolM SymbolM _ = Eql
func :: Pattern (PP ScalarData, PP [ScalarData])
SymbolM SymbolExpr (ScalarData, [ScalarData])
func _ _ (FunctionData name _ args) = pure (name, args)
func _ _ _ = mzero
funcM :: SymbolM -> SymbolExpr -> (ScalarM, List ScalarM)
funcM SymbolM _ = (ScalarM, List ScalarM)
apply :: Pattern (PP String, PP [ScalarData]) SymbolM SymbolExpr (String, [ScalarData])
apply _ _ (Apply (SingleSymbol (Symbol _ fn _)) args) = pure (fn, args)
apply _ _ _ = mzero
applyM :: SymbolM -> p -> (Eql, List ScalarM)
applyM SymbolM _ = (Eql, List ScalarM)
quote :: Pattern (PP ScalarData) SymbolM SymbolExpr ScalarData
quote _ _ (Quote m) = pure m
quote _ _ _ = mzero
negQuote :: Pattern (PP ScalarData) SymbolM SymbolExpr ScalarData
negQuote _ _ (Quote m) = pure (mathNegate m)
negQuote _ _ _ = mzero
negQuoteM :: SymbolM -> p -> ScalarM
negQuoteM SymbolM _ = ScalarM
equalMonomial :: Pattern (PP Integer, PP Monomial) (Multiset (Pair SymbolM Eql)) Monomial (Integer, Monomial)
equalMonomial (_, VP xs) _ ys = case isEqualMonomial xs ys of
Just sgn -> pure (sgn, xs)
Nothing -> mzero
equalMonomial _ _ _ = mzero
equalMonomialM :: Multiset (Pair SymbolM Eql) -> p -> (Eql, Multiset (Pair SymbolM Eql))
equalMonomialM (Multiset (Pair SymbolM Eql)) _ = (Eql, Multiset (Pair SymbolM Eql))
zero :: Pattern () ScalarM ScalarData ()
zero _ _ (Div (Plus []) _) = pure ()
zero _ _ _ = mzero
zeroM :: ScalarM -> p -> ()
zeroM ScalarM _ = ()
singleTerm :: Pattern (PP Integer, PP Integer, PP Monomial) ScalarM ScalarData (Integer, Integer, Monomial)
singleTerm _ _ (Div (Plus [Term c mono]) (Plus [Term c2 []])) = pure (c, c2, mono)
singleTerm _ _ _ = mzero
singleTermM :: ScalarM -> p -> (Eql, Eql, Multiset (Pair SymbolM Eql))
singleTermM ScalarM _ = (Eql, Eql, Multiset (Pair SymbolM Eql))
instance ValuePattern ScalarM ScalarData where
value e () ScalarM v = if e == v then pure () else mzero
instance ValuePattern SymbolM SymbolExpr where
value e () SymbolM v = if e == v then pure () else mzero
pattern ZeroExpr :: ScalarData
pattern ZeroExpr = (Div (Plus []) (Plus [Term 1 []]))
pattern SingleSymbol :: SymbolExpr -> ScalarData
pattern SingleSymbol sym = Div (Plus [Term 1 [(sym, 1)]]) (Plus [Term 1 []])
-- Product of a coefficient and a monomial
pattern SingleTerm :: Integer -> Monomial -> ScalarData
pattern SingleTerm coeff mono = Div (Plus [Term coeff mono]) (Plus [Term 1 []])
instance Eq PolyExpr where
Plus xs == Plus ys =
match dfs ys (Multiset Eql)
[ [mc| #xs -> True |]
, [mc| _ -> False |] ]
instance Eq TermExpr where
Term a xs == Term b ys
| a == b = isEqualMonomial xs ys == Just 1
| a == -b = isEqualMonomial xs ys == Just (-1)
| otherwise = False
isEqualMonomial :: Monomial -> Monomial -> Maybe Integer
isEqualMonomial xs ys =
match dfs (xs, ys) (Pair (Multiset (Pair SymbolM Eql)) (Multiset (Pair SymbolM Eql)))
[ [mc| ((quote $s, $n) : $xss, (negQuote #s, #n) : $yss) ->
case isEqualMonomial xss yss of
Nothing -> Nothing
Just sgn -> return (if even n then sgn else - sgn) |]
, [mc| (($x, $n) : $xss, (#x, #n) : $yss) -> isEqualMonomial xss yss |]
, [mc| ([], []) -> return 1 |]
, [mc| _ -> Nothing |]
]
--
-- Arithmetic operations
--
mathScalarMult :: Integer -> ScalarData -> ScalarData
mathScalarMult c (Div m n) = Div (f c m) n
where
f c (Plus ts) = Plus (map (\(Term a xs) -> Term (c * a) xs) ts)
mathNegate :: ScalarData -> ScalarData
mathNegate = mathScalarMult (-1)
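-- For instance, negating the single-term scalar @SingleTerm 2 mono@ yields
-- @SingleTerm (-2) mono@: only the coefficients of the numerator's terms change.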
--
-- Pretty printing
--
class Printable a where
isAtom :: a -> Bool
pretty :: a -> String
pretty' :: Printable a => a -> String
pretty' e | isAtom e = pretty e
pretty' e = "(" ++ pretty e ++ ")"
instance Printable ScalarData where
isAtom (Div p (Plus [Term 1 []])) = isAtom p
isAtom _ = False
pretty (Div p1 (Plus [Term 1 []])) = pretty p1
pretty (Div p1 p2) = pretty'' p1 ++ " / " ++ pretty' p2
where
pretty'' :: PolyExpr -> String
pretty'' p@(Plus [_]) = pretty p
pretty'' p = "(" ++ pretty p ++ ")"
instance Printable PolyExpr where
isAtom (Plus []) = True
isAtom (Plus [Term _ []]) = True
isAtom (Plus [Term 1 [_]]) = True
isAtom _ = False
pretty (Plus []) = "0"
pretty (Plus (t:ts)) = pretty t ++ concatMap withSign ts
where
withSign (Term a xs) | a < 0 = " - " ++ pretty (Term (- a) xs)
withSign t = " + " ++ pretty t
instance Printable SymbolExpr where
isAtom Symbol{} = True
isAtom (Apply _ []) = True
isAtom Quote{} = True
isAtom _ = False
pretty (Symbol _ (':':':':':':_) []) = "#"
pretty (Symbol _ s []) = s
pretty (Symbol _ s js) = s ++ concatMap show js
pretty (Apply fn mExprs) = unwords (map pretty' (fn : mExprs))
pretty (Quote mExprs) = "`" ++ pretty' mExprs
pretty (FunctionData name _ _) = pretty name
instance Printable TermExpr where
isAtom (Term _ []) = True
isAtom (Term 1 [_]) = True
isAtom _ = False
pretty (Term a []) = show a
pretty (Term 1 xs) = intercalate " * " (map prettyPoweredSymbol xs)
pretty (Term (-1) xs) = "- " ++ intercalate " * " (map prettyPoweredSymbol xs)
pretty (Term a xs) = intercalate " * " (show a : map prettyPoweredSymbol xs)
prettyPoweredSymbol :: (SymbolExpr, Integer) -> String
prettyPoweredSymbol (x, 1) = show x
prettyPoweredSymbol (x, n) = pretty' x ++ "^" ++ show n
instance Show ScalarData where
show = pretty
instance Show PolyExpr where
show = pretty
instance Show TermExpr where
show = pretty
instance Show SymbolExpr where
show = pretty
instance {-# OVERLAPPING #-} Show (Index ScalarData) where
show (Sup i) = "~" ++ pretty' i
show (Sub i) = "_" ++ pretty' i
show (SupSub i) = "~_" ++ pretty' i
show (DF _ _) = ""
show (User i) = "|" ++ pretty' i
| egison/egison | hs-src/Language/Egison/Math/Expr.hs | mit | 8,536 | 0 | 14 | 2,367 | 3,131 | 1,644 | 1,487 | 198 | 2 |
{-# LANGUAGE ConstraintKinds #-}
module Mem where
import Control.Applicative
import Data.Traversable
import SplitEval
-- | Memoise a function on non-negative 'Int's by indexing into a lazily built
-- list of its results (only valid for arguments >= 0).
memInt :: (Int -> a) -> (Int -> a)
memInt f = (map f [0..] !!)
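-- A classic use of memInt (illustrative only, not used elsewhere in this module):
-- a memoised Fibonacci whose recursive calls go through the memoised function.
fibMemo :: Int -> Integer
fibMemo = memInt fib'
  where fib' n | n < 2     = fromIntegral n
               | otherwise = fibMemo (n - 1) + fibMemo (n - 2)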
memIntA :: (Applicative m) => (Int -> m a) -> m (Int -> a)
memIntA f = (!!) <$> traverse f [0..]
memIntS ::
(Applicative n, MonadEval s n, Functor m, MonadSplit s m) =>
(Int -> n a) -> m (Int -> a)
memIntS = evalS . memIntA
| vladfi1/hs-misc | PFP/Mem.hs | mit | 416 | 0 | 9 | 89 | 190 | 104 | 86 | 13 | 1 |
{-# LANGUAGE CPP #-}
module Stackage.Config where
import Control.Monad (when, unless)
import Control.Monad.Trans.Writer (Writer, execWriter, tell)
import Data.Char (toLower)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Set (fromList, singleton)
import Distribution.Text (simpleParse)
import Stackage.Types
-- | Packages which are shipped with GHC but are not included in the
-- Haskell Platform list of core packages.
defaultExtraCore :: GhcMajorVersion -> Set PackageName
defaultExtraCore _ = fromList $ map PackageName $ words
"binary Win32 ghc-prim integer-gmp"
-- | Test suites which are expected to fail for some reason. The test suite
-- will still be run and logs kept, but a failure will not indicate an
-- error in our package combination.
defaultExpectedFailures :: GhcMajorVersion
-> Bool -- ^ haskell platform
-> Set PackageName
defaultExpectedFailures ghcVer requireHP = execWriter $ do
-- Requires an old version of WAI and Warp for tests
add "HTTP"
-- text and setenv have recursive dependencies in their tests, which
-- cabal can't (yet) handle
add "text"
add "setenv"
-- The version of GLUT included with the HP does not generate
-- documentation correctly.
add "GLUT"
-- https://github.com/bos/statistics/issues/42
add "statistics"
-- https://github.com/kazu-yamamoto/simple-sendfile/pull/10
add "simple-sendfile"
-- http://hackage.haskell.org/trac/hackage/ticket/954
add "diagrams"
-- https://github.com/fpco/stackage/issues/24
add "unix-time"
-- With transformers 0.3, it doesn't provide any modules
add "transformers-compat"
-- Tests require shell script and are incompatible with sandboxed package
-- databases
add "HTF"
-- https://github.com/simonmar/monad-par/issues/28
add "monad-par"
-- Unfortunately network failures seem to happen haphazardly
add "network"
-- https://github.com/ekmett/hyphenation/issues/1
add "hyphenation"
-- Test suite takes too long to run on some systems
add "punycode"
-- http://hub.darcs.net/stepcut/happstack/issue/1
add "happstack-server"
-- Requires a Facebook app.
add "fb"
-- https://github.com/tibbe/hashable/issues/64
add "hashable"
-- https://github.com/vincenthz/language-java/issues/10
add "language-java"
add "threads"
add "crypto-conduit"
add "pandoc"
add "language-ecmascript"
add "hspec"
add "alex"
-- https://github.com/basvandijk/concurrent-extra/issues/
add "concurrent-extra"
-- https://github.com/skogsbaer/xmlgen/issues/2
add "xmlgen"
-- Something very strange going on with the test suite, I can't figure
-- out how to fix it
add "bson"
-- Requires a locally running PostgreSQL server with appropriate users
add "postgresql-simple"
-- Missing files
add "websockets"
-- Some kind of Cabal bug when trying to run tests
add "thyme"
add "shake"
-- https://github.com/jgm/pandoc-citeproc/issues/5
add "pandoc-citeproc"
-- Problems with doctest and sandboxing
add "warp"
add "wai-logger"
-- https://github.com/fpco/stackage/issues/163
add "hTalos"
add "seqloc"
-- https://github.com/bos/math-functions/issues/25
add "math-functions"
-- FIXME the test suite fails fairly regularly in builds, though I haven't
-- discovered why yet
add "crypto-numbers"
-- Test suite is currently failing regularly, needs to be worked out still.
add "lens"
-- Requires too old a version of test-framework
add "time"
-- No code included any more, therefore Haddock fails
mapM_ add $ words =<<
[ "comonad-transformers comonads-fd groupoids"
, "profunctor-extras semigroupoid-extras"
, "hamlet shakespeare-css shakespeare-i18n"
, "shakespeare-js shakespeare-text"
, "attoparsec-conduit blaze-builder-conduit http-client-conduit"
, "network-conduit zlib-conduit http-client-multipart"
, "wai-eventsource wai-test"
, "hspec-discover"
]
-- Cloud Haskell tests seem to be unreliable
mapM_ add $ words =<<
[ "distributed-process lockfree-queue network-transport-tcp"
]
-- Pulls in monad-peel which does not compile
when (ghcVer >= GhcMajorVersion 7 8) $ add "monad-control"
-- https://github.com/fpco/stackage/issues/226
add "options"
-- https://github.com/gtk2hs/gtk2hs/issues/36
add "glib"
add "pango"
-- https://github.com/acw/bytestring-progress/issues/3
add "bytestring-progress"
-- Seems to require 32-bit functions
add "nettle"
-- Depends on a missing graphviz executable
add "graphviz"
-- https://github.com/silkapp/json-schema/issues/8
when (ghcVer <= GhcMajorVersion 7 6) $
add "json-schema"
-- No AWS creds available
add "aws"
-- Not sure why...
add "singletons"
add "hspec2"
add "hspec-wai"
-- Requires too new a version of time
when (ghcVer < GhcMajorVersion 7 8) $ add "cookie"
-- https://github.com/fpco/stackage/issues/285
add "diagrams-haddock"
add "scientific"
add "json-schema"
-- https://github.com/BioHaskell/octree/issues/4
add "Octree"
-- No code until we upgrade to network 2.6
add "network-uri"
-- https://github.com/goldfirere/th-desugar/issues/12
add "th-desugar"
-- https://github.com/haskell/c2hs/issues/108
add "c2hs"
-- https://github.com/jmillikin/haskell-filesystem/issues/3
add "system-filepath"
-- For some unknown reason, doctest has trouble on GHC 7.6. This only
-- happens during a Stackage test.
--
-- See: http://www.reddit.com/r/haskell/comments/2go92u/beginner_error_messages_in_c_vs_haskell/cklaspk
when (ghcVer == GhcMajorVersion 7 6) $ add "http-types"
-- Requires a running webdriver server
add "webdriver"
add "webdriver-snoy"
-- Weird conflicts with sandboxing
add "ghc-mod"
add "ghcid"
-- Requires locally running server
add "bloodhound"
-- Requires PostgreSQL running
add "postgresql-binary"
add "hasql"
add "hasql-postgres"
-- https://github.com/gtk2hs/gtk2hs/issues/79
add "gio"
add "gtk"
-- Requires SAT solver and old QuickCheck
add "ersatz"
when (ghcVer == GhcMajorVersion 7 8 && requireHP) $ do
-- https://github.com/vincenthz/hs-asn1/issues/11
add "asn1-encoding"
-- https://github.com/vincenthz/hs-tls/issues/84
add "tls"
add "x509"
where
add = tell . singleton . PackageName
-- | List of packages for our stable Hackage. All dependencies will be
-- included as well. Please indicate who will be maintaining the package
-- via comments.
defaultStablePackages :: GhcMajorVersion
-> Bool -- ^ using haskell platform?
-> Map PackageName (VersionRange, Maintainer)
defaultStablePackages ghcVer requireHP = unPackageMap $ execWriter $ do
when (ghcVer == GhcMajorVersion 7 8 && requireHP) haskellPlatform78
mapM_ (add "[email protected]") $ words =<<
[ "yesod yesod-newsfeed yesod-sitemap yesod-static yesod-test yesod-bin"
, "markdown mime-mail-ses"
, "persistent persistent-template persistent-sqlite persistent-postgresql persistent-mysql"
, "network-conduit-tls yackage warp-tls keter"
, "process-conduit stm-conduit"
, "classy-prelude-yesod yesod-fay yesod-eventsource wai-websockets"
, "random-shuffle hebrew-time"
, "bzlib-conduit case-insensitive"
, "conduit-extra conduit-combinators yesod-websockets"
, "cabal-src"
, "yesod-auth-deskcom monadcryptorandom sphinx"
, "yesod-gitrepo"
]
-- https://github.com/fpco/stackage/issues/261
addRange "Michael Snoyman" "cabal-install" $
case () of
()
| ghcVer <= GhcMajorVersion 7 6 -> "< 1.17"
| ghcVer <= GhcMajorVersion 7 8 -> "< 1.19"
| otherwise -> "-any"
mapM_ (add "FP Complete <[email protected]>") $ words =<<
[ "web-fpco th-expand-syns configurator smtLib"
, "fixed-list indents language-c pretty-class"
, "csv-conduit cassava"
, "async shelly thyme"
, "hxt hxt-relaxng dimensional"
, "cairo diagrams-cairo gtk2hs-buildtools"
, "base16-bytestring convertible"
, "compdata hybrid-vectors"
, "executable-path formatting quandl-api"
, "fgl hmatrix hmatrix-gsl"
, "alex happy c2hs"
, "fpco-api aws persistent-mongoDB"
, "random-fu lhs2tex"
, "Chart Chart-diagrams histogram-fill random-source"
, "webdriver"
, "foreign-store"
, "statistics-linreg"
-- https://github.com/Soostone/retry/issues/18
-- , "retry"
]
when (ghcVer < GhcMajorVersion 7 8) $ do -- No GHC 7.8 support
mapM_ (add "FP Complete <[email protected]>") $ words =<<
[ "" -- too unreliable for the moment "distributed-process distributed-process-simplelocalnet"
-- https://github.com/fpco/stackage/issues/295
--, "threepenny-gui unification-fd"
]
addRange "FP Complete <[email protected]>" "compdata" "< 0.8"
when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $
mapM_ (add "FP Complete <[email protected]>") $ words =<<
[ "criterion"
, "th-lift singletons th-desugar quickcheck-assertions"
]
addRange "FP Complete <[email protected]>" "kure" "<= 2.4.10"
mapM_ (add "Omari Norman <[email protected]>") $ words
"barecheck rainbow rainbow-tests"
when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $
mapM_ (add "Omari Norman <[email protected]>") $ words
"quickpull"
mapM_ (add "Neil Mitchell") $ words
"hlint hoogle shake derive tagsoup cmdargs safe uniplate nsis js-jquery js-flot extra bake ghcid"
mapM_ (add "Alan Zimmerman") $ words
"hjsmin language-javascript"
{-
https://github.com/fpco/stackage/issues/320
when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $
mapM_ (add "Alfredo Di Napoli <[email protected]>") $ words
"mandrill"
-}
mapM_ (add "Jasper Van der Jeugt") $ words
"blaze-html blaze-markup stylish-haskell"
mapM_ (add "Antoine Latter") $ words
"uuid byteorder"
mapM_ (add "Philipp Middendorf <[email protected]>") $ words
"clock"
mapM_ (add "Stefan Wehr <[email protected]>") $ words
"HTF xmlgen stm-stats"
when (ghcVer < GhcMajorVersion 7 8) $ add "Stefan Wehr <[email protected]>" "hscurses"
mapM_ (add "Bart Massey <[email protected]>") $ words
"parseargs"
mapM_ (add "Vincent Hanquez") $ words =<<
[ "bytedump certificate cipher-aes cipher-rc4 connection"
, "cprng-aes cpu crypto-pubkey-types crypto-random-api cryptocipher"
, "cryptohash hit language-java libgit pem siphash socks tls"
, "tls-debug vhd language-java"
]
mapM_ (add "Chris Done") $ words =<<
[ "ace check-email freenect gd"
, "hostname-validate ini lucid osdkeys pdfinfo"
, "pure-io sourcemap frisby"
-- https://github.com/nominolo/atto-lisp/issues/15
-- , "present"
]
-- Requires older haddock currently
when (ghcVer == GhcMajorVersion 7 8 && requireHP) $
mapM_ (add "Chris Done") $ words =<<
[ "haskell-docs"
]
-- https://github.com/jgoerzen/testpack/issues/10
when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $
mapM_ (add "Chris Done") $ words =<<
[ "scrobble"
]
-- Requires too new a process for GHC 7.6
when (ghcVer >= GhcMajorVersion 7 8) $
mapM_ (add "Chris Done") $ words =<<
[ "shell-conduit"
]
-- TODO: Add hindent and structured-haskell-mode once they've been ported to HSE 1.16.
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
-- Does not compile on Windows
mapM_ (add "Vincent Hanquez") $ words "udbus xenstore"
#endif
when (ghcVer < GhcMajorVersion 7 8) $
mapM_ (add "Alberto G. Corona <[email protected]>") $ words
"RefSerialize TCache Workflow MFlow"
mapM_ (add "Edward Kmett <[email protected]>") $ words =<<
[ "ad adjunctions bifunctors bound charset comonad comonad-transformers"
, "comonads-fd compressed concurrent-supply constraints contravariant"
, "distributive either eq free groupoids heaps hyphenation"
, "integration intervals kan-extensions lca lens linear monadic-arrays machines"
, "mtl profunctors profunctor-extras reducers reflection"
, "semigroups semigroupoids semigroupoid-extras speculation tagged void"
, "graphs monad-products monad-st wl-pprint-extras wl-pprint-terminfo"
, "numeric-extras parsers pointed prelude-extras reducers"
, "streams vector-instances"
, "approximate bits bytes compensated exceptions fixed gl"
, "half linear-accelerate log-domain"
, "monad-products monad-st nats"
, "ersatz"
-- hyperloglog
]
when (ghcVer < GhcMajorVersion 7 8) $
mapM_ (add "Edward Kmett <[email protected]>") $ words =<<
[ "categories comonad-extras recursion-schemes syb-extras"
]
when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $
mapM_ (add "Edward Kmett <[email protected]>") $ words =<<
[ "lens-aeson quickpull zlib-lens"
]
-- Temporary upper bound for some of the above packages
addRange "Edward Kmett <[email protected]>" "generic-deriving" "< 1.7"
mapM_ (add "Andrew Farmer <[email protected]>") $ words
"scotty wai-middleware-static"
mapM_ (add "Simon Hengel <[email protected]>") $ words
"hspec hspec-wai hspec-wai-json aeson-qq interpolate doctest base-compat"
mapM_ (add "Mario Blazevic <[email protected]>") $ words
"monad-parallel monad-coroutine incremental-parser monoid-subclasses"
mapM_ (add "Brent Yorgey <[email protected]>") $ words =<<
[ "monoid-extras dual-tree vector-space-points active force-layout"
, "diagrams diagrams-contrib diagrams-core diagrams-lib diagrams-svg"
, "diagrams-postscript haxr"
, "BlogLiterately"
, "MonadRandom"
, "diagrams-builder diagrams-haddock BlogLiterately-diagrams"
]
mapM_ (add "Vincent Berthoux <[email protected]>") $ words
"JuicyPixels"
mapM_ (add "Patrick Brisbin") $ words "gravatar"
-- https://github.com/fpco/stackage/issues/299
-- mapM_ (add "Paul Harper <[email protected]>") $ words "yesod-auth-oauth2"
mapM_ (add "Felipe Lessa <[email protected]>") $ words
"esqueleto fb fb-persistent yesod-fb yesod-auth-fb"
mapM_ (add "Alexander Altman <[email protected]>") $ words
"base-unicode-symbols containers-unicode-symbols"
if ghcVer >= GhcMajorVersion 7 8
then add "Ryan Newton <[email protected]>" "accelerate"
else addRange "Ryan Newton <[email protected]>" "accelerate" "< 0.15"
mapM_ (add "Dan Burton <[email protected]>") $ words =<<
[ "basic-prelude composition io-memoize numbers rev-state runmemo"
, "tardis lens-family-th"
]
mapM_ (add "Daniel Díaz <[email protected]>") $ words
"HaTeX matrix"
when (ghcVer >= GhcMajorVersion 7 8) $
mapM_ (add "Daniel Díaz <[email protected]>") $ words
"binary-list"
mapM_ (add "Gabriel Gonzalez <[email protected]>")
["pipes", "pipes-parse", "pipes-concurrency"]
when (ghcVer >= GhcMajorVersion 7 8) $
mapM_ (add "Chris Allen <[email protected]>")
["bloodhound"]
mapM_ (add "Adam Bergmark <[email protected]>") $ words
"fay fay-base fay-dom fay-jquery fay-text fay-uri snaplet-fay"
when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $
mapM_ (add "Rodrigo Setti <[email protected]>") $ words
"messagepack messagepack-rpc"
mapM_ (add "Boris Lykah <[email protected]>") $ words
"groundhog groundhog-th groundhog-sqlite groundhog-postgresql groundhog-mysql"
mapM_ (add "Janne Hellsten <[email protected]>") $ words
"sqlite-simple"
mapM_ (add "Michal J. Gajda") $ words
"iterable Octree FenwickTree"
-- https://github.com/BioHaskell/hPDB/issues/2
when (ghcVer >= GhcMajorVersion 7 8) $ do
mapM_ (add "Michal J. Gajda") $ words
"hPDB hPDB-examples"
mapM_ (add "Roman Cheplyaka <[email protected]>") $ words =<<
[ "smallcheck tasty tasty-smallcheck tasty-quickcheck tasty-hunit tasty-golden"
, "traverse-with-class regex-applicative time-lens"
, "haskell-names haskell-packages hse-cpp"
]
mapM_ (add "George Giorgidze <[email protected]>") $ words
"HCodecs YampaSynth"
mapM_ (add "Phil Hargett <[email protected]>") $ words
"courier"
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
mapM_ (add "Aycan iRiCAN <[email protected]>") $ words
"hdaemonize hsyslog hweblib"
#else
mapM_ (add "Aycan iRiCAN <[email protected]>") $ words
"hweblib"
#endif
mapM_ (add "Joachim Breitner <[email protected]>") $ words
"circle-packing arbtt"
when (ghcVer >= GhcMajorVersion 7 8) $
mapM_ (add "Joachim Breitner <[email protected]>") $ words
"ghc-heap-view"
when (ghcVer < GhcMajorVersion 7 8) $
mapM_ (add "John Wiegley") $ words =<<
[ "bindings-DSL github monad-extras numbers"
]
mapM_ (add "Aditya Bhargava <[email protected]") $ words
"HandsomeSoup"
mapM_ (add "Clint Adams <[email protected]>") $ words
"hOpenPGP openpgp-asciiarmor MusicBrainz DAV hopenpgp-tools"
-- https://github.com/fpco/stackage/issues/160
mapM_ (add "Ketil Malde") $ words =<<
[ "biocore biofasta biofastq biosff"
, "blastxml bioace biophd"
, "biopsl" -- https://github.com/ingolia/SamTools/issues/3 samtools
, "seqloc bioalign BlastHTTP"
-- The following have out-of-date dependencies currently
-- biostockholm memexml RNAwolf
-- , "Biobase BiobaseDotP BiobaseFR3D BiobaseInfernal BiobaseMAF"
-- , "BiobaseTrainingData BiobaseTurner BiobaseXNA BiobaseVienna"
-- , "BiobaseTypes BiobaseFasta"
-- MC-Fold-DP
]
-- https://github.com/fpco/stackage/issues/163
addRange "Michael Snoyman" "biophd" "< 0.0.6 || > 0.0.6"
mapM_ (add "Silk <[email protected]>") $ words =<<
[ "arrow-list attoparsec-expr bumper code-builder fay-builder"
, "hxt-pickle-utils multipart regular-xmlpickler"
, "tostring uri-encode imagesize-conduit"
]
when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $ do
mapM_ (add "Silk <[email protected]>") $ words =<<
[ "aeson-utils generic-aeson json-schema"
, "rest-client rest-core rest-gen rest-happstack rest-snap rest-stringmap"
, "rest-types rest-wai tostring uri-encode imagesize-conduit"
]
mapM_ (add "Simon Michael <[email protected]>") $ words
"hledger"
mapM_ (add "Mihai Maruseac <[email protected]>") $ words
"io-manager"
mapM_ (add "Dimitri Sabadie <[email protected]") $ words
"monad-journal"
mapM_ (add "Thomas Schilling <[email protected]>") $ words
"ghc-syb-utils"
mapM_ (add "Boris Buliga <[email protected]>") $ words
"ghc-mod io-choice"
when (ghcVer >= GhcMajorVersion 7 8) $
mapM_ (add "Boris Buliga <[email protected]>") $ words
"system-canonicalpath"
when (ghcVer >= GhcMajorVersion 7 8) $
mapM_ (add "Yann Esposito <[email protected]>") $ words
"holy-project"
when requireHP $ addRange "Yann Esposito <[email protected]>" "holy-project" "< 0.1.1.1"
mapM_ (add "Paul Rouse <[email protected]>") $ words
"yesod-auth-hashdb"
add "Toralf Wittner <[email protected]>" "zeromq4-haskell"
mapM_ (add "[email protected]") $ words
"djinn-lib djinn-ghc"
mapM_ (add "Arash Rouhani <[email protected]>") $ words
"yesod-text-markdown"
mapM_ (add "Matvey Aksenov <[email protected]") $ words
"terminal-size"
mapM_ (add "Luis G. Torres <[email protected]") $ words
"kdt"
{- https://github.com/fpco/stackage/pull/331
mapM_ (add "Jyotirmoy Bhattacharya <[email protected]") $ words
"hakyll"
-}
mapM_ (add "Emanuel Borsobom <[email protected]>") $ words
"text-binary"
when (ghcVer >= GhcMajorVersion 7 8) $
mapM_ (add "Emanuel Borsobom <[email protected]>") $ words
"haddock-api"
mapM_ (add "Michael Sloan <[email protected]") $ words
"th-orphans th-reify-many"
when (ghcVer == GhcMajorVersion 7 8 && not requireHP) $
mapM_ (add "Michael Snoyman") $ words =<<
[ "repa repa-io repa-algorithms repa-devil JuicyPixels-repa"
]
when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $ do
mapM_ (add "Nikita Volkov <[email protected]>") $
words "hasql hasql-postgres hasql-backend postgresql-binary" ++
words "stm-containers focus list-t slave-thread partial-handler" ++
words "neat-interpolation cases" ++
words "base-prelude mtl-prelude"
addRange "Nikita Volkov <[email protected]>" "mtl-prelude" "< 2"
mapM_ (add "Iustin Pop <[email protected]>") $ words
"prefix-units"
-- https://github.com/fpco/stackage/issues/217
addRange "Michael Snoyman" "transformers" "< 0.4"
addRange "Michael Snoyman" "mtl" "< 2.2"
addRange "Michael Snoyman" "lifted-base" "< 0.2.2.2"
-- https://github.com/fpco/stackage/issues/224
when (ghcVer <= GhcMajorVersion 7 6) $ do
addRange "Michael Snoyman" "zip-archive" "== 0.2.2.1"
addRange "Michael Snoyman" "pandoc" "== 1.12.4.2"
addRange "Michael Snoyman" "texmath" "<= 0.6.6.3"
addRange "Michael Snoyman" "attoparsec" "== 0.11.3.1"
addRange "Michael Snoyman" "parsers" "< 0.11"
addRange "Michael Snoyman" "scientific" "< 0.3"
addRange "Michael Snoyman" "aeson" "< 0.7.0.5"
addRange "Michael Snoyman" "aeson-utils" "< 0.2.2"
addRange "Michael Snoyman" "formatting" "< 5"
addRange "Michael Snoyman" "aws" "< 0.10"
addRange "Michael Snoyman" "network" "< 2.6"
addRange "Michael Snoyman" "network-uri" "< 2.6"
-- 0.16.2 fixes dependency issues with different version of GHC
-- and Haskell Platform. Now builds on GHC 7.4-7.8. Version 1.0 is
-- guaranteed to break the API. See
-- https://travis-ci.org/jswebtools/language-ecmascript for
-- current build status.
addRange "Andrey Chudnov <[email protected]>" "language-ecmascript" ">= 0.16.2 && < 1.0"
-- https://github.com/fpco/stackage/issues/271
when (ghcVer < GhcMajorVersion 7 8) $
addRange "Michael Snoyman" "aeson" "< 0.8"
-- https://github.com/fpco/stackage/issues/279
addRange "Michael Snoyman" "MonadRandom" "< 0.2"
-- https://github.com/fpco/stackage/issues/288
addRange "Michael Snoyman" "text" "< 1.2"
-- Force a specific version that's compatible with transformers 0.3
addRange "Michael Snoyman" "transformers-compat" "== 0.3.3.3"
-- https://github.com/fpco/stackage/issues/291
addRange "Michael Snoyman" "random" "< 1.0.1.3"
-- https://github.com/fpco/stackage/issues/314
addRange "Michael Snoyman" "hxt" "< 9.3.1.9"
-- https://github.com/fpco/stackage/issues/318
addRange "Michael Snoyman" "HaXml" "< 1.25"
-- https://github.com/fpco/stackage/issues/319
addRange "Michael Snoyman" "polyparse" "< 1.10"
-- https://github.com/fpco/stackage/issues/341
addRange "Michael Snoyman" "haskell-names" "< 0.5"
when (ghcVer == GhcMajorVersion 7 8 && requireHP) $ do
-- Yay workarounds for unnecessarily old versions
let peg x y = addRange "Haskell Platform" x y
peg "aeson" "== 0.7.0.4"
peg "scientific" "== 0.2.0.2"
peg "criterion" "<= 0.8.1.0"
peg "tasty-quickcheck" "< 0.8.0.3"
peg "formatting" "< 5.0"
peg "parsers" "< 0.11"
peg "lens" "< 4.2"
peg "contravariant" "< 1"
peg "adjunctions" "< 4.2"
peg "kan-extensions" "< 4.1"
peg "semigroupoids" "< 4.1"
peg "aws" "< 0.10"
peg "pandoc" "< 1.13"
peg "texmath" "<= 0.6.6.3"
peg "checkers" "== 0.3.2"
peg "HandsomeSoup" "< 0.3.3"
peg "network-uri" "< 2.6"
add :: String -> String -> Writer PackageMap ()
add maintainer package = addRange maintainer package "-any"
addRange :: String -> String -> String -> Writer PackageMap ()
addRange maintainer package range =
case simpleParse range of
Nothing -> error $ "Invalid range " ++ show range ++ " for " ++ package
Just range' -> tell $ PackageMap $ Map.singleton (PackageName package) (range', Maintainer maintainer)
-- | Hard coded Haskell Platform versions
haskellPlatform78 :: Writer PackageMap ()
haskellPlatform78 = do
addRange "Haskell Platform" "ghc" "== 7.8.3"
addRange "Haskell Platform" "haddock" "== 2.14.3"
addRange "Haskell Platform" "array" "== 0.5.0.0"
addRange "Haskell Platform" "base" "== 4.7.0.1"
addRange "Haskell Platform" "bytestring" "== 0.10.4.0"
addRange "Haskell Platform" "Cabal" "== 1.18.1.3"
addRange "Haskell Platform" "containers" "== 0.5.5.1"
addRange "Haskell Platform" "deepseq" "== 1.3.0.2"
addRange "Haskell Platform" "directory" "== 1.2.1.0"
addRange "Haskell Platform" "filepath" "== 1.3.0.2"
addRange "Haskell Platform" "haskell2010" "== 1.1.2.0"
addRange "Haskell Platform" "haskell98" "== 2.0.0.3"
addRange "Haskell Platform" "hpc" "== 0.6.0.1"
addRange "Haskell Platform" "old-locale" "== 1.0.0.6"
addRange "Haskell Platform" "old-time" "== 1.1.0.2"
addRange "Haskell Platform" "pretty" "== 1.1.1.1"
addRange "Haskell Platform" "process" "== 1.2.0.0"
addRange "Haskell Platform" "template-haskell" "== 2.9.0.0"
addRange "Haskell Platform" "time" "== 1.4.2"
addRange "Haskell Platform" "transformers" "== 0.3.0.0"
addRange "Haskell Platform" "unix" "== 2.7.0.1"
addRange "Haskell Platform" "xhtml" "== 3000.2.1"
addRange "Haskell Platform" "async" "== 2.0.1.5"
addRange "Haskell Platform" "attoparsec" "== 0.10.4.0"
addRange "Haskell Platform" "case-insensitive" "== 1.1.0.3"
addRange "Haskell Platform" "fgl" "== 5.5.0.1"
addRange "Haskell Platform" "GLURaw" "== 1.4.0.1"
addRange "Haskell Platform" "GLUT" "== 2.5.1.1"
addRange "Haskell Platform" "hashable" "== 1.2.2.0"
addRange "Haskell Platform" "haskell-src" "== 1.0.1.6"
addRange "Haskell Platform" "html" "== 1.0.1.2"
addRange "Haskell Platform" "HTTP" "== 4000.2.10"
addRange "Haskell Platform" "HUnit" "== 1.2.5.2"
addRange "Haskell Platform" "mtl" "== 2.1.3.1"
addRange "Haskell Platform" "network" "== 2.4.2.3"
addRange "Haskell Platform" "OpenGL" "== 2.9.2.0"
addRange "Haskell Platform" "OpenGLRaw" "== 1.5.0.0"
addRange "Haskell Platform" "parallel" "== 3.2.0.4"
addRange "Haskell Platform" "parsec" "== 3.1.5"
addRange "Haskell Platform" "primitive" "== 0.5.2.1"
addRange "Haskell Platform" "QuickCheck" "== 2.6"
addRange "Haskell Platform" "random" "== 1.0.1.1"
addRange "Haskell Platform" "regex-base" "== 0.93.2"
addRange "Haskell Platform" "regex-compat" "== 0.95.1"
addRange "Haskell Platform" "regex-posix" "== 0.95.2"
addRange "Haskell Platform" "split" "== 0.2.2"
addRange "Haskell Platform" "stm" "== 2.4.2"
addRange "Haskell Platform" "syb" "== 0.4.1"
addRange "Haskell Platform" "text" "== 1.1.0.0"
addRange "Haskell Platform" "transformers" "== 0.3.0.0"
addRange "Haskell Platform" "unordered-containers" "== 0.2.4.0"
addRange "Haskell Platform" "vector" "== 0.10.9.1"
addRange "Haskell Platform" "xhtml" "== 3000.2.1"
addRange "Haskell Platform" "zlib" "== 0.5.4.1"
addRange "Haskell Platform" "alex" "== 3.1.3"
addRange "Haskell Platform" "cabal-install" "== 1.18.0.5"
addRange "Haskell Platform" "happy" "== 1.19.4"
addRange "Haskell Platform" "hscolour" "== 1.20.3"
-- | Replacement Github users. This is useful when a project is owned by an
-- organization. It also lets you ping multiple users.
--
-- Note that cross organization team mentions aren't allowed by Github.
convertGithubUser :: String -> [String]
convertGithubUser x =
fromMaybe [x] $ Map.lookup (map toLower x) pairs
where
pairs = Map.fromList
[ ("diagrams", ["byorgey", "fryguybob", "jeffreyrosenbluth", "bergey"])
, ("yesodweb", ["snoyberg"])
, ("fpco", ["snoyberg"])
, ("faylang", ["bergmark"])
, ("silkapp", ["bergmark", "hesselink"])
, ("snapframework",["mightybyte"])
, ("haskell-ro", ["mihaimaruseac"])
]
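-- For example: convertGithubUser "FPCo" == ["snoyberg"], while a name without a
-- replacement, such as convertGithubUser "someuser", is returned unchanged as
-- ["someuser"].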
| feuerbach/stackage | Stackage/Config.hs | mit | 29,848 | 0 | 17 | 7,185 | 4,676 | 2,164 | 2,512 | 479 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Day15 where
import Data.Either (rights)
import Data.List (sortOn)
import Text.Parsec ((<|>) , Parsec , ParseError)
import qualified Text.Parsec as P
data Disc = Disc
{ discIndex :: Int
, discPositions :: Int
, discOffset :: Int
} deriving (Eq, Show)
parseInput :: String -> [Disc]
parseInput = rights . map (P.parse parseDisc "") . lines
parseDisc :: Parsec String () Disc
parseDisc = Disc <$>
(P.string "Disc #" *> number) <*>
(P.string " has " *> number) <*>
(P.string " positions; at time=0, it is at position " *> number <* P.char '.')
where
number = read <$> P.many1 P.digit
-- Disc i (with p positions, starting at offset o) is reached i seconds after the
-- button press at time t, so we need (t + i + o) `mod` p == 0, i.e.
-- t `mod` p == (-(i + o)) `mod` p.
makeRequirement :: Disc -> Requirement
makeRequirement (Disc i p o) = Requirement p ((-(i + o)) `mod` p)
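-- For example, a disc with 5 positions starting at position 4, placed first in
-- the stack (index 1), gives makeRequirement (Disc 1 5 4) == Requirement 5 0.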
data Requirement = Requirement
{ reqBase :: Int
, reqRemainder :: Int
} deriving (Eq, Ord, Show)
doesSatisfy :: Requirement -> Int -> Bool
doesSatisfy (Requirement p r) i = (i `mod` p) == r
-- Combine two requirements into one whose base is the product of the bases and
-- whose remainder is the smallest time satisfying both (a brute-force
-- Chinese-remainder-style step).
combine :: Requirement -> Requirement -> Requirement
combine r@(Requirement p _) r'@(Requirement p' _) = Requirement (p * p') (satisfy [r, r'])
-- Find the smallest non-negative integer that satisfies every requirement.
satisfy :: [Requirement] -> Int
satisfy [] = 0
satisfy (r : []) = reqRemainder r
satisfy ((Requirement p r) : r' : []) = head . filter (doesSatisfy r') $ map (\i -> (i * p) + r) [0..]
satisfy (r : rs) = reqRemainder $ foldl combine r rs
addDisc :: Int -> Int -> [Disc] -> [Disc]
addDisc position offset ds = (Disc (maxIndex + 1) position offset) : ds
where
maxIndex = maximum $ map discIndex ds
-- Final, top-level exports
day15 :: String -> Int
day15 = satisfy . reverse . sortOn reqBase . map makeRequirement . parseInput
day15' :: String -> Int
day15' = satisfy . reverse . sortOn reqBase . map makeRequirement . addDisc 11 0 . parseInput
-- Input
run :: IO ()
run = do
putStrLn "Day 15 results: "
input <- readFile "inputs/day15.txt"
putStrLn $ " " ++ show (day15 input)
putStrLn $ " " ++ show (day15' input)
| brianshourd/adventOfCode2016 | src/Day15.hs | mit | 1,957 | 0 | 11 | 422 | 790 | 420 | 370 | 47 | 1 |
-- This file is part of the 'union-find-array' library. It is licensed
-- under an MIT license. See the accompanying 'LICENSE' file for details.
--
-- Authors: Bertram Felgenhauer
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances, UndecidableInstances #-}
module Control.Monad.Union.Class (
MonadUnion (..),
) where
import Data.Union.Type (Node (..), Union (..))
import Control.Monad.Trans (MonadTrans (..))
import Prelude hiding (lookup)
class Monad m => MonadUnion l m | m -> l where
-- | Add a new node, with a given label.
new :: l -> m Node
-- | Find the node representing a given node, and its label.
lookup :: Node -> m (Node, l)
-- | Merge two sets. The first argument is a function that takes the labels
-- of the corresponding sets' representatives and computes a new label for
-- the joined set. Returns Nothing if the given nodes are in the same set
-- already.
merge :: (l -> l -> (l, a)) -> Node -> Node -> m (Maybe a)
-- | Re-label a node.
annotate :: Node -> l -> m ()
-- | Flatten the disjoint set forest for faster lookups.
flatten :: m ()
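-- A usage sketch (assuming some concrete monad @m@ with a @MonadUnion String m@
-- instance; the labels and combining function are arbitrary):
--
-- > do a <- new "a"
-- >    b <- new "b"
-- >    _ <- merge (\x y -> (x ++ y, ())) a b
-- >    lookup a  -- both nodes now resolve to one representative with the merged label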
instance (MonadUnion l m, MonadTrans t, Monad (t m)) => MonadUnion l (t m) where
new a = lift $ new a
lookup a = lift $ lookup a
merge a b c = lift $ merge a b c
annotate a b = lift $ annotate a b
flatten = lift $ flatten
| haskell-rewriting/union-find-array | src/Control/Monad/Union/Class.hs | mit | 1,408 | 0 | 12 | 332 | 329 | 185 | 144 | -1 | -1 |
module Data.Smashy.Types where
import Control.Concurrent.STM (TVar, newTVarIO)
import Control.Concurrent.STM.TQueue (TQueue, newTQueueIO)
import Data.Hashable (Hashable, hashWithSalt)
import Data.Vector.Storable (Vector)
import Data.Vector.Storable.MMap
import Data.Vector.Storable.Mutable as VM (IOVector, new)
import qualified STMContainers.Set as S (Set, newIO)
import Data.Word (Word8, Word32)
numBuckets :: Int
numBuckets = 200000
bucketSize :: Int
bucketSize = 1024
newtype Bucket = Bucket (VM.IOVector Word8)
newtype BucketData = BucketData (Vector Word8) deriving Show
data State = State {
resizing :: TVar Bool,
hashTableSize :: TVar Int,
hashTable :: IOVector Word32,
bucketList :: IOVector Word8,
takenHashes :: S.Set Hash,
nextFreeBucket :: TVar BucketId,
freeStore :: TQueue BucketId
}
newState :: IO State
newState = do
let htSize = 200000
buckListSize = numBuckets * bucketSize
resz <- newTVarIO False
hts <- newTVarIO htSize
ht <- VM.new htSize
--ht <- unsafeMMapMVector "./hashtable" ReadWriteEx (Just (0, htSize))
bl <- VM.new buckListSize
--bl <- unsafeMMapMVector "./bucketList" ReadWriteEx (Just (0, buckListSize))
h <- S.newIO
nfb <- newTVarIO 1
fs <- newTQueueIO
return $ State resz hts ht bl h nfb fs
newtype Escaped = Escaped (Vector Word8) deriving Show
newtype Hash = Hash Int deriving (Eq, Show)
--This could be interesting. It's its own hash!
instance Hashable Hash where
hashWithSalt _ (Hash h) = h
type Key = Escaped
type Val = Escaped
type Position = Int
type BucketId = Int
| jahaynes/smashy2 | src/Data/Smashy/Types.hs | mit | 1,760 | 0 | 10 | 468 | 450 | 254 | 196 | 43 | 1 |
module AST where
import qualified Data.Map as Map
import Data.Semigroup
type Name = String
type Form = String
data Pattern = Binding Name | Succ Pattern
deriving (Show, Eq)
data Param = FreeParam Name | LiteralParam Int | PatternParam Pattern | WildcardParam
deriving (Show, Eq)
data Expr = Number Int
| Constant Name
| Call Name [Expr]
| Inc Expr
deriving (Show, Eq)
type Instance = ([Param], Expr, Form)
data Definition = ConstDef Name Expr Form | FuncDef Name [Instance]
deriving Show
instance Semigroup Definition where
(FuncDef x xinsts) <> (FuncDef _ yinsts) = FuncDef x (yinsts ++ xinsts)
x <> _ = x
newtype Environment = Environment (Map.Map Name Definition)
deriving Show
instance Monoid Environment where
mempty = Environment Map.empty
(Environment a) `mappend` (Environment b) = Environment (Map.unionWith (<>) a b)
instance Semigroup Environment where
(<>) = mappend
showIntAsNat :: Int -> String
showIntAsNat 0 = "0"
showIntAsNat n = 'S' : showIntAsNat (pred n)
singletonEnv :: Name -> Definition -> Environment
singletonEnv n d = Environment $ Map.singleton n d
lookupEnv :: Name -> Environment -> Maybe Definition
lookupEnv n (Environment env) = Map.lookup n env
insertEnv :: Definition -> Environment -> Environment
insertEnv def@(ConstDef n _ _) env = singletonEnv n def <> env
insertEnv def@(FuncDef n _) env = singletonEnv n def <> env
defForm :: Definition -> [Form]
defForm (ConstDef _ _ f) = [f]
defForm (FuncDef _ is) = go is
where go [] = []
go ((_, _, f) : is') = f : go is'
type Program = [Definition]
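
-- Usage sketch (added; the name, expression and form below are invented
-- placeholders): build an environment containing a single constant
-- definition and look it up again.
exampleEnv :: Environment
exampleEnv = insertEnv (ConstDef "zero" (Number 0) "0") mempty

exampleLookup :: Maybe Definition
exampleLookup = lookupEnv "zero" exampleEnv   -- Just (ConstDef "zero" (Number 0) "0")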
| andreasfrom/natlang | AST.hs | mit | 1,674 | 0 | 10 | 403 | 617 | 333 | 284 | 43 | 2 |
import Data.List
-- Keep leading elements while they do not exceed n.
takemax _ [] = []
takemax n (x:xs) = if x > n then [] else x : takemax n xs
main = print (foldl1 (+) (filter even (takemax 4000000 fibonacci)))
where
fibonacci = unfoldr (\(a, b) -> Just (a + b, (b, a + b))) (1, 1) | cptroot/ProjectEuler-Haskell | Euler2.hs | mit | 218 | 0 | 13 | 47 | 138 | 76 | 62 | 4 | 2 |
{-# LANGUAGE ForeignFunctionInterface #-}
module System.Random.SplitMix.MathOperations
( c_mix32
, c_mix64
, c_mixGamma
, xorShift33
,) where
import Data.Word (Word32, Word64)
import Data.Bits (xor, shiftR)
-- | Mixing function to produce 32 bit values as per the paper
foreign import ccall unsafe "mix32" c_mix32 :: Word64 -> Word32
-- | Mixing function to produce 64 bit values as per the paper
foreign import ccall unsafe "mix64" c_mix64 :: Word64 -> Word64
-- | Mixing function to produce gamma values as per the paper
-- always produces odd values
foreign import ccall unsafe "mix_gamma" c_mixGamma :: Word64 -> Word64
-- | Bitwise operation equivalent to f n v = (v >> n) ^ v
xorShift :: Int -> Word64 -> Word64
xorShift bits value = xor value $ shiftR value bits
-- | Bitwise operation equivalent to f v = (v >> 33) ^ v
xorShift33 :: Word64 -> Word64
xorShift33 = xorShift 33
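
-- A pure-Haskell sketch (added) of a 64-bit finalizer built from
-- 'xorShift33', in the style of 'c_mix64'.  The multiplier constants below
-- are the well-known MurmurHash3 fmix64 constants and are an assumption
-- here; the C implementation bound above may use a different variant
-- (e.g. Stafford's variant 13 from the SplitMix paper).
mix64Sketch :: Word64 -> Word64
mix64Sketch z0 =
  let z1 = xorShift33 z0 * 0xff51afd7ed558ccd
      z2 = xorShift33 z1 * 0xc4ceb9fe1a85ec53
  in  xorShift33 z2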
| nkartashov/SplitMix | src/System/Random/SplitMix/MathOperations.hs | mit | 895 | 0 | 6 | 166 | 162 | 95 | 67 | 15 | 1 |
{-# LANGUAGE ScopedTypeVariables, ViewPatterns, FlexibleContexts #-}
{-|
Module : Labyrinth.Pathing.Util
Description : pathfinding utilities
Copyright : (c) deweyvm 2014
License : MIT
Maintainer : deweyvm
Stability : experimental
Portability : unknown
Functions shared across different pathfinding algorithms.
-}
module Labyrinth.Pathing.Util where
import Control.Applicative
import Control.Arrow
import Data.Maybe
import qualified Data.PSQueue as Q
import qualified Data.Map as Map
import Labyrinth.Util
expandPath :: (a -> a -> [a]) -> [a] -> [a]
expandPath _ [] = []
expandPath f xs = concat $ uncurry f <$> zip xs (tail xs)
expand :: Point -> Point -> [Point]
expand (px, py) (qx, qy) =
let dx = qx - px
dy = qy - py
sx = signum dx
sy = signum dy
n = max (abs dx) (abs dy)
iter s = (take (n+1) $ iterate (+s) 0) in
((+px) *** (+py)) <$> zip (iter sx) (iter sy)
rewindPath :: Ord a => Map.Map a a -> a -> [a] -> [a]
rewindPath path end sofar =
case Map.lookup end path of
Just next -> rewindPath path next (end:sofar)
Nothing -> sofar
euclid :: Point -> Point -> Float
euclid (i, j) (x, y) = (sqrt (xx + yy))
where xx = sq (x - i)
yy = sq (y - j)
sq = (** 2) . fromIntegral
qMember :: (Ord a, Ord b) => a -> Q.PSQ a b -> Bool
qMember = isJust .: Q.lookup
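
-- Worked examples (added sketch): 'expand' interpolates the points of a
-- straight or diagonal segment, and 'euclid' is the ordinary Euclidean
-- distance used as a heuristic.
examplePath :: [Point]
examplePath = expand (0, 0) (2, 2)        -- [(0,0),(1,1),(2,2)]

exampleRow :: [Point]
exampleRow = expand (0, 0) (3, 0)         -- [(0,0),(1,0),(2,0),(3,0)]

exampleDistance :: Float
exampleDistance = euclid (0, 0) (3, 4)    -- 5.0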
| deweyvm/labyrinth | src/Labyrinth/Pathing/Util.hs | mit | 1,385 | 0 | 13 | 362 | 522 | 283 | 239 | 32 | 2 |
main = print getProblem15Value
getProblem15Value :: Integer
getProblem15Value = getNumberOfPaths 20 20
getNumberOfPaths :: Integer -> Integer -> Integer
getNumberOfPaths x y = (fact (x+y)) `div` ((fact x) * (fact y))
fact :: Integer -> Integer
fact 0 = 1
fact 1 = 1
fact x = x * (fact (x-1))
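
-- Worked check (added sketch): a 2x2 grid has C(4,2) = 6 monotone lattice
-- paths, matching the closed form used above; the 20x20 answer is C(40,20).
exampleSmallGrid :: Integer
exampleSmallGrid = getNumberOfPaths 2 2   -- == 6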
| jchitel/ProjectEuler.hs | Problems/Problem0015.hs | mit | 296 | 0 | 9 | 55 | 135 | 71 | 64 | 9 | 1 |
{-# LANGUAGE CPP #-}
{-
ghcjs-run runs a program compiled by ghcjs with node.js
-}
module Main where
import Control.Applicative
import Data.Char
import System.Directory
import System.Environment
import System.Exit
import System.FilePath
import System.Process
main = do
args <- getArgs
path <- getExecutablePath
cd <- getCurrentDirectory
let jsExe = dropExeExtension path <.> "jsexe"
script = jsExe </> "all" <.> "js"
node <- trim <$> readFile (jsExe </> "node")
ph <- runProcess node (script:args) (Just cd) Nothing Nothing Nothing Nothing
exitWith =<< waitForProcess ph
trim :: String -> String
trim = let f = reverse . dropWhile isSpace in f . f
dropExeExtension :: FilePath -> FilePath
dropExeExtension x
| not (null exeExtension) && map toLower (takeExtension x) == exeExtension
= dropExtension x
| otherwise = x
#if !MIN_VERSION_directory(1,2,4)
exeExtension :: String
#ifdef WINDOWS
exeExtension = ".exe"
#else
exeExtension = ""
#endif
#endif
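
-- Small checks (added sketch): 'trim' strips surrounding whitespace, and
-- 'dropExeExtension' only removes a real executable extension, so the
-- second example changes only on Windows builds.
exampleTrim :: String
exampleTrim = trim "  node\n"                     -- "node"

exampleDrop :: FilePath
exampleDrop = dropExeExtension "ghcjs-run.exe"    -- "ghcjs-run" on Windows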
| ghcjs/ghcjs | src-bin/Run.hs | mit | 993 | 0 | 12 | 184 | 276 | 141 | 135 | 27 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : $Header$
Description :
Author : Nils 'bash0r' Jonsson
Copyright : (c) 2015 Nils 'bash0r' Jonsson
License : MIT
Maintainer : [email protected]
Stability : unstable
Portability : non-portable (Portability is untested.)
The 'Configuration' module contains all information needed to fill a
generated file header template.
-}
module Headergen.Configuration
( Configuration (..)
, createDictionary
) where
import Control.Applicative
import Control.Monad
import Data.Aeson
import Headergen.Template
-- | The configuration is used to read / write configuration to JSON.
data Configuration = Configuration
{ getModule :: String
, getDescription :: String
, getAuthor :: String
, getCopyright :: String
, getLicense :: String
, getMaintainer :: String
, getStability :: String
, getPortability :: String
}
deriving (Show, Eq)
instance ToJSON Configuration where
toJSON (Configuration mod desc auth copyr lic maint stab portab) =
object [ "module" .= mod
, "author" .= auth
, "description" .= desc
, "copyright" .= copyr
, "license" .= lic
, "maintainer" .= maint
, "stability" .= stab
, "portability" .= portab
]
instance FromJSON Configuration where
parseJSON (Object o) =
Configuration <$> o .: "module"
<*> o .: "description"
<*> o .: "author"
<*> o .: "copyright"
<*> o .: "license"
<*> o .: "maintainer"
<*> o .: "stability"
<*> o .: "portability"
parseJSON _ =
empty
createDictionary :: Configuration -> Dictionary
createDictionary (Configuration mod des aut cop lic mai sta por) =
[ ("module" , mod)
, ("description", des)
, ("author" , aut)
, ("copyright" , cop)
, ("license" , lic)
, ("maintainer" , mai)
, ("stability" , sta)
, ("portability", por)
]
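
-- A small usage sketch (added): every field value below is an invented
-- placeholder.  Feeding the value to 'createDictionary' yields the
-- key/value pairs used to fill a header template.
exampleConfiguration :: Configuration
exampleConfiguration = Configuration
  { getModule      = "Example.Module"
  , getDescription = "An example module."
  , getAuthor      = "Jane Doe"
  , getCopyright   = "(c) 2015 Jane Doe"
  , getLicense     = "MIT"
  , getMaintainer  = "[email protected]"
  , getStability   = "experimental"
  , getPortability = "portable"
  }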
| aka-bash0r/headergen | src/Headergen/Configuration.hs | mit | 2,034 | 0 | 21 | 628 | 407 | 235 | 172 | 50 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-applicationreferencedatasource-referenceschema.html
module Stratosphere.ResourceProperties.KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.KinesisAnalyticsApplicationReferenceDataSourceRecordColumn
import Stratosphere.ResourceProperties.KinesisAnalyticsApplicationReferenceDataSourceRecordFormat
-- | Full data type definition for
-- KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema. See
-- 'kinesisAnalyticsApplicationReferenceDataSourceReferenceSchema' for a
-- more convenient constructor.
data KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema =
KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema
{ _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordColumns :: [KinesisAnalyticsApplicationReferenceDataSourceRecordColumn]
, _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordEncoding :: Maybe (Val Text)
, _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordFormat :: KinesisAnalyticsApplicationReferenceDataSourceRecordFormat
} deriving (Show, Eq)
instance ToJSON KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema where
toJSON KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema{..} =
object $
catMaybes
[ (Just . ("RecordColumns",) . toJSON) _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordColumns
, fmap (("RecordEncoding",) . toJSON) _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordEncoding
, (Just . ("RecordFormat",) . toJSON) _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordFormat
]
-- | Constructor for
-- 'KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema'
-- containing required fields as arguments.
kinesisAnalyticsApplicationReferenceDataSourceReferenceSchema
:: [KinesisAnalyticsApplicationReferenceDataSourceRecordColumn] -- ^ 'kaardsrsRecordColumns'
-> KinesisAnalyticsApplicationReferenceDataSourceRecordFormat -- ^ 'kaardsrsRecordFormat'
-> KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema
kinesisAnalyticsApplicationReferenceDataSourceReferenceSchema recordColumnsarg recordFormatarg =
KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema
{ _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordColumns = recordColumnsarg
, _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordEncoding = Nothing
, _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordFormat = recordFormatarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-applicationreferencedatasource-referenceschema.html#cfn-kinesisanalytics-applicationreferencedatasource-referenceschema-recordcolumns
kaardsrsRecordColumns :: Lens' KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema [KinesisAnalyticsApplicationReferenceDataSourceRecordColumn]
kaardsrsRecordColumns = lens _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordColumns (\s a -> s { _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordColumns = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-applicationreferencedatasource-referenceschema.html#cfn-kinesisanalytics-applicationreferencedatasource-referenceschema-recordencoding
kaardsrsRecordEncoding :: Lens' KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema (Maybe (Val Text))
kaardsrsRecordEncoding = lens _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordEncoding (\s a -> s { _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordEncoding = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-applicationreferencedatasource-referenceschema.html#cfn-kinesisanalytics-applicationreferencedatasource-referenceschema-recordformat
kaardsrsRecordFormat :: Lens' KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema KinesisAnalyticsApplicationReferenceDataSourceRecordFormat
kaardsrsRecordFormat = lens _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordFormat (\s a -> s { _kinesisAnalyticsApplicationReferenceDataSourceReferenceSchemaRecordFormat = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsApplicationReferenceDataSourceReferenceSchema.hs | mit | 4,548 | 0 | 13 | 268 | 356 | 209 | 147 | 36 | 1 |