code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
------------------------------------------------------------------------
-- |
-- Module : Main
-- Copyright : (c) Amy de Buitléir 2012-2014
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Runs the QuickCheck tests.
--
------------------------------------------------------------------------
module Main where
import ALife.Realtra.ActionQC (test)
import ALife.Realtra.ImageQC (test)
import Test.Framework as TF (defaultMain, Test)
tests :: [TF.Test]
tests =
[
-- In increasing order of complexity
ALife.Realtra.ActionQC.test,
ALife.Realtra.ImageQC.test
]
main :: IO ()
main = defaultMain tests
|
mhwombat/creatur-realtra.OLD
|
test/Main.hs
|
bsd-3-clause
| 705 | 0 | 6 | 118 | 101 | 68 | 33 | 11 | 1 |
module Data.STM.PriorityQueue (
Impl,
module Data.STM.PriorityQueue.Class
) where
import Data.STM.PriorityQueue.Class
import Data.STM.PriorityQueue.Internal.PTSTASLPQ
-- | The default implementation
type Impl = PTSTASLPQ
|
Alllex/stm-data-collection
|
src/Data/STM/PriorityQueue.hs
|
bsd-3-clause
| 233 | 0 | 5 | 33 | 42 | 30 | 12 | 6 | 0 |
foo = do
(-) <- Just 5
return ()
|
mpickering/ghc-exactprint
|
tests/examples/ghc710/DoParens.hs
|
bsd-3-clause
| 38 | 0 | 8 | 14 | 26 | 12 | 14 | 3 | 1 |
{- |
Module : Control.Monad.Cont
Copyright : (c) The University of Glasgow 2001,
(c) Jeff Newbern 2003-2007,
(c) Andriy Palamarchuk 2007
License : BSD-style (see the file LICENSE)
Maintainer : [email protected]
Stability : experimental
Portability : portable
[Computation type:] Computations which can be interrupted and resumed.
[Binding strategy:] Binding a function to a monadic value creates
a new continuation which uses the function as the continuation of the monadic
computation.
[Useful for:] Complex control structures, error handling,
and creating co-routines.
[Zero and plus:] None.
[Example type:] @'Cont' r a@
The Continuation monad represents computations in continuation-passing style
(CPS).
In continuation-passing style, a function's result is not returned,
but instead is passed to another function,
received as a parameter (the continuation).
Computations are built up from sequences
of nested continuations, terminated by a final continuation (often @id@)
which produces the final result.
Since continuations are functions which represent the future of a computation,
manipulation of the continuation functions can achieve complex manipulations
of the future of the computation,
such as interrupting a computation in the middle, aborting a portion
of a computation, restarting a computation, and interleaving execution of
computations.
The Continuation monad adapts CPS to the structure of a monad.
Before using the Continuation monad, be sure that you have
a firm understanding of continuation-passing style
and that continuations represent the best solution to your particular
design problem.
Many algorithms which require continuations in other languages do not require
them in Haskell, due to Haskell's lazy semantics.
Abuse of the Continuation monad can produce code that is impossible
to understand and maintain.
-}
module Control.Monad.Cont (
-- * MonadCont class
MonadCont(..),
-- * The Cont monad
Cont,
cont,
runCont,
mapCont,
withCont,
-- * The ContT monad transformer
ContT(ContT),
runContT,
mapContT,
withContT,
module Control.Monad,
module Control.Monad.Trans,
-- * Example 1: Simple Continuation Usage
-- $simpleContExample
-- * Example 2: Using @callCC@
-- $callCCExample
-- * Example 3: Using @ContT@ Monad Transformer
-- $ContTExample
) where
import Control.Monad.Cont.Class
import Control.Monad.Trans
import Control.Monad.Trans.Cont
import Control.Monad
{- $simpleContExample
Calculating length of a list continuation-style:
>calculateLength :: [a] -> Cont r Int
>calculateLength l = return (length l)
Here we use @calculateLength@ by making it pass its result to @print@:
>main = do
> runCont (calculateLength "123") print
> -- result: 3
It is possible to chain 'Cont' blocks with @>>=@.
>double :: Int -> Cont r Int
>double n = return (n * 2)
>
>main = do
> runCont (calculateLength "123" >>= double) print
> -- result: 6
-}
{- $callCCExample
This example gives a taste of how escape continuations work and shows a typical
pattern for their usage.
>-- Returns a string depending on the length of the name parameter.
>-- If the provided string is empty, returns an error.
>-- Otherwise, returns a welcome message.
>whatsYourName :: String -> String
>whatsYourName name =
> (`runCont` id) $ do -- 1
> response <- callCC $ \exit -> do -- 2
> validateName name exit -- 3
> return $ "Welcome, " ++ name ++ "!" -- 4
> return response -- 5
>
>validateName name exit = do
> when (null name) (exit "You forgot to tell me your name!")
Here is what this example does:
(1) Runs an anonymous 'Cont' block and extracts value from it with
@(\`runCont\` id)@. Here @id@ is the continuation, passed to the @Cont@ block.
(1) Binds @response@ to the result of the following 'Control.Monad.Cont.Class.callCC' block,
binds @exit@ to the continuation.
(1) Validates @name@.
This approach illustrates the advantage of using 'Control.Monad.Cont.Class.callCC' over @return@.
We pass the continuation to @validateName@,
and interrupt execution of the @Cont@ block from /inside/ of @validateName@.
(1) Returns the welcome message from the 'Control.Monad.Cont.Class.callCC' block.
This line is not executed if @validateName@ fails.
(1) Returns from the @Cont@ block.
-}
{-$ContTExample
'ContT' can be used to add continuation handling to other monads.
Here is an example of how to combine it with the @IO@ monad:
>import Control.Monad.Cont
>import System.IO
>
>main = do
> hSetBuffering stdout NoBuffering
> runContT (callCC askString) reportResult
>
>askString :: (String -> ContT () IO String) -> ContT () IO String
>askString next = do
> liftIO $ putStrLn "Please enter a string"
> s <- liftIO $ getLine
> next s
>
>reportResult :: String -> IO ()
>reportResult s = do
> putStrLn ("You entered: " ++ s)
The action @askString@ requests the user to enter a string,
and passes it to the continuation.
@askString@ takes as a parameter a continuation that takes a string parameter
and returns @IO ()@.
Compare its signature to the 'runContT' definition.
-}
|
johanneshilden/principle
|
public/mtl-2.2.1/Control/Monad/Cont.hs
|
bsd-3-clause
| 5,202 | 0 | 5 | 1,019 | 99 | 72 | 27 | 19 | 0 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, MagicHash
, UnboxedTuples
, ScopedTypeVariables
#-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
-- kludge for the Control.Concurrent.QSem, Control.Concurrent.QSemN
-- and Control.Concurrent.SampleVar imports.
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (concurrency)
--
-- A common interface to a collection of useful concurrency
-- abstractions.
--
-----------------------------------------------------------------------------
module Control.Concurrent (
-- * Concurrent Haskell
-- $conc_intro
-- * Basic concurrency operations
ThreadId,
myThreadId,
forkIO,
forkFinally,
forkIOWithUnmask,
killThread,
throwTo,
-- ** Threads with affinity
forkOn,
forkOnWithUnmask,
getNumCapabilities,
setNumCapabilities,
threadCapability,
-- * Scheduling
-- $conc_scheduling
yield,
-- ** Blocking
-- $blocking
-- ** Waiting
threadDelay,
threadWaitRead,
threadWaitWrite,
threadWaitReadSTM,
threadWaitWriteSTM,
-- * Communication abstractions
module Control.Concurrent.MVar,
module Control.Concurrent.Chan,
module Control.Concurrent.QSem,
module Control.Concurrent.QSemN,
-- * Bound Threads
-- $boundthreads
rtsSupportsBoundThreads,
forkOS,
isCurrentThreadBound,
runInBoundThread,
runInUnboundThread,
-- * Weak references to ThreadIds
mkWeakThreadId,
-- * GHC's implementation of concurrency
-- |This section describes features specific to GHC's
-- implementation of Concurrent Haskell.
-- ** Haskell threads and Operating System threads
-- $osthreads
-- ** Terminating the program
-- $termination
-- ** Pre-emption
-- $preemption
-- ** Deadlock
-- $deadlock
) where
import Control.Exception.Base as Exception
import GHC.Conc hiding (threadWaitRead, threadWaitWrite,
threadWaitReadSTM, threadWaitWriteSTM)
import GHC.IO ( unsafeUnmask )
import GHC.IORef ( newIORef, readIORef, writeIORef )
import GHC.Base
import System.Posix.Types ( Fd )
import Foreign.StablePtr
import Foreign.C.Types
#ifdef mingw32_HOST_OS
import Foreign.C
import System.IO
import Data.Functor ( void )
#else
import qualified GHC.Conc
#endif
import Control.Concurrent.MVar
import Control.Concurrent.Chan
import Control.Concurrent.QSem
import Control.Concurrent.QSemN
{- $conc_intro
The concurrency extension for Haskell is described in the paper
/Concurrent Haskell/
<http://www.haskell.org/ghc/docs/papers/concurrent-haskell.ps.gz>.
Concurrency is \"lightweight\", which means that both thread creation
and context switching overheads are extremely low. Scheduling of
Haskell threads is done internally in the Haskell runtime system, and
doesn't make use of any operating system-supplied thread packages.
However, if you want to interact with a foreign library that expects your
program to use the operating system-supplied thread package, you can do so
by using 'forkOS' instead of 'forkIO'.
Haskell threads can communicate via 'MVar's, a kind of synchronised
mutable variable (see "Control.Concurrent.MVar"). Several common
concurrency abstractions can be built from 'MVar's, and these are
provided by the "Control.Concurrent" library.
In GHC, threads may also communicate via exceptions.
-}
{- $conc_scheduling
Scheduling may be either pre-emptive or co-operative,
depending on the implementation of Concurrent Haskell (see below
for information related to specific compilers). In a co-operative
system, context switches only occur when you use one of the
primitives defined in this module. This means that programs such
as:
> main = forkIO (write 'a') >> write 'b'
> where write c = putChar c >> write c
will print either @aaaaaaaaaaaaaa...@ or @bbbbbbbbbbbb...@,
instead of some random interleaving of @a@s and @b@s. In
practice, cooperative multitasking is sufficient for writing
simple graphical user interfaces.
-}
{- $blocking
Different Haskell implementations have different characteristics with
regard to which operations block /all/ threads.
Using GHC without the @-threaded@ option, all foreign calls will block
all other Haskell threads in the system, although I\/O operations will
not. With the @-threaded@ option, only foreign calls with the @unsafe@
attribute will block all other threads.
-}
-- | fork a thread and call the supplied function when the thread is about
-- to terminate, with an exception or a returned value. The function is
-- called with asynchronous exceptions masked.
--
-- > forkFinally action and_then =
-- > mask $ \restore ->
-- > forkIO $ try (restore action) >>= and_then
--
-- This function is useful for informing the parent when a child
-- terminates, for example.
--
-- @since 4.6.0.0
forkFinally :: IO a -> (Either SomeException a -> IO ()) -> IO ThreadId
forkFinally action and_then =
mask $ \restore ->
forkIO $ try (restore action) >>= and_then
-- ---------------------------------------------------------------------------
-- Bound Threads
{- $boundthreads
#boundthreads#
Support for multiple operating system threads and bound threads as described
below is currently only available in the GHC runtime system if you use the
/-threaded/ option when linking.
Other Haskell systems do not currently support multiple operating system threads.
A bound thread is a Haskell thread that is /bound/ to an operating system
thread. While the bound thread is still scheduled by the Haskell run-time
system, the operating system thread takes care of all the foreign calls made
by the bound thread.
To a foreign library, the bound thread will look exactly like an ordinary
operating system thread created using OS functions like @pthread_create@
or @CreateThread@.
Bound threads can be created using the 'forkOS' function below. All foreign
exported functions are run in a bound thread (bound to the OS thread that
called the function). Also, the @main@ action of every Haskell program is
run in a bound thread.
Why do we need this? Because if a foreign library is called from a thread
created using 'forkIO', it won't have access to any /thread-local state/ -
state variables that have specific values for each OS thread
(see POSIX's @pthread_key_create@ or Win32's @TlsAlloc@). Therefore, some
libraries (OpenGL, for example) will not work from a thread created using
'forkIO'. They work fine in threads created using 'forkOS' or when called
from @main@ or from a @foreign export@.
In terms of performance, 'forkOS' (aka bound) threads are much more
expensive than 'forkIO' (aka unbound) threads, because a 'forkOS'
thread is tied to a particular OS thread, whereas a 'forkIO' thread
can be run by any OS thread. Context-switching between a 'forkOS'
thread and a 'forkIO' thread is many times more expensive than between
two 'forkIO' threads.
Note in particular that the main program thread (the thread running
@Main.main@) is always a bound thread, so for good concurrency
performance you should ensure that the main thread is not doing
repeated communication with other threads in the system. Typically
this means forking subthreads to do the work using 'forkIO', and
waiting for the results in the main thread.
-}
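{- A minimal usage sketch (not part of this module; @useThreadLocalLib@ is a
hypothetical placeholder for any foreign code that relies on thread-local
state): check for bound-thread support before forking a bound thread.

> main :: IO ()
> main
>   | rtsSupportsBoundThreads = do tid <- forkOS useThreadLocalLib
>                                  print tid
>   | otherwise = putStrLn "Bound threads unavailable; re-link with -threaded"
-}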
-- | 'True' if bound threads are supported.
-- If @rtsSupportsBoundThreads@ is 'False', 'isCurrentThreadBound'
-- will always return 'False' and both 'forkOS' and 'runInBoundThread' will
-- fail.
rtsSupportsBoundThreads :: Bool
rtsSupportsBoundThreads = undefined
{- |
Like 'forkIO', this sparks off a new thread to run the 'IO'
computation passed as the first argument, and returns the 'ThreadId'
of the newly created thread.
However, 'forkOS' creates a /bound/ thread, which is necessary if you
need to call foreign (non-Haskell) libraries that make use of
thread-local state, such as OpenGL (see "Control.Concurrent#boundthreads").
Using 'forkOS' instead of 'forkIO' makes no difference at all to the
scheduling behaviour of the Haskell runtime system. It is a common
misconception that you need to use 'forkOS' instead of 'forkIO' to
avoid blocking all the Haskell threads when making a foreign call;
this isn't the case. To allow foreign calls to be made without
blocking all the Haskell threads (with GHC), it is only necessary to
use the @-threaded@ option when linking your program, and to make sure
the foreign import is not marked @unsafe@.
-}
forkOS :: IO () -> IO ThreadId
forkOS_entry_reimported :: StablePtr (IO ()) -> IO ()
forkOS_entry_reimported = undefined
forkOS_entry :: StablePtr (IO ()) -> IO ()
forkOS_entry stableAction = do
action <- deRefStablePtr stableAction
action
forkOS_createThread :: StablePtr (IO ()) -> IO CInt
forkOS_createThread = undefined
failNonThreaded :: IO a
failNonThreaded = fail $ "RTS doesn't support multiple OS threads "
++"(use ghc -threaded when linking)"
forkOS action0
| rtsSupportsBoundThreads = do
mv <- newEmptyMVar
b <- Exception.getMaskingState
let
-- async exceptions are masked in the child if they are masked
-- in the parent, as for forkIO (see #1048). forkOS_createThread
-- creates a thread with exceptions masked by default.
action1 = case b of
Unmasked -> unsafeUnmask action0
MaskedInterruptible -> action0
MaskedUninterruptible -> uninterruptibleMask_ action0
action_plus = Exception.catch action1 childHandler
entry <- newStablePtr (myThreadId >>= putMVar mv >> action_plus)
err <- forkOS_createThread entry
when (err /= 0) $ fail "Cannot create OS thread."
tid <- takeMVar mv
freeStablePtr entry
return tid
| otherwise = failNonThreaded
-- | Returns 'True' if the calling thread is /bound/, that is, if it is
-- safe to use foreign libraries that rely on thread-local state from the
-- calling thread.
isCurrentThreadBound :: IO Bool
isCurrentThreadBound = IO $ \ s# ->
case isCurrentThreadBound# s# of
(# s2#, flg #) -> (# s2#, isTrue# (flg /=# 0#) #)
{- |
Run the 'IO' computation passed as the first argument. If the calling thread
is not /bound/, a bound thread is created temporarily. @runInBoundThread@
doesn't finish until the 'IO' computation finishes.
You can wrap a series of foreign function calls that rely on thread-local state
with @runInBoundThread@ so that you can use them without knowing whether the
current thread is /bound/.
-}
runInBoundThread :: IO a -> IO a
runInBoundThread action
| rtsSupportsBoundThreads = do
bound <- isCurrentThreadBound
if bound
then action
else do
ref <- newIORef undefined
let action_plus = Exception.try action >>= writeIORef ref
bracket (newStablePtr action_plus)
freeStablePtr
(\cEntry -> forkOS_entry_reimported cEntry >> readIORef ref) >>=
unsafeResult
| otherwise = failNonThreaded
{- |
Run the 'IO' computation passed as the first argument. If the calling thread
is /bound/, an unbound thread is created temporarily using 'forkIO'.
@runInBoundThread@ doesn't finish until the 'IO' computation finishes.
Use this function /only/ in the rare case that you have actually observed a
performance loss due to the use of bound threads. A program that
doesn't need its main thread to be bound and makes /heavy/ use of concurrency
(e.g. a web server), might want to wrap its @main@ action in
@runInUnboundThread@.
Note that exceptions which are thrown to the current thread are thrown in turn
to the thread that is executing the given computation. This ensures there's
always a way of killing the forked thread.
-}
runInUnboundThread :: IO a -> IO a
runInUnboundThread action = do
bound <- isCurrentThreadBound
if bound
then do
mv <- newEmptyMVar
mask $ \restore -> do
tid <- forkIO $ Exception.try (restore action) >>= putMVar mv
let wait = takeMVar mv `Exception.catch` \(e :: SomeException) ->
Exception.throwTo tid e >> wait
wait >>= unsafeResult
else action
unsafeResult :: Either SomeException a -> IO a
unsafeResult = either Exception.throwIO return
-- ---------------------------------------------------------------------------
-- threadWaitRead/threadWaitWrite
-- | Block the current thread until data is available to read on the
-- given file descriptor (GHC only).
--
-- This will throw an 'IOError' if the file descriptor was closed
-- while this thread was blocked. To safely close a file descriptor
-- that has been used with 'threadWaitRead', use
-- 'GHC.Conc.closeFdWith'.
threadWaitRead :: Fd -> IO ()
threadWaitRead fd
-- #ifdef mingw32_HOST_OS
-- -- we have no IO manager implementing threadWaitRead on Windows.
-- -- fdReady does the right thing, but we have to call it in a
-- -- separate thread, otherwise threadWaitRead won't be interruptible,
-- -- and this only works with -threaded.
-- | threaded = withThread (waitFd fd 0)
-- | otherwise = case fd of
-- 0 -> do _ <- hWaitForInput stdin (-1)
-- return ()
-- -- hWaitForInput does work properly, but we can only
-- -- do this for stdin since we know its FD.
-- _ -> error "threadWaitRead requires -threaded on Windows, or use System.IO.hWaitForInput"
-- #else
= GHC.Conc.threadWaitRead fd
-- #endif
-- | Block the current thread until data can be written to the
-- given file descriptor (GHC only).
--
-- This will throw an 'IOError' if the file descriptor was closed
-- while this thread was blocked. To safely close a file descriptor
-- that has been used with 'threadWaitWrite', use
-- 'GHC.Conc.closeFdWith'.
threadWaitWrite :: Fd -> IO ()
threadWaitWrite fd
-- #ifdef mingw32_HOST_OS
-- | threaded = withThread (waitFd fd 1)
-- | otherwise = error "threadWaitWrite requires -threaded on Windows"
-- #else
= GHC.Conc.threadWaitWrite fd
-- #endif
-- | Returns an STM action that can be used to wait for data
-- to read from a file descriptor. The second returned value
-- is an IO action that can be used to deregister interest
-- in the file descriptor.
--
-- @since 4.7.0.0
threadWaitReadSTM :: Fd -> IO (STM (), IO ())
threadWaitReadSTM fd
-- #ifdef mingw32_HOST_OS
-- | threaded = do v <- newTVarIO Nothing
-- mask_ $ void $ forkIO $ do result <- try (waitFd fd 0)
-- atomically (writeTVar v $ Just result)
-- let waitAction = do result <- readTVar v
-- case result of
-- Nothing -> retry
-- Just (Right ()) -> return ()
-- Just (Left e) -> throwSTM (e :: IOException)
-- let killAction = return ()
-- return (waitAction, killAction)
-- | otherwise = error "threadWaitReadSTM requires -threaded on Windows"
-- #else
= GHC.Conc.threadWaitReadSTM fd
-- #endif
-- | Returns an STM action that can be used to wait until data
-- can be written to a file descriptor. The second returned value
-- is an IO action that can be used to deregister interest
-- in the file descriptor.
--
-- @since 4.7.0.0
threadWaitWriteSTM :: Fd -> IO (STM (), IO ())
threadWaitWriteSTM fd
-- #ifdef mingw32_HOST_OS
-- | threaded = do v <- newTVarIO Nothing
-- mask_ $ void $ forkIO $ do result <- try (waitFd fd 1)
-- atomically (writeTVar v $ Just result)
-- let waitAction = do result <- readTVar v
-- case result of
-- Nothing -> retry
-- Just (Right ()) -> return ()
-- Just (Left e) -> throwSTM (e :: IOException)
-- let killAction = return ()
-- return (waitAction, killAction)
-- | otherwise = error "threadWaitWriteSTM requires -threaded on Windows"
-- #else
= GHC.Conc.threadWaitWriteSTM fd
-- #endif
-- #ifdef mingw32_HOST_OS
-- foreign import ccall unsafe "rtsSupportsBoundThreads" threaded :: Bool
-- withThread :: IO a -> IO a
-- withThread io = do
-- m <- newEmptyMVar
-- _ <- mask_ $ forkIO $ try io >>= putMVar m
-- x <- takeMVar m
-- case x of
-- Right a -> return a
-- Left e -> throwIO (e :: IOException)
-- waitFd :: Fd -> CInt -> IO ()
-- waitFd fd write = do
-- throwErrnoIfMinus1_ "fdReady" $
-- fdReady (fromIntegral fd) write iNFINITE 0
-- iNFINITE :: CInt
-- iNFINITE = 0xFFFFFFFF -- urgh
-- foreign import ccall safe "fdReady"
-- fdReady :: CInt -> CInt -> CInt -> CInt -> IO CInt
-- #endif
-- ---------------------------------------------------------------------------
-- More docs
{- $osthreads
#osthreads# In GHC, threads created by 'forkIO' are lightweight threads, and
are managed entirely by the GHC runtime. Typically Haskell
threads are an order of magnitude or two more efficient (in
terms of both time and space) than operating system threads.
The downside of having lightweight threads is that only one can
run at a time, so if one thread blocks in a foreign call, for
example, the other threads cannot continue. The GHC runtime
works around this by making use of full OS threads where
necessary. When the program is built with the @-threaded@
option (to link against the multithreaded version of the
runtime), a thread making a @safe@ foreign call will not block
the other threads in the system; another OS thread will take
over running Haskell threads until the original call returns.
The runtime maintains a pool of these /worker/ threads so that
multiple Haskell threads can be involved in external calls
simultaneously.
The "System.IO" library manages multiplexing in its own way. On
Windows systems it uses @safe@ foreign calls to ensure that
threads doing I\/O operations don't block the whole runtime,
whereas on Unix systems all the currently blocked I\/O requests
are managed by a single thread (the /IO manager thread/) using
a mechanism such as @epoll@ or @kqueue@, depending on what is
provided by the host operating system.
The runtime will run a Haskell thread using any of the available
worker OS threads. If you need control over which particular OS
thread is used to run a given Haskell thread, perhaps because
you need to call a foreign library that uses OS-thread-local
state, then you need bound threads (see "Control.Concurrent#boundthreads").
If you don't use the @-threaded@ option, then the runtime does
not make use of multiple OS threads. Foreign calls will block
all other running Haskell threads until the call returns. The
"System.IO" library still does multiplexing, so there can be multiple
threads doing I\/O, and this is handled internally by the runtime using
@select@.
-}
{- $termination
In a standalone GHC program, only the main thread is
required to terminate in order for the process to terminate.
Thus all other forked threads will simply terminate at the same
time as the main thread (the terminology for this kind of
behaviour is \"daemonic threads\").
If you want the program to wait for child threads to
finish before exiting, you need to program this yourself. A
simple mechanism is to have each child thread write to an
'MVar' when it completes, and have the main
thread wait on all the 'MVar's before
exiting:
> myForkIO :: IO () -> IO (MVar ())
> myForkIO io = do
> mvar <- newEmptyMVar
> forkFinally io (\_ -> putMVar mvar ())
> return mvar
Note that we use 'forkFinally' to make sure that the
'MVar' is written to even if the thread dies or
is killed for some reason.
A better method is to keep a global list of all child
threads which we should wait for at the end of the program:
> children :: MVar [MVar ()]
> children = unsafePerformIO (newMVar [])
>
> waitForChildren :: IO ()
> waitForChildren = do
> cs <- takeMVar children
> case cs of
> [] -> return ()
> m:ms -> do
> putMVar children ms
> takeMVar m
> waitForChildren
>
> forkChild :: IO () -> IO ThreadId
> forkChild io = do
> mvar <- newEmptyMVar
> childs <- takeMVar children
> putMVar children (mvar:childs)
> forkFinally io (\_ -> putMVar mvar ())
>
> main =
> later waitForChildren $
> ...
The main thread principle also applies to calls to Haskell from
outside, using @foreign export@. When the @foreign export@ed
function is invoked, it starts a new main thread, and it returns
when this main thread terminates. If the call causes new
threads to be forked, they may remain in the system after the
@foreign export@ed function has returned.
-}
{- $preemption
GHC implements pre-emptive multitasking: the execution of
threads is interleaved in a random fashion. More specifically,
a thread may be pre-empted whenever it allocates some memory,
which unfortunately means that tight loops which do no
allocation tend to lock out other threads (this only seems to
happen with pathological benchmark-style code, however).
The rescheduling timer runs on a 20ms granularity by
default, but this may be altered using the
@-i\<n\>@ RTS option. After a rescheduling
\"tick\" the running thread is pre-empted as soon as
possible.
One final note: the
@aaaa@ @bbbb@ example may not
work too well on GHC (see Scheduling, above), due
to the locking on a 'System.IO.Handle'. Only one thread
may hold the lock on a 'System.IO.Handle' at any one
time, so if a reschedule happens while a thread is holding the
lock, the other thread won't be able to run. The upshot is that
the switch from @aaaa@ to
@bbbbb@ happens infrequently. It can be
improved by lowering the reschedule tick period. We also have a
patch that causes a reschedule whenever a thread waiting on a
lock is woken up, but haven't found it to be useful for anything
other than this example :-)
-}
{- $deadlock
GHC attempts to detect when threads are deadlocked using the garbage
collector. A thread that is not reachable (cannot be found by
following pointers from live objects) must be deadlocked, and in this
case the thread is sent an exception. The exception is either
'BlockedIndefinitelyOnMVar', 'BlockedIndefinitelyOnSTM',
'NonTermination', or 'Deadlock', depending on the way in which the
thread is deadlocked.
Note that this feature is intended for debugging, and should not be
relied on for the correct operation of your program. There is no
guarantee that the garbage collector will be accurate enough to detect
your deadlock, and no guarantee that the garbage collector will run in
a timely enough manner. Basically, the same caveats as for finalizers
apply to deadlock detection.
There is a subtle interaction between deadlock detection and
finalizers (as created by 'Foreign.Concurrent.newForeignPtr' or the
functions in "System.Mem.Weak"): if a thread is blocked waiting for a
finalizer to run, then the thread will be considered deadlocked and
sent an exception. So preferably don't do this, but if you have no
alternative then it is possible to prevent the thread from being
considered deadlocked by making a 'StablePtr' pointing to it. Don't
forget to release the 'StablePtr' later with 'freeStablePtr'.
-}
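{- A minimal sketch of the 'StablePtr' trick mentioned above (the wrapper name
@keepAliveWhile@ is just for illustration): holding a 'StablePtr' to the
current thread while it blocks, for example on a finalizer, stops the RTS
from deeming it deadlocked; the pointer is released afterwards.

> keepAliveWhile :: IO a -> IO a
> keepAliveWhile act =
>   bracket (newStablePtr =<< myThreadId) freeStablePtr (const act)
-}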
|
alexander-at-github/eta
|
libraries/base/Control/Concurrent.hs
|
bsd-3-clause
| 24,713 | 0 | 21 | 6,049 | 1,287 | 750 | 537 | 128 | 3 |
module RawSqlTest where
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Text as T
import Init
import PersistTestPetType
import PersistentTestModels
specsWith :: Runner SqlBackend m => RunDb SqlBackend m -> Spec
specsWith runDb = describe "rawSql" $ do
it "2+2" $ runDb $ do
ret <- rawSql "SELECT 2+2" []
liftIO $ ret @?= [Single (4::Int)]
it "?-?" $ runDb $ do
ret <- rawSql "SELECT ?-?" [PersistInt64 5, PersistInt64 3]
liftIO $ ret @?= [Single (2::Int)]
it "NULL" $ runDb $ do
ret <- rawSql "SELECT NULL" []
liftIO $ ret @?= [Nothing :: Maybe (Single Int)]
it "entity" $ runDb $ do
Entity p1k p1 <- insertEntity $ Person "Mathias" 23 Nothing
Entity p2k p2 <- insertEntity $ Person "Norbert" 44 Nothing
Entity p3k _ <- insertEntity $ Person "Cassandra" 19 Nothing
Entity _ _ <- insertEntity $ Person "Thiago" 19 Nothing
Entity a1k a1 <- insertEntity $ Pet p1k "Rodolfo" Cat
Entity a2k a2 <- insertEntity $ Pet p1k "Zeno" Cat
Entity a3k a3 <- insertEntity $ Pet p2k "Lhama" Dog
Entity _ _ <- insertEntity $ Pet p3k "Abacate" Cat
escape <- ((. DBName) . connEscapeName) `fmap` ask
person <- getTableName (error "rawSql Person" :: Person)
name <- getFieldName PersonName
let query = T.concat [ "SELECT ??, ?? "
, "FROM ", person
, ", ", escape "Pet"
, " WHERE ", person, ".", escape "age", " >= ? "
, "AND ", escape "Pet", ".", escape "ownerId", " = "
, person, ".", escape "id"
, " ORDER BY ", person, ".", name
]
ret <- rawSql query [PersistInt64 20]
liftIO $ ret @?= [ (Entity p1k p1, Entity a1k a1)
, (Entity p1k p1, Entity a2k a2)
, (Entity p2k p2, Entity a3k a3) ]
ret2 <- rawSql query [PersistInt64 20]
liftIO $ ret2 @?= [ (Just (Entity p1k p1), Just (Entity a1k a1))
, (Just (Entity p1k p1), Just (Entity a2k a2))
, (Just (Entity p2k p2), Just (Entity a3k a3)) ]
ret3 <- rawSql query [PersistInt64 20]
liftIO $ ret3 @?= [ Just (Entity p1k p1, Entity a1k a1)
, Just (Entity p1k p1, Entity a2k a2)
, Just (Entity p2k p2, Entity a3k a3) ]
it "order-proof" $ runDb $ do
let p1 = Person "Zacarias" 93 Nothing
p1k <- insert p1
escape <- ((. DBName) . connEscapeName) `fmap` ask
let query = T.concat [ "SELECT ?? "
, "FROM ", escape "Person"
]
ret1 <- rawSql query []
ret2 <- rawSql query [] :: MonadIO m => SqlPersistT m [Entity (ReverseFieldOrder Person)]
liftIO $ ret1 @?= [Entity p1k p1]
liftIO $ ret2 @?= [Entity (RFOKey $ unPersonKey p1k) (RFO p1)]
it "OUTER JOIN" $ runDb $ do
let insert' :: (PersistStore backend, PersistEntity val, PersistEntityBackend val ~ BaseBackend backend, MonadIO m)
=> val -> ReaderT backend m (Key val, val)
insert' v = insert v >>= \k -> return (k, v)
(p1k, p1) <- insert' $ Person "Mathias" 23 Nothing
(p2k, p2) <- insert' $ Person "Norbert" 44 Nothing
(a1k, a1) <- insert' $ Pet p1k "Rodolfo" Cat
(a2k, a2) <- insert' $ Pet p1k "Zeno" Cat
escape <- ((. DBName) . connEscapeName) `fmap` ask
let query = T.concat [ "SELECT ??, ?? "
, "FROM ", person
, "LEFT OUTER JOIN ", pet
, " ON ", person, ".", escape "id"
, " = ", pet, ".", escape "ownerId"
, " ORDER BY ", person, ".", escape "name"]
person = escape "Person"
pet = escape "Pet"
ret <- rawSql query []
liftIO $ ret @?= [ (Entity p1k p1, Just (Entity a1k a1))
, (Entity p1k p1, Just (Entity a2k a2))
, (Entity p2k p2, Nothing) ]
it "handles lower casing" $
runDb $ do
C.runConduitRes $ rawQuery "SELECT full_name from lower_case_table WHERE my_id=5" [] C..| CL.sinkNull
C.runConduitRes $ rawQuery "SELECT something_else from ref_table WHERE id=4" [] C..| CL.sinkNull
it "commit/rollback" $ do
caseCommitRollback runDb
runDb cleanDB
caseCommitRollback :: Runner SqlBackend m => RunDb SqlBackend m -> Assertion
caseCommitRollback runDb = runDb $ do
let filt :: [Filter Person1]
filt = []
let p = Person1 "foo" 0
_ <- insert p
_ <- insert p
_ <- insert p
c1 <- count filt
c1 @== 3
transactionSave
c2 <- count filt
c2 @== 3
_ <- insert p
transactionUndo
c3 <- count filt
c3 @== 3
_ <- insert p
transactionSave
_ <- insert p
_ <- insert p
transactionUndo
c4 <- count filt
c4 @== 4
|
gbwey/persistent
|
persistent-test/src/RawSqlTest.hs
|
mit
| 5,078 | 0 | 18 | 1,805 | 1,801 | 877 | 924 | -1 | -1 |
module Deptup0 () where
import Language.Haskell.Liquid.Prelude
{-@ data Pair a b <p :: x0:a -> x1:b -> Prop> = P (x :: a) (y :: b<p x>) @-}
data Pair a b = P a b
{-@ mkP :: forall a <q :: y0:a -> y1:a -> Prop>. x: a -> y: a<q x> -> Pair <q> a a @-}
mkP :: a -> a -> Pair a a
mkP x y = P x y
incr :: Int -> Int
incr x = x + 1
baz x = mkP x (incr x)
chk :: Pair Int Int -> Bool
chk (P x y) = liquidAssertB (x < y)
prop = chk $ baz n
where n = choose 100
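-- Informal sketch of why the check succeeds: in @baz x = mkP x (incr x)@ the
-- abstract refinement @q@ is instantiated so that the second component equals
-- @x + 1@, hence is strictly greater than the first; that is exactly the fact
-- @liquidAssertB (x < y)@ needs in @chk@.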
|
abakst/liquidhaskell
|
tests/pos/deptupW.hs
|
bsd-3-clause
| 463 | 0 | 7 | 132 | 164 | 87 | 77 | 12 | 1 |
-- | Display mode is for drawing a static picture.
module Graphics.Gloss.Interface.Pure.Display
( module Graphics.Gloss.Data.Display
, module Graphics.Gloss.Data.Picture
, module Graphics.Gloss.Data.Color
, display)
where
import Graphics.Gloss.Data.Display
import Graphics.Gloss.Data.Picture
import Graphics.Gloss.Data.Color
import Graphics.Gloss.Internals.Interface.Display
import Graphics.Gloss.Internals.Interface.Backend
-- | Open a new window and display the given picture.
--
-- Use the following commands once the window is open:
--
-- * Quit - esc-key.
--
-- * Move Viewport - left-click drag, arrow keys.
--
-- * Rotate Viewport - right-click drag, control-left-click drag, or home\/end-keys.
--
-- * Zoom Viewport - mouse wheel, or page up\/down-keys.
--
display :: Display -- ^ Display mode.
-> Color -- ^ Background color.
-> Picture -- ^ The picture to draw.
-> IO ()
display = displayWithBackend defaultBackendState
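-- A minimal usage sketch (assuming the standard gloss 'InWindow', 'white' and
-- 'Circle' values re-exported via "Graphics.Gloss"): open a 400x400 window
-- showing a circle on a white background.
--
-- > import Graphics.Gloss
-- >
-- > main :: IO ()
-- > main = display (InWindow "Demo" (400, 400) (10, 10)) white (Circle 80)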
|
gscalzo/HaskellTheHardWay
|
gloss-try/gloss-master/gloss/Graphics/Gloss/Interface/Pure/Display.hs
|
mit
| 1,021 | 2 | 9 | 214 | 123 | 87 | 36 | 15 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.Path2D
(js_newPath2D, newPath2D, js_newPath2D', newPath2D',
js_newPath2D'', newPath2D'', js_addPath, addPath, js_closePath,
closePath, js_moveTo, moveTo, js_lineTo, lineTo,
js_quadraticCurveTo, quadraticCurveTo, js_bezierCurveTo,
bezierCurveTo, js_arcTo, arcTo, js_rect, rect, js_arc, arc, Path2D,
castToPath2D, gTypePath2D)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "new window[\"Path2D\"]()"
js_newPath2D :: IO (JSRef Path2D)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D Mozilla Path2D documentation>
newPath2D :: (MonadIO m) => m Path2D
newPath2D = liftIO (js_newPath2D >>= fromJSRefUnchecked)
foreign import javascript unsafe "new window[\"Path2D\"]($1)"
js_newPath2D' :: JSRef Path2D -> IO (JSRef Path2D)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D Mozilla Path2D documentation>
newPath2D' :: (MonadIO m) => Maybe Path2D -> m Path2D
newPath2D' path
= liftIO
(js_newPath2D' (maybe jsNull pToJSRef path) >>= fromJSRefUnchecked)
foreign import javascript unsafe "new window[\"Path2D\"]($1)"
js_newPath2D'' :: JSString -> IO (JSRef Path2D)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D Mozilla Path2D documentation>
newPath2D'' :: (MonadIO m, ToJSString text) => text -> m Path2D
newPath2D'' text
= liftIO (js_newPath2D'' (toJSString text) >>= fromJSRefUnchecked)
foreign import javascript unsafe "$1[\"addPath\"]($2, $3)"
js_addPath ::
JSRef Path2D -> JSRef Path2D -> JSRef SVGMatrix -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.addPath Mozilla Path2D.addPath documentation>
addPath ::
(MonadIO m) => Path2D -> Maybe Path2D -> Maybe SVGMatrix -> m ()
addPath self path transform
= liftIO
(js_addPath (unPath2D self) (maybe jsNull pToJSRef path)
(maybe jsNull pToJSRef transform))
foreign import javascript unsafe "$1[\"closePath\"]()" js_closePath
:: JSRef Path2D -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.closePath Mozilla Path2D.closePath documentation>
closePath :: (MonadIO m) => Path2D -> m ()
closePath self = liftIO (js_closePath (unPath2D self))
foreign import javascript unsafe "$1[\"moveTo\"]($2, $3)" js_moveTo
:: JSRef Path2D -> Float -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.moveTo Mozilla Path2D.moveTo documentation>
moveTo :: (MonadIO m) => Path2D -> Float -> Float -> m ()
moveTo self x y = liftIO (js_moveTo (unPath2D self) x y)
foreign import javascript unsafe "$1[\"lineTo\"]($2, $3)" js_lineTo
:: JSRef Path2D -> Float -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.lineTo Mozilla Path2D.lineTo documentation>
lineTo :: (MonadIO m) => Path2D -> Float -> Float -> m ()
lineTo self x y = liftIO (js_lineTo (unPath2D self) x y)
foreign import javascript unsafe
"$1[\"quadraticCurveTo\"]($2, $3,\n$4, $5)" js_quadraticCurveTo ::
JSRef Path2D -> Float -> Float -> Float -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.quadraticCurveTo Mozilla Path2D.quadraticCurveTo documentation>
quadraticCurveTo ::
(MonadIO m) => Path2D -> Float -> Float -> Float -> Float -> m ()
quadraticCurveTo self cpx cpy x y
= liftIO (js_quadraticCurveTo (unPath2D self) cpx cpy x y)
foreign import javascript unsafe
"$1[\"bezierCurveTo\"]($2, $3, $4,\n$5, $6, $7)" js_bezierCurveTo
::
JSRef Path2D ->
Float -> Float -> Float -> Float -> Float -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.bezierCurveTo Mozilla Path2D.bezierCurveTo documentation>
bezierCurveTo ::
(MonadIO m) =>
Path2D ->
Float -> Float -> Float -> Float -> Float -> Float -> m ()
bezierCurveTo self cp1x cp1y cp2x cp2y x y
= liftIO (js_bezierCurveTo (unPath2D self) cp1x cp1y cp2x cp2y x y)
foreign import javascript unsafe
"$1[\"arcTo\"]($2, $3, $4, $5, $6)" js_arcTo ::
JSRef Path2D -> Float -> Float -> Float -> Float -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.arcTo Mozilla Path2D.arcTo documentation>
arcTo ::
(MonadIO m) =>
Path2D -> Float -> Float -> Float -> Float -> Float -> m ()
arcTo self x1 y1 x2 y2 radius
= liftIO (js_arcTo (unPath2D self) x1 y1 x2 y2 radius)
foreign import javascript unsafe "$1[\"rect\"]($2, $3, $4, $5)"
js_rect ::
JSRef Path2D -> Float -> Float -> Float -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.rect Mozilla Path2D.rect documentation>
rect ::
(MonadIO m) => Path2D -> Float -> Float -> Float -> Float -> m ()
rect self x y width height
= liftIO (js_rect (unPath2D self) x y width height)
foreign import javascript unsafe
"$1[\"arc\"]($2, $3, $4, $5, $6,\n$7)" js_arc ::
JSRef Path2D ->
Float -> Float -> Float -> Float -> Float -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Path2D.arc Mozilla Path2D.arc documentation>
arc ::
(MonadIO m) =>
Path2D -> Float -> Float -> Float -> Float -> Float -> Bool -> m ()
arc self x y radius startAngle endAngle anticlockwise
= liftIO
(js_arc (unPath2D self) x y radius startAngle endAngle
anticlockwise)
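-- A small usage sketch built only from the bindings defined above (the
-- function name @buildTriangle@ is just for illustration): create a 'Path2D'
-- and trace a closed triangle on it.
--
-- > buildTriangle :: MonadIO m => m Path2D
-- > buildTriangle = do
-- >   p <- newPath2D
-- >   moveTo p 0 0
-- >   lineTo p 50 0
-- >   lineTo p 25 40
-- >   closePath p
-- >   return p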
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/Path2D.hs
|
mit
| 6,260 | 132 | 10 | 1,164 | 1,638 | 885 | 753 | 101 | 1 |
-- | Load the Default profile in SWTOR and save it with the name "Default".
-- Then run this to generate Default.hs.
module Main (main) where
import Text.Show.Pretty
import SWTOR.UIProfile
import SWTOR.UIProfile.XMLSerialization
main :: IO ()
main = do
profilePath <- getProfilePath "Default"
let hsPath = "src/SWTOR/UIProfile/Internal/Default.hs.new"
putStrLn ("Reading " ++ show profilePath)
prof <- readProfile profilePath
putStrLn ("Writing " ++ show hsPath)
writeFile hsPath . unlines $
[ "{-# LANGUAGE OverloadedStrings #-}"
, "-- | This module has been generated automatically by swtor-ui-generate-default."
, "module SWTOR.UIProfile.Internal.Default (defaultXMLProfile) where"
, "import Data.Map"
, "import SWTOR.UIProfile.XMLSerialization"
, "defaultXMLProfile :: XMLProfile"
, "defaultXMLProfile = " ++ ppShow prof
]
|
ion1/swtor-ui
|
app/GenerateDefault.hs
|
mit
| 875 | 0 | 10 | 155 | 146 | 76 | 70 | 19 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Data.Enumerator.Hoist where
import Control.Monad.Hoist
import qualified Data.Enumerator as E
instance MonadHoist (E.Iteratee a) where
hoist f m = E.Iteratee $ do
v <- f $ E.runIteratee m
case v of
E.Continue c -> return $ E.Continue ((hoist f) . c)
E.Yield b s -> return $ E.Yield b s
E.Error e -> return $ E.Error e
|
DanielWaterworth/siege
|
src/Data/Enumerator/Hoist.hs
|
mit
| 394 | 0 | 17 | 93 | 154 | 78 | 76 | 11 | 0 |
-- Sum Times Tables
-- http://www.codewars.com/kata/551e51155ed5ab41450006e1/
module Codewars.Kata.Sum where
sumTimesTables :: [Integer] -> Integer -> Integer -> Integer
sumTimesTables tbl n m = sum . map (* ((n+m) * (m-n+1) `div` 2)) $ tbl
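-- Each table t contributes t*n + t*(n+1) + ... + t*m, i.e. t multiplied by the
-- arithmetic series (n+m)*(m-n+1)/2, which is the closed form used above.
-- Worked example: sumTimesTables [2,5] 1 3 = (2+5) * 6 = 42,
-- since (1+3)*(3-1+1) `div` 2 == 6.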
|
gafiatulin/codewars
|
src/Beta/Sum.hs
|
mit
| 243 | 0 | 14 | 36 | 87 | 50 | 37 | 3 | 1 |
import Control.Monad
import Data.List
readNumbers :: String -> [Int]
readNumbers = map read . words
readPair [a, b] = (a, b)
buy k toys = length $ takeWhile (<k) $ scanl (+) 0 toys
main :: IO ()
main = do
input <- getLine
let (n, k) = readPair (readNumbers input)
input <- getLine
let toys = sort $ readNumbers input
-- print toys
let ans = buy k toys
print $ ans - 1
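-- Worked example: with budget k = 7 and sorted prices [1,2,3,4],
-- scanl (+) 0 yields the running totals [0,1,3,6,10]; takeWhile (<7)
-- keeps [0,1,3,6] (length 4), and subtracting the leading 0 gives
-- 3 affordable toys (1+2+3 = 6).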
|
mgrebenets/hackerrank
|
alg/greedy/mark-and-toys.hs
|
mit
| 399 | 0 | 12 | 106 | 185 | 94 | 91 | 14 | 1 |
{-# htermination addListToFM_C :: (b -> b -> b) -> FiniteMap Float b -> [(Float,b)] -> FiniteMap Float b #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_addListToFM_C_6.hs
|
mit
| 126 | 0 | 3 | 22 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE ScopedTypeVariables, FlexibleContexts #-}
-- | Implementation details and the internal API.
module Hahet.Internal where
import Prelude hiding (FilePath)
import Control.Applicative
import Control.Lens
import Control.Monad.RWS
import Control.Monad.Logger
import Control.Monad.Reader
import Data.Text (Text)
import qualified Data.Text as T
import Data.Tree
import Data.Typeable
import Shelly
import System.Log.FastLogger
import Debug.Trace as D
import Hahet.Logging
default (Text)
----------------------------------------------------
-- * Types
-- ** Configuration
-- | Hahet configurations
class (Typeable variant, Typeable conf) => Hahet variant conf where
type ConfigId = Text
-- | Configurations are defined in the C Monad.
--
-- In C conf a, we have access to
--
-- 1. Read-only configuration data conf which holds parameters for the
-- configuration,
-- 2. Actual configuration as Configuration conf, and
-- 3. Support to log messages from configuring stage.
--
-- Note that C is pure, so no I/O operations may be performed at
-- configuration stage. This way we ensure that the resulting application
-- is independent of the system it was built on, and therefore can be
-- applied to any system.
newtype C variant conf a = C {
unC :: RWS conf [LogMsg] (Application variant) a
} deriving ( Functor, Applicative, Monad, MonadReader conf
, MonadWriter [LogMsg], MonadState (Application variant))
-- ** Apply
-- | An @Application config@ holds everything necessary to apply the
-- configuration against a system given necessary parameters. The system
-- may be the local system or the configuration may be applied over SSH.
data Application variant = Application
{ _appTargets :: [AppTarget variant] }
data AppTarget variant where
MkTarget :: Target variant conf target => conf -> target -> AppTarget variant
MkSubApp :: Application variant -> AppTarget variant
-- | System-changing operations are described in @Apply@.
newtype Apply variant conf a = Apply {
unApply :: ReaderT (conf, RTConf variant) Sh a
} deriving ( Functor, Applicative, Monad, MonadIO
, MonadReader (conf, RTConf variant))
-- | Run-time configuration
data RTConf variant = RTConf
{ _rtApplication :: Application variant
, _rtLogLevel :: Bool -- ^ Debug or not
, _rtLogger :: Logger
}
-- ** Targets
-- | Targets are the primitives of configurations. They may be applied
-- (@manage@), or revoked (@revoke@).
--
-- Minimal complete definition: @targetApply@.
class (Hahet variant conf, Typeable target) => Target variant conf target where
-- | How to describe a target in (verbose) logging. Default
-- implementation: `typeOf target`.
targetDesc :: conf -> target -> Text
targetDesc _ = T.pack . show . typeOf
-- | Executes the target. Returns the results from doing so. See
-- @ApplyResult@ for possible results.
targetApply :: target -> Apply variant conf ApplyResult
-- | Apply a list of targets of some type. Default implementation:
-- `mapM targetApply`. May be overridden for efficiency and finer
-- control.
targetApplyAll :: [target] -> Apply variant conf [ApplyResult]
targetApplyAll = mapM targetApply
-- | Check whether the target conflicts with another target of the same
-- target type. The default implementation assumes targets won't
-- conflict (always returns Nothing).
targetConflicts :: target -> target -> Maybe Conflict
targetConflicts _ _ = Nothing
--instance Target c t => Target c [t] where
-- targetDesc _ [] = "(no targets)"
-- targetDesc _ (_:[]) = "one target"
-- targetDesc _ xs = T.pack (show $ length xs) <> " targets"
-- targetApply = liftM ResMany . targetApplyAll
-- | Explanation for a possible conflict between two targets (of same type).
type Conflict = Text
-- ** Results
-- | Result from applying a target.
data ApplyResult = ResSuccess -- ^ Target was successfully applied.
| ResFailed Text -- ^ Target failed to apply.
| ResNoop -- ^ Target had nothing to do.
| ResOther Text -- ^ Something else happened.
| ResMany [ApplyResult] -- ^ There may also be many results
deriving Typeable
-- | Apply-time flags.
data Flag = ModuleFlag Text
| DevFlag Text
-- | The results from applying a configuration.
type Results conf = Forest (String, ApplyResult)
----------------------------------------------------
-- * Base machinery
-- Note that functions below use these lenses! ----------
makeLenses ''Application
makeLenses ''RTConf
-- ** Evaluate configuration
-- | Evaluate a configuration to an application.
runConfig :: Hahet variant conf => conf -> C variant conf () -> (Application variant, [LogMsg])
runConfig conf comp = (app, logs)
where
(_, app, logs) = applyConfig conf initApplication comp
applyConfig :: Hahet variant conf => conf -> Application variant -> C variant conf a
-> (a, Application variant, [LogMsg])
applyConfig conf app c = runRWS (unC c) conf app
-- | Include a configuration inside a configuration isolated from the
-- parent configuration.
section :: (Hahet variant conf0, Hahet variant conf1)
=> C variant conf1 a -> conf1 -> C variant conf0 a
section comp conf = do
addTarget (MkSubApp subapp)
mapM_ (logc . LogMsgFrom (T.pack . show $ typeOf conf)) sublogs
return res
where
(res, subapp, sublogs) = applyConfig conf initApplication comp
addTarget :: Hahet variant conf => AppTarget variant -> C variant conf ()
addTarget target = do
modify $ Application . (:) target . _appTargets
-- | Create a new empty application.
--
-- Note that applying an empty application is the same as not doing
-- anything at all.
initApplication :: Application variant
initApplication = Application [] -- c mempty mempty
getConf :: C variant conf conf
getConf = ask
-- * Logging
instance Hahet variant conf => MonadLogger (Apply variant conf) where
monadLoggerLog loc logsource loglevel msg =
applyLogger loc logsource loglevel msg =<< view (_2.rtLogger)
-- | A debug message
debug :: Text -> C variant conf ()
debug = logc . LogMsgDebug
logc :: LogMsg -> C variant conf ()
logc = tell . return
-- * Utilities
resNoop :: Monad m => m ApplyResult
resNoop = return ResNoop
resSuccess :: Monad m => m ApplyResult
resSuccess = return ResSuccess
resFailed :: Monad m => Text -> m ApplyResult
resFailed = return . ResFailed
ppApplication :: Application variant -> String
ppApplication = undefined -- drawForest . map (either id fst <$>)
ppResults :: Results c -> String
ppResults = drawForest . map (fmap fst)
-- | traceShow a a
tr :: Show a => a -> a
tr a = D.traceShow a a
|
SimSaladin/hahet
|
src/Hahet/Internal.hs
|
mit
| 6,976 | 0 | 14 | 1,607 | 1,311 | 730 | 581 | -1 | -1 |
import Data.Vector (Vector)
import qualified Data.Vector as Vec
triangle :: Vector (Vector Int)
triangle = Vec.fromList [
Vec.fromList [75],
Vec.fromList [95, 64],
Vec.fromList [17, 47, 82],
Vec.fromList [18, 35, 87, 10],
Vec.fromList [20, 04, 82, 47, 65],
Vec.fromList [19, 01, 23, 75, 03, 34],
Vec.fromList [88, 02, 77, 73, 07, 63, 67],
Vec.fromList [99, 65, 04, 28, 06, 16, 70, 92],
Vec.fromList [41, 41, 26, 56, 83, 40, 80, 70, 33],
Vec.fromList [41, 48, 72, 33, 47, 32, 37, 16, 94, 29],
Vec.fromList [53, 71, 44, 65, 25, 43, 91, 52, 97, 51, 14],
Vec.fromList [70, 11, 33, 28, 77, 73, 17, 78, 39, 68, 17, 57],
Vec.fromList [91, 71, 52, 38, 17, 14, 91, 43, 58, 50, 27, 29, 48],
Vec.fromList [63, 66, 04, 68, 89, 53, 67, 30, 73, 16, 69, 87, 40, 31],
Vec.fromList [04, 62, 98, 27, 23, 09, 70, 98, 73, 93, 38, 53, 60, 04, 23]]
maxPathSum :: Vector (Vector Int) -> Int
maxPathSum t = f t 0 0
where
f t iy ix
| iy == Vec.length t - 1 = t Vec.! iy Vec.! ix
| otherwise = (t Vec.! iy Vec.! ix) + (max (f t (iy+1) ix) (f t (iy+1) (ix+1)))
solve :: Int
solve = maxPathSum triangle
main = putStrLn $ show solve
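-- How maxPathSum recurses: on the bottom row it returns the cell itself,
-- otherwise cell value + the larger of the two sub-paths directly below.
-- For a tiny triangle [[1],[2,3]] this evaluates to 1 + max 2 3 = 4. The
-- naive recursion re-evaluates shared sub-paths, which is still cheap for
-- 15 rows (2^14 leaf paths).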
|
pshendry/project-euler-solutions
|
0018/solution.hs
|
mit
| 1,186 | 0 | 14 | 297 | 703 | 414 | 289 | 27 | 1 |
module Logic.Simulation where
import Logic.Types
import Logic.Data.Units
import Control.Lens
import Control.Monad.State
import Data.Maybe (fromJust)
import qualified Data.Map as Map (lookup, fromList)
tickTime = 1.0/60.0 :: Float
simulationStep :: PlayerCommandSet -> GameState -> GameState
simulationStep cmds g = flip execState g $ do
tick += 1
-- first we want to advance all units that are already in motion
units . traversed . movementCooldown %= cooldown
units . traversed . filtered isMoving . attackCooldown %= cooldown
-- move the ones that are moving
units . traversed %= move
-- then see if any of those is ready to make an action
--actions <- units . traversed %= makeAction
return ()
isMoving :: Unit -> Bool
isMoving u = case u ^. unitState of
Moving {} -> True
_ -> False
-- | The sub-tick values can only be used in the next following tick
cooldown a
| a == 0 = 0
| a < 0 = 0
| otherwise = a - tickTime
applyMovement :: Direction -> Position -> Position
applyMovement d (Position x y) = Position (x+dX) (y+dY)
where
Position dX dY = toMovement d
toMovement :: Direction -> Position
toMovement DirN = Position 0 (-1)
toMovement DirNE = Position 1 (-1)
toMovement DirNW = Position (-1) (-1)
toMovement DirW = Position (-1) 0
toMovement DirE = Position 1 0
toMovement DirS = Position 0 1
toMovement DirSE = Position 1 1
toMovement DirSW = Position (-1) 1
move :: Unit -> Unit
move = execState $ do
mcd <- use movementCooldown
tp <- use unitType
let unitData = fromJust $ Map.lookup tp unitsBase
let speed = unitData ^. movementSpeed
if mcd <= 0
then do
(position %=) . applyMovement =<< use direction
movementCooldown .= speed
(direction .=) =<< calculateDirection
else return ()
-- | This function should probably be replaced by actual pathfinding
calculateDirection :: State Unit Direction
calculateDirection = do
(Position x y) <- use position
(Position tx ty) <- findTargetPosition
let dx = tx - x
let dy = ty - y
return $
if dx == 0 then
if dy == 0 then DirN -- on target, essentially impossible
else if dy > 0 then DirS
else DirN
else if dx > 0 then
if dy == 0 then DirE
else if dy > 0 then DirSE
else DirNE
else if dy == 0 then DirW
else if dy > 0 then DirSW
else DirNW
findTargetPosition :: State Unit Position
findTargetPosition = do
(Moving t) <- use unitState
case t of
(PositionTarget p) -> return p
(UnitTarget uid) -> error "not implemented yet, also change signature"
|
HarvestGame/logic-prototype
|
src/Logic/Simulation.hs
|
mit
| 2,885 | 0 | 13 | 920 | 834 | 425 | 409 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable , ExistentialQuantification
,ScopedTypeVariables, StandaloneDeriving, RecordWildCards, FlexibleContexts, CPP
,GeneralizedNewtypeDeriving #-}
module Main where
import Prelude hiding (div)
import Transient.Base
#ifdef ghcjs_HOST_OS
hiding ( option,runCloud')
#endif
import GHCJS.HPlay.View
#ifdef ghcjs_HOST_OS
hiding (map)
#else
hiding (map, option,runCloud')
#endif
import Transient.Move hiding(teleport)
import Control.Applicative
import Control.Monad
import Data.Typeable
import Data.IORef
import Control.Concurrent (threadDelay)
import Control.Monad.IO.Class
import Data.Monoid
import Data.String
{-
Example with different input fields with events and haskell combinators
The hplayground version is running at:
http://tryplayg.herokuapp.com/try/widgets.hs/edit
That running version uses the Haste Haskell-to-JS compiler, while this one has to be compiled
with GHCJS. Some differences:
This is a client AND server side app, while the hplayground one is purely client-side
If you have installed transient, transient-universe and ghcjs-hplay packages, just compile and run it with
ghcjs examples/widgets.hs -o static/out
runghc examples/widgets.hs
Also is different:
now Widgets run in his own monad. To render them and convert them to the Transient monad it
uses `render`. Since `simpleWebApp` expect a `Cloud` application, use `local` to run a local transient computation. `onBrowser` only execute in the web browser, so the server application does nothing. Simply stay watching at the port 8081 for browser requests.
Also the <br> tags have been moved to the widgets and the **> has been substituted by the more standard <|> operator. In the other side, rawHtml (=== wraw) is more readable.
-}
main= simpleWebApp 8081 $ onBrowser $ local $ buttons <|> linksample
where
linksample= do
r <- render $ br ++> br ++> wlink "Hi!" (toElem "This link say Hi!")`fire` OnClick
render $ rawHtml . b $ " returns "++ r
buttons :: TransIO ()
buttons= do
render . rawHtml $ p "Different input elements:"
radio <|> checkButton <|> select
checkButton :: TransIO ()
checkButton=do
rs <- render $ br ++> br ++> getCheckBoxes(
((setCheckBox False "Red" <++ b "red") `fire` OnClick)
<> ((setCheckBox False "Green" <++ b "green") `fire` OnClick)
<> ((setCheckBox False "blue" <++ b "blue") `fire` OnClick))
render $ rawHtml $ fromString " returns: " <> b (show rs)
radio :: TransIO ()
radio= do
r <- render $ getRadio [fromString v ++> setRadioActive v
| v <- ["red","green","blue"]]
render $ rawHtml $ fromString " returns: " <> b ( show r )
select :: TransIO ()
select= do
r <- render $ br ++> br ++> getSelect
( setOption "red" (fromString "red")
<|> setOption "green" (fromString "green")
<|> setOption "blue" (fromString "blue"))
`fire` OnClick
render $ rawHtml $ fromString " returns: " <> b ( show r )
|
geraldus/transient-universe
|
examples/widgets.hs
|
mit
| 3,228 | 0 | 19 | 826 | 577 | 303 | 274 | 45 | 1 |
module Utilities (
build, buildWithCmdOptions, buildWithResources, applyPatch, runBuilder,
runBuilderWith, builderEnvironment, needBuilder, needLibrary,
installDirectory, installData, installScript, installProgram, linkSymbolic,
contextDependencies, stage1Dependencies, libraryTargets, topsortPackages,
contextDependencies'
) where
import qualified System.Directory.Extra as IO
import Hadrian.Haskell.Cabal
import Hadrian.Oracles.ArgsHash
import Hadrian.Oracles.Path
import Hadrian.Utilities
import CommandLine
import Context
import qualified Context as C
import Expression hiding (builder, inputs, outputs, way, stage, package)
import GHC
import Oracles.Setting
import Oracles.PackageData
import Settings
import Settings.Builders.Ar
import Target
import UserSettings
-- | Build a 'Target' with the right 'Builder' and command line arguments.
-- Force a rebuild if the argument list has changed since the last build.
build :: Target -> Action ()
build = customBuild [] []
-- | Build a 'Target' with the right 'Builder' and command line arguments,
-- acquiring necessary resources. Force a rebuild if the argument list has
-- changed since the last build.
buildWithResources :: [(Resource, Int)] -> Target -> Action ()
buildWithResources rs = customBuild rs []
-- | Build a 'Target' with the right 'Builder' and command line arguments,
-- using given options when executing the build command. Force a rebuild if
-- the argument list has changed since the last build.
buildWithCmdOptions :: [CmdOption] -> Target -> Action ()
buildWithCmdOptions = customBuild []
customBuild :: [(Resource, Int)] -> [CmdOption] -> Target -> Action ()
customBuild rs opts target = do
let targetBuilder = builder target
needBuilder targetBuilder
path <- builderPath targetBuilder
argList <- interpret target getArgs
verbose <- interpret target verboseCommands
let quietlyUnlessVerbose = if verbose then withVerbosity Loud else quietly
trackArgsHash target -- Rerun the rule if the hash of argList has changed.
withResources rs $ do
putInfo target
quietlyUnlessVerbose $ case targetBuilder of
Ar _ -> do
output <- interpret target getOutput
if "//*.a" ?== output
then arCmd path argList
else do
input <- interpret target getInput
top <- topDirectory
echo <- cmdEcho
cmd echo [Cwd output] [path] "x" (top -/- input)
Configure dir -> do
-- Inject /bin/bash into `libtool`, instead of /bin/sh, otherwise Windows breaks.
-- TODO: Figure out why.
bash <- bashPath
echo <- cmdEcho
let env = AddEnv "CONFIG_SHELL" bash
cmd Shell echo env [Cwd dir] [path] opts argList
HsCpp -> captureStdout target path argList
GenApply -> captureStdout target path argList
GenPrimopCode -> do
src <- interpret target getInput
file <- interpret target getOutput
input <- readFile' src
Stdout output <- cmd (Stdin input) [path] argList
writeFileChanged file output
Make dir -> do
echo <- cmdEcho
cmd Shell echo path ["-C", dir] argList
_ -> do
echo <- cmdEcho
cmd echo [path] argList
-- | Suppress build output depending on the @--progress-info@ flag.
cmdEcho :: Action CmdOption
cmdEcho = do
progressInfo <- cmdProgressInfo
return $ EchoStdout (progressInfo `elem` [Normal, Unicorn])
-- | Run a builder, capture the standard output, and write it to a given file.
captureStdout :: Target -> FilePath -> [String] -> Action ()
captureStdout target path argList = do
file <- interpret target getOutput
Stdout output <- cmd [path] argList
writeFileChanged file output
-- | Apply a patch by executing the 'Patch' builder in a given directory.
applyPatch :: FilePath -> FilePath -> Action ()
applyPatch dir patch = do
let file = dir -/- patch
needBuilder Patch
path <- builderPath Patch
putBuild $ "| Apply patch " ++ file
quietly $ cmd Shell [Cwd dir] [path, "-p0 <", patch]
-- | Install a directory.
installDirectory :: FilePath -> Action ()
installDirectory dir = do
path <- fixAbsolutePathOnWindows =<< setting InstallDir
putBuild $ "| Install directory " ++ dir
quietly $ cmd path dir
-- | Install data files to a directory and track them.
installData :: [FilePath] -> FilePath -> Action ()
installData fs dir = do
path <- fixAbsolutePathOnWindows =<< setting InstallData
need fs
forM_ fs $ \f -> putBuild $ "| Install data " ++ f ++ " to " ++ dir
quietly $ cmd path fs dir
-- | Install an executable file to a directory and track it.
installProgram :: FilePath -> FilePath -> Action ()
installProgram f dir = do
path <- fixAbsolutePathOnWindows =<< setting InstallProgram
need [f]
putBuild $ "| Install program " ++ f ++ " to " ++ dir
quietly $ cmd path f dir
-- | Install an executable script to a directory and track it.
installScript :: FilePath -> FilePath -> Action ()
installScript f dir = do
path <- fixAbsolutePathOnWindows =<< setting InstallScript
need [f]
putBuild $ "| Install script " ++ f ++ " to " ++ dir
quietly $ cmd path f dir
-- | Create a symbolic link from source file to target file (when symbolic links
-- are supported) and track the source file.
linkSymbolic :: FilePath -> FilePath -> Action ()
linkSymbolic source target = do
lns <- setting LnS
unless (null lns) $ do
need [source] -- Guarantee source is built before printing progress info.
let dir = takeDirectory target
liftIO $ IO.createDirectoryIfMissing True dir
putProgressInfo =<< renderAction "Create symbolic link" source target
quietly $ cmd lns source target
isInternal :: Builder -> Bool
isInternal = isJust . builderProvenance
-- | Make sure a 'Builder' exists and rebuild it if out of date.
needBuilder :: Builder -> Action ()
needBuilder (Configure dir) = need [dir -/- "configure"]
needBuilder (Make dir) = need [dir -/- "Makefile"]
needBuilder builder = when (isInternal builder) $ do
path <- builderPath builder
need [path]
-- | Write a Builder's path into a given environment variable.
builderEnvironment :: String -> Builder -> Action CmdOption
builderEnvironment variable builder = do
needBuilder builder
path <- builderPath builder
return $ AddEnv variable path
runBuilder :: Builder -> [String] -> Action ()
runBuilder = runBuilderWith []
-- | Run a builder with given list of arguments using custom 'cmd' options.
runBuilderWith :: [CmdOption] -> Builder -> [String] -> Action ()
runBuilderWith options builder args = do
needBuilder builder
path <- builderPath builder
let note = if null args then "" else " (" ++ intercalate ", " args ++ ")"
putBuild $ "| Run " ++ show builder ++ note
quietly $ cmd options [path] args
-- | Given a 'Context' this 'Action' looks up its package dependencies and wraps
-- the results in appropriate contexts. The only subtlety here is that we never
-- depend on packages built in 'Stage2' or later, therefore the stage of the
-- resulting dependencies is bounded from above at 'Stage1'. To compute package
-- dependencies we scan package @.cabal@ files, see 'pkgDependencies' defined
-- in "Hadrian.Haskell.Cabal".
contextDependencies :: Context -> Action [Context]
contextDependencies Context {..} = do
exists <- doesFileExist (pkgCabalFile package)
if not exists then return [] else do
let pkgContext = \pkg -> Context (min stage Stage1) pkg way
deps <- pkgDependencies package
pkgs <- sort <$> interpretInContext (pkgContext package) getPackages
return . map pkgContext $ intersectOrd (compare . pkgName) pkgs deps
-- | Lookup dependencies of a 'Package' in the vanilla Stage1 context.
stage1Dependencies :: Package -> Action [Package]
stage1Dependencies =
fmap (map Context.package) . contextDependencies . vanillaContext Stage1
-- | Given a library 'Package' this action computes all of its targets. See
-- 'packageTargets' for the explanation of the @includeGhciLib@ parameter.
libraryTargets :: Bool -> Context -> Action [FilePath]
libraryTargets includeGhciLib context = do
confFile <- pkgConfFile context
libFile <- pkgLibraryFile context
lib0File <- pkgLibraryFile0 context
lib0 <- buildDll0 context
ghciLib <- pkgGhciLibraryFile context
ghciFlag <- if includeGhciLib
then interpretInContext context $ getPkgData BuildGhciLib
else return "NO"
let ghci = ghciFlag == "YES" && (stage context == Stage1 || stage1Only)
return $ [ confFile, libFile ] ++ [ lib0File | lib0 ] ++ [ ghciLib | ghci ]
-- | Coarse-grain 'need': make sure all given libraries are fully built.
needLibrary :: [Context] -> Action ()
needLibrary cs = need =<< concatMapM (libraryTargets True) cs
-- HACK (izgzhen), see https://github.com/snowleopard/hadrian/issues/344.
-- | Topological sort of packages according to their dependencies.
topsortPackages :: [Package] -> Action [Package]
topsortPackages pkgs = do
elems <- mapM (\p -> (p,) <$> stage1Dependencies p) pkgs
return $ map fst $ topSort elems
where
annotateInDeg es e =
(foldr (\e' s -> if fst e' `elem` snd e then s + 1 else s) (0 :: Int) es, e)
topSort [] = []
topSort es =
let annotated = map (annotateInDeg es) es
inDegZero = map snd $ filter ((== 0). fst) annotated
in inDegZero ++ topSort (es \\ inDegZero)
-- | Print out information about the command being executed.
putInfo :: Target -> Action ()
putInfo t = putProgressInfo =<< renderAction
("Run " ++ show (builder t) ++ contextInfo)
(digest $ inputs t)
(digest $ outputs t)
where
contextInfo = concat $ [ " (" ]
++ [ "stage = " ++ show (stage $ context t) ]
++ [ ", package = " ++ pkgName (package $ context t) ]
++ [ ", way = " ++ show (way $ context t) | (way $ context t) /= vanilla ]
++ [ ")" ]
digest [] = "none"
digest [x] = x
digest (x:xs) = x ++ " (and " ++ show (length xs) ++ " more)"
contextDependencies' :: Context -> Action [Context]
contextDependencies' context = filter (\d -> C.package d /= terminfo) <$> contextDependencies context
|
izgzhen/hadrian
|
src/Utilities.hs
|
mit
| 10,577 | 0 | 21 | 2,560 | 2,679 | 1,334 | 1,345 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Main where
import Data.Maybe (mapMaybe)
import Data.Foldable (Foldable, foldrM, forM_)
import Data.Traversable (mapAccumL)
import Data.Array ((!))
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.UTF8 as UTF8
import Text.Regex.Base.RegexLike (RegexLike)
import qualified Text.Regex.TDFA as TDFA (Regex, makeRegex, matchAllText)
{- import qualified Text.Regex.TDFA.UTF8 -}
import qualified Data.ByteString.Lazy.Search as BoyerMoore (nonOverlappingIndices)
import Control.Concurrent.Async (async, wait)
import Control.Concurrent.Chan
import Control.DeepSeq (force)
import Control.Monad (liftM)
import System.Environment (getArgs)
import System.Directory.Tree
import Text.Printf (printf)
type MatchData = [(BL.ByteString, BL.ByteString)]
type MatchFunc = BL.ByteString -> Maybe MatchData
type MatchedLine = (Int, MatchData)
type Line = (Int, BL.ByteString)
buildFileReader :: (FilePath -> IO BL.ByteString) -> FilePath -> IO (FilePath, BL.ByteString)
buildFileReader reader path =
do
file <- reader path
return (path, file)
lazyReader :: FilePath -> IO (FilePath, BL.ByteString)
lazyReader = buildFileReader BL.readFile
regexMatcher :: RegexLike regex BL.ByteString => regex -> MatchFunc
regexMatcher needle haystack =
let
ms = TDFA.matchAllText needle haystack
ms' = map (!0) ms
haystackLength = BL.length haystack
haystackSuffix i = BL.drop (fromIntegral i) haystack
haystackSlice i n = BL.take (fromIntegral n) (haystackSuffix i)
combine i (m,(j,n)) = (j+n, (haystackSlice i (j - i), m))
(iLast, tmp) = mapAccumL combine 0 ms'
result = if haystackLength == fromIntegral iLast
then tmp
else tmp ++ [(haystackSuffix iLast, BL.empty)]
in
case ms of
[] -> Nothing
_ -> Just result
boyerMooreMatcher :: B.ByteString -> MatchFunc
boyerMooreMatcher needle haystack =
case occurences of
[] -> Nothing
_ -> Just result
where
indices = BoyerMoore.nonOverlappingIndices needle
needleLength = B.length needle
lazyNeedle = BL.fromStrict needle
occurences = map fromIntegral $ indices haystack
haystackLength = BL.length haystack
haystackSuffix i = BL.drop (fromIntegral i) haystack
haystackSlice i n = BL.take (fromIntegral n) (haystackSuffix i)
combine i j = (j + needleLength, (haystackSlice i (j - i), lazyNeedle))
(iLast, tmp) = mapAccumL combine 0 occurences
result = if haystackLength == fromIntegral iLast
then tmp
else tmp ++ [(haystackSuffix iLast, BL.empty)]
matchLine :: MatchFunc -> Line -> Maybe MatchedLine
matchLine finder (n, line) = (,) n <$> finder line
matchLines :: MatchFunc -> BL.ByteString -> [MatchedLine]
matchLines finder haystack = mapMaybe (matchLine finder) lines
where lines = [1..] `zip` UTF8.lines haystack
highlight s =
let
hBegin = UTF8.fromString "\x1b[30;43m"
hEnd = UTF8.fromString "\x1b[0m"
in
if s == BL.empty
then s
else hBegin `BL.append` s `BL.append` hEnd
formatMatchedLine :: MatchedLine -> String
formatMatchedLine (n, ms) =
printf "%i: %s" n (UTF8.toString h)
where
h = BL.concat $ map (\(s,t) -> s `BL.append` highlight t) ms
interpretArgs [needle, path] = (UTF8.fromString needle, path)
interpretArgs [needle] = interpretArgs [needle, "./"]
interpretArgs [] = interpretArgs ["foo"]
perform :: (Foldable f) => f (FilePath, BL.ByteString) -> BL.ByteString -> IO ()
perform files pat = do
fChannel <- newChan
let
{- regex = TDFA.makeRegex (Text.Regex.TDFA.UTF8.Utf8 pattern) :: TDFA.Regex -}
{- finder = regexMatcher regex -}
needle = BL.toStrict pat
finder = boyerMooreMatcher needle
spawnSearch (path, file) tasks = (: tasks) <$> async (doMatch path file)
doMatch path file = case matchLines finder file of
matches@(m : matches') ->
do mChannel <- startWithFile path
forM_ (force matches) $ reportFileMatch mChannel
endWithFile mChannel
[] -> return ()
startWithFile path = do mChannel <- newChan
writeChan fChannel (Just (path, mChannel))
return mChannel
reportFileMatch mChannel m = writeChan mChannel (Just m)
endWithFile mChannel = writeChan mChannel Nothing
printMatches mChannel = printMatches'
where printMatches' = readChan mChannel >>= printMatch
printMatch (Just m) = do putStrLn $ formatMatchedLine m
printMatches'
printMatch Nothing = putChar '\n'
printResultsLoop = readChan fChannel >>= printResults
where printResults (Just (path, mChannel)) =
do printf "%s:\n" path
printMatches mChannel
printResultsLoop
printResults Nothing = return ()
printTask <- async printResultsLoop
tasks <- foldrM spawnSearch [] files
forM_ tasks wait
writeChan fChannel Nothing
wait printTask
main :: IO ()
main = do
(needle, path) <- interpretArgs <$> getArgs
(_ :/ dirTree) <- readDirectoryWith lazyReader path
async (perform dirTree needle) >>= wait
|
marshall-lee/the_iron_searcher
|
Main.hs
|
mit
| 5,372 | 0 | 18 | 1,317 | 1,672 | 882 | 790 | 121 | 4 |
import Data.Char(ord)
import Data.List(sort)
getNames :: String -> [String]
getNames input = read $ "[" ++ input ++ "]"
charToInt c = ord c - ord '@'
wordToInt w = sum $ map charToInt w
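-- e.g. charToInt 'A' == 1 and wordToInt "COLIN" == 3 + 15 + 12 + 9 + 14 == 53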
main = do
input <- readFile "euler_0022.dat"
print $ sum $ zipWith (*) [1..] $ map wordToInt $ sort $ getNames input
|
dpieroux/euler
|
0/0022.hs
|
mit
| 316 | 0 | 12 | 70 | 142 | 71 | 71 | 9 | 1 |
-- going back and forth between chapters to remember functions
-- eg. I couldn't remember translucent so had to go back to chpt 1
-- even though I was on chpt 1
-- I couldn't tab :/ it would shift entire line
-- eg. tab = to push = forward would tab entire line of code
-- is there any way to make the code part smaller or larger
-- like a drag to increase the ratio of editor vs viewer? eg like repl.it
program = drawingOf(mouse)
mouse = colored(ear1, translucent(black)) &
colored(ear2, translucent(black)) &
colored(body, (translucent(black)))
where ear1 = translated(solidCircle(1), 2, 2)
ear2 = translated(solidCircle(1), -2, 2)
body = solidCircle(2)
-- pattern matching
-- f(x) | x == pi = 1
-- | otherwise = 2
|
kammitama5/kammitama5.github.io
|
images/play_img/notes2.hs
|
mit
| 771 | 1 | 11 | 181 | 145 | 83 | 62 | -1 | -1 |
-- Recursion Schemes
{-
  Catamorphism
    foldr :: (a -> b -> b) -> b -> [a] -> b
    Deconstructs a data structure
  Anamorphism
    unfoldr :: (b -> Maybe (a, b)) -> b -> [a]
    Constructs a structure level by level
-}
-- | A fix-point type.
newtype Fix f = Fix { unFix :: f (Fix f) }
-- | Catamorphism or generic function fold.
cata :: Functor f => (f a -> a) -> (Fix f -> a)
cata f = f . fmap (cata f) . unFix
-- | Anamorphism or generic function unfold.
ana :: Functor f => (a -> f a) -> (a -> Fix f)
ana f = Fix . fmap (ana f) . f
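-- A minimal usage sketch (not part of the original snippet): a list functor,
-- an algebra folded with 'cata', and a coalgebra unfolded with 'ana'.
-- 'ListF', 'sumAlg' and 'toFix' are hypothetical helpers for illustration only.
data ListF a r = NilF | ConsF a r
instance Functor (ListF a) where
  fmap _ NilF        = NilF
  fmap f (ConsF x r) = ConsF x (f r)
sumAlg :: ListF Int Int -> Int
sumAlg NilF        = 0
sumAlg (ConsF x s) = x + s
toFix :: [a] -> Fix (ListF a)
toFix = ana coalg
  where coalg []     = NilF
        coalg (x:xs) = ConsF x xs
-- e.g. cata sumAlg (toFix [1,2,3]) == 6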
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveFunctor #-}
import Data.Functor.Foldable
type Var = String
data Exp
= Var Var
| App Exp Exp
| Lam [Var] Exp
deriving Show
data ExpF a
= VarF Var
| AppF a a
| LamF [Var] a
deriving Functor
type instance Base Exp = ExpF
instance Foldable Exp where
project (Var a) = VarF a
project (App a b) = AppF a b
project (Lam a b) = LamF a b
instance Unfoldable Exp where
embed (VarF a) = Var a
embed (AppF a b) = App a b
embed (LamF a b) = Lam a b
fvs :: Exp -> [Var]
fvs = cata phi
where phi (VarF a) = [a]
phi (AppF a b) = a ++ b
        phi (LamF a b) = foldr (filter . (/=)) b a -- remove each binder from the body's free variables
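-- e.g. (with the algebra above) fvs (Lam ["x"] (App (Var "x") (Var "y"))) == ["y"]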
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
import Data.Traversable
import Control.Monad hiding (forM_, mapM, sequence)
import Prelude hiding (mapM)
import qualified Data.Map as M
newtype Fix (f :: * -> *) = Fix { outF :: f (Fix f) }
-- Catamorphism
cata :: Functor f => (f a -> a) -> Fix f -> a
cata f = f . fmap (cata f) . outF
-- Monadic catamorphism
cataM :: (Traversable f, Monad m) => (f a -> m a) -> Fix f -> m a
cataM f = f <=< mapM (cataM f) . outF
data ExprF r
= EVar String
| EApp r r
| ELam r r
deriving (Show, Eq, Ord, Functor)
type Expr = Fix ExprF
instance Show (Fix ExprF) where
show (Fix f) = show f
instance Eq (Fix ExprF) where
Fix x == Fix y = x == y
instance Ord (Fix ExprF) where
compare (Fix x) (Fix y) = compare x y
mkApp :: Fix ExprF -> Fix ExprF -> Fix ExprF
mkApp x y = Fix (EApp x y)
mkVar :: String -> Fix ExprF
mkVar x = Fix (EVar x)
mkLam :: Fix ExprF -> Fix ExprF -> Fix ExprF
mkLam x y = Fix (ELam x y)
i :: Fix ExprF
i = mkLam (mkVar "x") (mkVar "x")
k :: Fix ExprF
k = mkLam (mkVar "x") $ mkLam (mkVar "y") $ (mkVar "x")
subst :: M.Map String (ExprF Expr) -> Expr -> Expr
subst env = cata alg where
alg (EVar x) | Just e <- M.lookup x env = Fix e
alg e = Fix e
|
Airtnp/Freshman_Simple_Haskell_Lib
|
Intro/WIW/Interpreter/Recursion-Schemes.hs
|
mit
| 2,566 | 0 | 12 | 678 | 1,103 | 568 | 535 | -1 | -1 |
-- Questions ?
-- we generalise (as far as possible) the types of the functions from block 1
myHead :: [a] -> a
myHead (x:_) = x
myTail :: [a] -> [a]
myTail (_:xs) = xs
myAppend :: [a] -> [a] -> [a]
myAppend xs ys = myAppend' xs
where --myAppend' :: [Int] -> [Int]
myAppend' (x:xs) = x:myAppend' xs
myAppend' [] = ys
myInit :: [a] -> [a]
myInit [_] = []
myInit (x:xs) = x:(myInit xs)
myLast :: [b] -> b
myLast [x] = x
myLast (_:xs) = myLast xs
myNull :: [a] -> Bool
myNull [] = True
myNull _ = False
myLength :: [a] -> Int
myLength (_:xs) = 1 + myLength xs
myLength [] = 0
myReverse :: [a] -> [a]
myReverse (x:xs) = myAppend (myReverse xs) [x]
myReverse xs = xs
myConcat :: [[a]] -> [a]
myConcat (xs:xss) = xs ++ myConcat xss
myConcat [] = []
myTake :: Int -> [a] -> [a]
myTake 0 _ = []
myTake n [] = []
myTake n (x:xs) = x:myTake (n-1) xs
myDrop :: Int -> [a] -> [a]
myDrop 0 xs = xs
myDrop n [] = []
myDrop n (x:xs) = myDrop (n-1) xs
myBangBang :: [a] -> Int -> a
myBangBang (x:xs) 0 = x
myBangBang (x:xs) n = myBangBang xs (n-1)
myInsert :: Ord a => a -> [a] -> [a]
myInsert x [] = [x]
myInsert x (y:ys) | x>y = y:myInsert x ys
| otherwise = x:y:ys
mySort :: Ord a => [a] -> [a]
mySort (x:xs) = myInsert x (mySort xs)
mySort [] = []
-- NEW STUFF
-- higher-order functions
add :: Int -> Int -> Int
add x y = x + y
myTakeWhile :: (a -> Bool) -> [a] -> [a]
myTakeWhile pred (x:xs) | pred x = x:myTakeWhile pred xs
| otherwise = []
myTakeWhile pred [] = []
-- give the type of the function; infix versus prefix notation
myCompose :: (b -> c) -> (a -> b) -> a -> c
myCompose f g x = f (g x)
myMap :: (a -> b) -> [a] -> [b]
myMap f (x:xs) = f x:myMap f xs
myMap f [] = []
test1 = myMap odd [1..10]
-- computing the sublists using map
sousListes :: [a] -> [[a]]
sousListes (x:xs) = myMap (x:) (sousListes xs) ++ sousListes xs
sousListes [] = [[]]
-- a more general function: foldr
-- infer the type of foldr
-- the "comb"-shaped picture of the list
myFoldr :: (a -> b -> b) -> b -> [a] -> b
myFoldr f k (x:xs) = f x (myFoldr f k xs)
myFoldr f k [] = k
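-- For example: myFoldr (+) 0 [1,2,3] evaluates as 1 + (2 + (3 + 0)) = 6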
myAnd' :: [Bool] -> Bool
myAnd' = foldr (&&) True
-- define reverse using foldr
myReverse' :: [a] -> [a]
myReverse' = undefined
-- an aside on anonymous lambdas
add' :: Int -> Int -> Int
add' x y = x + y
add'' :: Int -> Int -> Int
add'' = \x y -> x + y
-- with foldr
myReverse'' :: [a] -> [a]
myReverse'' = undefined
-- eta reduction
myReverse''' :: [a] -> [a]
myReverse''' = undefined
-- a "new type": String
s1 :: String
s1 = "tout un tas de Char"
--type String = [Char]
-- a new type: tuples
myFst :: (a,b) -> a
myFst (x,_) = x
-- TODO: define recursively
myDropWhile :: (a -> Bool) -> [a] -> [a]
myDropWhile f (x:xs) | f x = myDropWhile f xs
| otherwise = x:xs
myDropWhile f [] = []
myElem :: Eq a => a -> [a] -> Bool
myElem x [] = False
myElem x (y:ys) | x==y = True
| otherwise = myElem x (ys)
myNotElem :: Eq a => a -> [a] -> Bool
myNotElem x ys = not (myElem x ys)
myFilter :: (a -> Bool) -> [a] -> [a]
myFilter f (x:xs) | f x = x:myFilter f xs
| otherwise = myFilter f xs
myFilter f [] = []
mySplitAt :: Int -> [a] -> ([a],[a])
mySplitAt x ys = (myTake x ys, myDrop x ys)
myZip :: [a] -> [b] -> [(a,b)]
myZip _ [] = []
myZip [] _ = []
myZip (x:xs) (y:ys) = (x,y):myZip xs ys
myZipWith :: (a -> b -> c) -> [a] -> [b] -> [c]
myZipWith f _ [] = []
myZipWith f [] _ = []
myZipWith f (x:xs) (y:ys) = (f x y):myZipWith f xs ys
myCurry :: ((a,b) -> c) -> a -> b -> c
myCurry f x y = f(x,y)
myUncurry :: (a -> b -> c) -> (a,b) -> c
myUncurry f (x,y) = f x y
myZipWith' :: (a -> b -> c) -> [a] -> [b] -> [c]
myZipWith' = undefined
myUnzip :: [(a,b)] -> ([a],[b])
myUnzip = undefined
-- TODO: redefine using foldr
myConcat' :: [[a]] -> [a]
myConcat' = undefined
myMap' :: (a -> b) -> [a] -> [b]
myMap' = undefined
myOr' :: [Bool] -> Bool
myOr' (x:xs) = myFoldr (||) x xs
myAny :: (a -> Bool) -> [a] -> Bool
myAny = undefined
myAll :: (a -> Bool) -> [a] -> Bool
myAll = undefined
myProduct :: [Int] -> Int
myProduct (x:xs) = myFoldr (*) x xs
-- TODO: compute the 50 smallest prime numbers 2, 3, 5, 7, 11...
premiers :: [Int]
premiers = [2..]
test2 = take 50 (crible premiers)
where crible (n:ns) = n:crible (filter (\x -> x `mod` n /= 0) ns)
|
romanlp/mines-haskell
|
tp/TP2.hs
|
mit
| 4,487 | 18 | 14 | 1,164 | 2,417 | 1,303 | 1,114 | 126 | 2 |
{-#LANGUAGE GADTSyntax, RankNTypes, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, UndecidableInstances#-}
module Typeclass.TermAlgebra(TermAlgebra(var, con)) where
class Functor f => TermAlgebra h f | h -> f where
var :: forall a . a -> h a
con :: forall a . f (h a) -> h a
|
MichielDeCuyper/Algebraic-Effect-Handlers
|
src/Typeclass/TermAlgebra.hs
|
mit
| 304 | 0 | 11 | 53 | 85 | 47 | 38 | 8 | 0 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, MultiParamTypeClasses #-}
{- |
Module : ./QVTR/Logic_QVTR.hs
Description : Instance of class Logic for the QVTR logic
Copyright : (c) Daniel Calegari Universidad de la Republica, Uruguay 2013
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
-}
module QVTR.Logic_QVTR where
import QVTR.As
import QVTR.Sign
import QVTR.Print ()
import QVTR.StatAna
import QVTR.ATC_QVTR ()
import Logic.Logic
import Common.DefaultMorphism
import Data.Monoid
data QVTR = QVTR deriving Show
instance Language QVTR where
description _ = "OMG's QVT-Relations transformation, a language for the specification of model transformations"
type Morphism = DefaultMorphism Sign
-- QVTR logic
instance Monoid Transformation where
mempty = error "Not implemented!"
mappend _ _ = error "Not implemented!"
instance Sentences QVTR
Sen
Sign
Morphism
()
where
map_sen QVTR _ = return
instance Syntax QVTR
Transformation
()
()
()
instance Logic QVTR
() -- Sublogics
Transformation -- basic_spec
Sen -- sentence
() -- symb_items
() -- symb_map_items
Sign -- sign
Morphism -- morphism
() -- symbol
() -- raw_symbol
() -- proof_tree
where
stability QVTR = Experimental
empty_proof_tree _ = ()
instance StaticAnalysis QVTR
Transformation -- basic_spec
Sen -- sentence
() -- symb_items
() -- symb_map_items
Sign -- sign
Morphism -- morphism
() -- symbol
() -- raw_symbol
where
basic_analysis QVTR = Just basicAna
empty_signature QVTR = emptySign
is_subsig QVTR _ _ = True
subsig_inclusion QVTR = defaultInclusion
induced_from_morphism _ _ sig = return $ MkMorphism sig sig
signature_union QVTR sign1 _ = return sign1 -- TODO
|
spechub/Hets
|
QVTR/Logic_QVTR.hs
|
gpl-2.0
| 2,078 | 0 | 7 | 614 | 352 | 192 | 160 | 56 | 0 |
module GUI where
import Graphics.UI.Gtk
import Expr
import Lit
import Parsing
import Data.Either
import Helper
main :: IO ()
main = do
initGUI
window <- windowNew
set window [windowTitle := "Calculator", containerBorderWidth := 5,
windowDefaultWidth := 400, windowDefaultHeight := 500]
table <- tableNew 5 4 True
containerAdd window table
txtstack <- labelNew (Nothing::Maybe String)
tableAttachDefaults table txtstack 0 4 0 1
clearButton <- buttonNewWithLabel "C"
onClicked clearButton (clearLabel txtstack)
tableAttachDefaults table clearButton 1 2 4 5
equalsButton <- buttonNewWithLabel "="
onClicked equalsButton (equalsHandler txtstack)
tableAttachDefaults table equalsButton 2 3 4 5
-- Digits
button0 <- buttonNewWithLabel "0"
onClicked button0 (switchOpNum button0 txtstack)
tableAttachDefaults table button0 0 1 4 5
button1 <- buttonNewWithLabel "1"
onClicked button1 (switchOpNum button1 txtstack)
tableAttachDefaults table button1 0 1 1 2
button2 <- buttonNewWithLabel "2"
onClicked button2 (switchOpNum button2 txtstack)
tableAttachDefaults table button2 1 2 1 2
button3 <- buttonNewWithLabel "3"
onClicked button3 (switchOpNum button3 txtstack)
tableAttachDefaults table button3 2 3 1 2
button4 <- buttonNewWithLabel "4"
onClicked button4 (switchOpNum button4 txtstack)
tableAttachDefaults table button4 0 1 2 3
button5 <- buttonNewWithLabel "5"
onClicked button5 (switchOpNum button5 txtstack)
tableAttachDefaults table button5 1 2 2 3
button6 <- buttonNewWithLabel "6"
onClicked button6 (switchOpNum button6 txtstack)
tableAttachDefaults table button6 2 3 2 3
button7 <- buttonNewWithLabel "7"
onClicked button7 (switchOpNum button7 txtstack)
tableAttachDefaults table button7 0 1 3 4
button8 <- buttonNewWithLabel "8"
onClicked button8 (switchOpNum button8 txtstack)
tableAttachDefaults table button8 1 2 3 4
button9 <- buttonNewWithLabel "9"
onClicked button9 (switchOpNum button9 txtstack)
tableAttachDefaults table button9 2 3 3 4
-- Operations
addButton <- buttonNewWithLabel "+"
onClicked addButton (switchOpNum addButton txtstack)
tableAttachDefaults table addButton 3 4 1 2
subtrButton <- buttonNewWithLabel "-"
onClicked subtrButton (switchOpNum subtrButton txtstack)
tableAttachDefaults table subtrButton 3 4 2 3
multButton <- buttonNewWithLabel "*"
onClicked multButton (switchOpNum multButton txtstack)
tableAttachDefaults table multButton 3 4 3 4
divButton <- buttonNewWithLabel "/"
onClicked divButton (switchOpNum divButton txtstack)
tableAttachDefaults table divButton 3 4 4 5
onDestroy window mainQuit
widgetShowAll window
mainGUI
-- Event handlers
-- This function takes the text from the button and put it on the label
switchOpNum :: Button -> Label -> IO ()
switchOpNum b l = do label <- get b buttonLabel
strin <- labelGetLabel l
                        -- Due to a bug in recent Gtk2Hs versions, these labels have to be annotated as String
labelSetText l ((strin::String) ++ (label::String))
-- Clears the given label
clearLabel :: Label -> IO ()
clearLabel l = labelSetText l ""
-- | The 'fromRight' function extracts the element out of a 'Right' and
-- throws an error if its argument takes the form @Left _@.
fromRight :: Either a b -> b
fromRight (Left _) = error "Either.Unwrap.fromRight: Argument takes form 'Left _'" -- yuck
fromRight (Right x) = x
-- Evaluates the expression, converts it to the int, and puts it on the label
equalsHandler :: Label -> IO ()
equalsHandler l =
do txt <- labelGetLabel l
clearLabel l
case parse pCommand txt of
[(cmd, "")] ->
case cmd of
Eval e -> labelSetText l ((txt::String) ++ " = " ++ (showLit $ fromRight $ eval Empty e))
_ -> labelSetText l ""
|
MaximKN/Haskell1
|
src/GUI.hs
|
gpl-3.0
| 3,914 | 0 | 18 | 836 | 1,116 | 511 | 605 | 86 | 2 |
module System.Console.RemoteCLI.CommandLine (
CommandLine (..)
, Scope (..)
, Option (..)
, Value (..)
, fromString
, toString
) where
import Text.ParserCombinators.Parsec
import Control.Monad.Writer (Writer, execWriter, tell)
import Data.Char (isSpace)
import Data.List (dropWhileEnd)
import Control.Applicative ((<$>), (<*>), (*>))
type Identifier = String
-- | Algebraic data structure representing the internal format,
-- i.e. parsed, of the command line
data CommandLine = CommandLine Scope Identifier [Option]
deriving (Show, Eq)
-- | Tag to tell whether the local or the default scope is requested
data Scope = Local | Default
deriving (Show, Eq)
-- | Tag to describe an option
data Option = Option Identifier (Maybe Value)
deriving (Show, Eq)
-- | A value to an option
data Value = Null
| Bool Bool
| Int Int
| String String
deriving (Show, Eq)
-- | Converts a command line string to the internal format
fromString :: String -> Either [String] CommandLine
fromString s =
case runParser lineParser () "" (stripEnd s) of
Right commandLine -> Right commandLine
Left e -> Left $ lines (show e)
-- | Converts the internal format to a string
toString :: CommandLine -> String
toString cl = execWriter $ serialize cl
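-- A round-trip sketch (illustrative only, assuming the parser and serializer
-- defined in this module behave as written):
-- fromString ":help verbose = True"
--   == Right (CommandLine Local "help" [Option "verbose" (Just (Bool True))])
-- toString (CommandLine Default "show" [Option "all" Nothing]) == "show all "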
-- | Top level parser for the command line
lineParser :: Parser CommandLine
lineParser = CommandLine <$> scope <*> identifier <*> many anOption
-- | Parse the scope
scope :: Parser Scope
scope = spaces *> option Default (char ':' *> return Local)
-- | Parse an identifier
identifier :: Parser String
identifier = spaces *> ((:) <$> oneOf beginners <*> many (oneOf followers))
where
beginners = ['a'..'z'] ++ ['A'..'Z']
followers = beginners ++ ['0'..'9'] ++ "_-"
-- | Parse an option
anOption :: Parser Option
anOption = spaces *> (Option <$> identifier <*> value)
-- | Parse a value
value :: Parser (Maybe Value)
value = spaces *> ((char '=' *> determineValue)
<|> return Nothing)
where
determineValue = try valueNull
<|> try valueBool
<|> try valueInt
<|> try valueString
<?> "A valid type"
-- | Parsing a Null literal
valueNull :: Parser (Maybe Value)
valueNull = spaces *> string "Null" *> return (Just Null)
-- | Parsing a bool literal
valueBool :: Parser (Maybe Value)
valueBool = spaces *>
(Just <$> (Bool . read) <$> (string "True" <|> string "False"))
-- | Parsing an int literal
valueInt :: Parser (Maybe Value)
valueInt = spaces *> (Just <$> (Int . read) <$> num)
where
num = (:) <$> sign <*> digits
sign = option ' ' (char '-')
digits = many1 digit
-- | Parsing a string literal
valueString :: Parser (Maybe Value)
valueString = spaces *>
(Just <$> String <$> between quote quote (many $ noneOf "\""))
where
quote = char '\"'
-- | Serialize the command line to a string
serialize :: CommandLine -> Writer String ()
serialize (CommandLine s c os) = do
serializeScope s
serializeIdentity c
mapM_ serializeOption os
where
serializeScope Local = tell ":" >> blank
serializeScope Default = return ()
serializeIdentity i = tell i >> blank
serializeOption (Option i p) = do
serializeIdentity i
serializeParameter p
serializeParameter Nothing = return ()
serializeParameter (Just Null) = tell "= Null" >> blank
serializeParameter (Just (Bool b)) = assign b >> blank
serializeParameter (Just (Int n)) = assign n >> blank
serializeParameter (Just (String s')) = assign s' >> blank
assign x = tell $ "= " ++ show x
blank = tell " "
-- | Strip spaces from the end of the string
stripEnd :: String -> String
stripEnd = dropWhileEnd isSpace
|
SneakingCat/rcli
|
src/System/Console/RemoteCLI/CommandLine.hs
|
gpl-3.0
| 3,937 | 0 | 11 | 1,062 | 1,118 | 591 | 527 | 83 | 6 |
module Chapter8_recursion where
import Data.List (intersperse)
mc91 :: Int -> Int
mc91 n
| n > 100 = n - 10
| otherwise = mc91 . mc91 $ n + 11
digitToWord :: Int -> String
digitToWord 1 = "one"
digitToWord 2 = "two"
digitToWord 3 = "three"
digitToWord 4 = "four"
digitToWord 5 = "five"
digitToWord 6 = "six"
digitToWord 7 = "seven"
digitToWord 8 = "eight"
digitToWord 9 = "nine"
digitToWord 0 = "zero"
digitToWord _ = error ">10"
digits :: Int -> [Int]
digits i = go i [] where
go n a
| n < 10 = n : a
| otherwise = go (n `div` 10) (n `mod` 10 : a)
wordNumber :: Int -> String
wordNumber n = concat . intersperse "-" . map digitToWord $ digits n
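-- Illustrative examples (not part of the original exercises):
-- digits 12345 == [1,2,3,4,5]
-- wordNumber 123 == "one-two-three"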
|
maruks/haskell-book
|
src/Chapter8_recursion.hs
|
gpl-3.0
| 665 | 0 | 11 | 153 | 293 | 148 | 145 | 25 | 1 |
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
module HipSpec.Lang.PolyFOL.Types where
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
{-# ANN module "HLint: ignore Use camelCase" #-}
-- | Clauses.
--
-- The a type variable is used for many different identifiers:
-- * Quantified variables
-- * Type variables
-- * Function and predicate symbols (Nick: constants (or simply symbols))
-- * Type constructor symbols
data Clause a b
= SortSig
{ sig_id :: a
-- ^ Symbol this signature is for
, sort_args :: Int
-- ^ Number of kind arguments, see Note [Simple Kinded Sorts]
}
| TypeSig
{ sig_id :: a
-- ^ Symbol this signature is for
, ty_vars :: [b]
-- ^ Type variables
, sig_args :: [Type a b]
-- ^ Types of the arguments
, sig_res :: Type a b
    -- ^ Result type for this identifier
}
| Clause
{ cl_name :: Maybe Int
-- ^ Name for this clause to get unsatisfiable cores
, cl_ty_triggers :: [Trigger a]
-- ^ What things trigger the instantiation of this clause?
-- For function definitions, the function causes it
-- For data type-related definitions, the type constructor and
-- its data constructors do
, cl_type :: ClType
-- ^ Axiom, conjecture...
, ty_vars :: [b]
-- ^ Top-level type variables
, cl_formula :: Formula a b
-- ^ Formula in this clause
}
| Comment
{ comment :: String
-- ^ A comment.
}
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
data Trigger a
= TySymb a
-- ^ Needs to be first!
| Symb a
| Source
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
data ClType
= Axiom
-- ^ Axioms, or definitions, or hypothesis or negated conjectures
-- Is it important to distinguish between these?
| Goal
-- ^ Conjecture
deriving (Eq,Ord,Show)
data Type a b
= TyCon a [Type a b]
| TyVar b
| TType
-- ^ The type of types
| Integer
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
-- | Term operations
data TOp = Equal | Unequal
deriving (Eq,Ord,Show)
-- | Formula operations
data FOp = And | Or | Implies | Equiv
deriving (Eq,Ord,Show)
-- | Quantifier operations
data Q = Forall | Exists
deriving (Eq,Ord,Show)
data Formula a b
= TOp TOp (Term a b) (Term a b)
-- ^ Equality and inequality
| FOp FOp (Formula a b) (Formula a b)
-- ^ Logical connectives
| Neg (Formula a b)
-- ^ Negation
| Q Q b (Type a b) (Formula a b)
-- ^ Quantification
{-
| Pred a [Formula a b]
-- ^ Predication
-}
| DataDecl [DataDecl a b] (Formula a b)
-- ^ One or many data declarations for SMT data types,
-- or a formula if it is not supported
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
data DataDecl a b = Data a [Type a b] [(a,[(a,Type a b)])]
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
-- | Terms.
data Term a b
= Apply a [Type a b] [Term a b]
-- ^ Symbol applied to arguments (can be empty)
| Var b
-- ^ Quantified variable
| Lit Integer
-- ^ An integer
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
|
danr/hipspec
|
src/HipSpec/Lang/PolyFOL/Types.hs
|
gpl-3.0
| 3,294 | 0 | 11 | 943 | 726 | 428 | 298 | 58 | 0 |
module KRPCHS.Internal.NetworkUtils
( helloMsg
, helloStreamMsg
, connNameMsg
, recvN
, recvId
, recvMsg
, sendMsg
) where
import KRPCHS.Internal.SerializeUtils
import Control.Monad
import Network.Socket hiding (send, recv, sendTo, recvFrom)
import Network.Socket.ByteString
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as BC
import Data.Bits
import Data.Word
import Text.ProtocolBuffers
helloMsg :: BC.ByteString
helloMsg = BC.pack "HELLO-RPC\x0\x0\x0"
helloStreamMsg :: BC.ByteString
helloStreamMsg = BC.pack "HELLO-STREAM"
connNameMsg :: String -> BC.ByteString
connNameMsg name = BC.pack $ take 32 $ name ++ padding
where padding = repeat '\x0'
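-- | Receive exactly @n@ bytes from the socket, failing if the peer closes
-- the connection before enough bytes have arrived.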
recvN :: Socket -> Int -> IO BS.ByteString
recvN sock n =
let
recvN' numBytesRead bsList | numBytesRead == n =
return $ BS.concat $ reverse bsList
recvN' numBytesRead bsList = do
bytes <- recv sock (n - numBytesRead)
when (BS.length bytes == 0) $ fail "Read error"
recvN' (numBytesRead + BS.length bytes) (bytes : bsList)
in
recvN' 0 []
recvId :: Socket -> IO BS.ByteString
recvId sock = recvN sock 16
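-- | Receive one length-delimited message: a protobuf varint size prefix
-- followed by that many bytes of payload.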
recvMsg :: Socket -> IO BS.ByteString
recvMsg sock = do
sz <- recvSize BS.empty
recvN sock (fromIntegral sz)
where
recvSize :: BS.ByteString -> IO Word64
recvSize sz
| BC.length sz > 10 = fail "Malformed message"
| otherwise = do
b <- recv sock 1
let sz' = BS.append sz b
more = testBit (BS.head b) 7
if more then recvSize sz'
else either (fail "Malformed message") (return) (decodePb $ BL.fromStrict sz')
sendMsg :: (ReflectDescriptor msg, Wire msg) => Socket -> msg -> IO ()
sendMsg sock msg = sendAll sock (BL.toStrict $ messageWithLengthPut msg)
|
Cahu/krpc-hs
|
src/KRPCHS/Internal/NetworkUtils.hs
|
gpl-3.0
| 1,937 | 0 | 16 | 513 | 617 | 316 | 301 | 52 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad.Trans
import Control.Monad (forM_)
import Data.List
import Data.Monoid
import Hakyll
import Hakyll.Web.Paginate
import Hakyll.Web.Sass
import Schnizle.Config
import Schnizle.Fields
import Schnizle.Haml
import Schnizle.Indexed
postsPerPage :: Int
postsPerPage = 10
main :: IO ()
main = hakyllWith config $ do
tags <- buildTags "posts/*.md" $ fromCapture "category/*/index.html"
pages <- buildBlogPages "posts/*.md"
related <- buildRelatedLinks "posts/*.md"
-- prepare templates
match ("templates/*.haml" .||. "templates/static/*.haml") $ compile hamlCompiler
match ("templates/*.xml" .||. "templates/*.html") $ compile templateCompiler
-- render css
match "assets/css/*.sass" $ do
route $ setExtension "css"
compile sassCompiler
-- copy static asserts
match ("assets/img/**" .||. "assets/js/**" .||. "assets/css/*.css") $ do
route idRoute
compile copyFileCompiler
-- create blog ----------------------------------------------------------------
match "posts/*.md" $ do
route $ indexedRouteWith "blog"
compile $ pandocCompilerWith defaultHakyllReaderOptions pandocOptions
>>= loadAndApplyTemplate "templates/post.haml" (postCtx tags related)
>>= loadAndApplyTemplate "templates/layout.haml" (postCtx tags related)
>>= relativizeIndexed
paginateRules pages $ \index pattern -> do
route idRoute
compile $ makeItem ""
>>= loadAndApplyTemplate "templates/blog.haml" (blogCtx index pages tags related)
>>= loadAndApplyTemplate "templates/layout.haml" (blogCtx index pages tags related)
>>= relativizeIndexed
-- sitemap --------------------------------------------------------------------
create ["sitemap.xml"] $ do
route idRoute
compile $ makeItem ""
>>= loadAndApplyTemplate "templates/sitemap.xml" (sitemapCtx tags related)
>>= relativizeIndexed
-- feed ------------------------------------------------------------------------
create ["feed.xml"] $ do
route idRoute
compile $ (recentFirst =<< loadAll "posts/*.md")
>>= renderRss feedConfig defaultContext
-- create static pages ---------------------------------------------------------
create ["index.html"] $ do
route idRoute
compile $ makeItem ""
>>= loadAndApplyTemplate "templates/index.haml" defaultContext
>>= loadAndApplyTemplate "templates/layout.haml" defaultContext
>>= relativizeIndexed
forM_ ["about", "notice", "mail"] $ \file ->
create [fromFilePath $ file ++ "/index.html"] $ do
route idRoute
compile $ makeItem ""
>>= loadAndApplyTemplate ( fromFilePath $ "templates/static/" ++ file ++ ".haml") defaultContext
>>= loadAndApplyTemplate "templates/layout.haml" defaultContext
>>= relativizeIndexed
-- blog --------------------------------------------------------------------------
buildBlogPages :: (MonadMetadata m) => Pattern -> m Paginate
buildBlogPages pattern = buildPaginateWith (return . paginateEvery postsPerPage) pattern $ \index ->
if index == 1
then fromFilePath "blog/index.html"
else fromFilePath $ "blog/" ++ show index ++ "/index.html"
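-- For instance (illustrative): with 25 posts and 'postsPerPage' = 10 this
-- yields blog/index.html, blog/2/index.html and blog/3/index.html.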
postCtx :: Tags -> RelatedLinks -> Context String
postCtx tags related = defaultContext
<> tagsField "tags" tags
<> constField "keywords" tagList
<> dateField "date" "%B %d, %Y"
<> dateField "day" "%d"
<> dateField "month" "%m"
<> dateField "year" "%Y"
<> dateField "created" "%Y-%m-%d"
<> relatedLinksField 2 "related" related defaultRelatedContext
<> additionalLinksField "links"
<> modificationTimeField "modified" "%Y-%m-%d"
where
tagList = intercalate "," $ map fst $ tagsMap tags
blogCtx :: PageNumber -> Paginate -> Tags -> RelatedLinks -> Context String
blogCtx i pages tags related = defaultContext
<> constField "title" "what ever comes to mind"
<> listField "posts" (postCtx tags related) posts
<> modificationTimeField "modified" "%Y-%m-%d"
<> paginateContext pages i
where
posts = takeFromTo <$> (recentFirst =<< loadAll "posts/*.md")
takeFromTo = drop start . take end
start = postsPerPage * (i - 1)
end = postsPerPage * i
sitemapCtx :: Tags -> RelatedLinks -> Context String
sitemapCtx tags related = defaultContext
<> listField "posts" (postCtx tags related) (recentFirst =<< loadAll "posts/*.md")
<> nowField "created" "%Y-%m-%d"
|
felixsch/schnizle
|
schnizle.hs
|
gpl-3.0
| 4,578 | 0 | 20 | 936 | 1,063 | 508 | 555 | 93 | 2 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TupleSections #-}
-- |
-- Module : Aura.Dependencies
-- Copyright : (c) Colin Woodbury, 2012 - 2020
-- License : GPL3
-- Maintainer: Colin Woodbury <[email protected]>
--
-- Library for handling package dependencies and version conflicts.
module Aura.Dependencies ( resolveDeps ) where
import Algebra.Graph.AdjacencyMap
import Algebra.Graph.AdjacencyMap.Algorithm (scc)
import qualified Algebra.Graph.NonEmpty.AdjacencyMap as NAM
import Algebra.Graph.ToGraph (isAcyclic)
import Aura.Core
import Aura.IO
import Aura.Languages
import Aura.Settings
import Aura.Types
import Aura.Utils
import Data.Versions hiding (Lens')
import Lens.Micro
import RIO
import qualified RIO.Map as M
import qualified RIO.NonEmpty as NEL
import qualified RIO.Set as S
import qualified RIO.Text as T
---
-- | The results of dependency resolution.
data Resolution = Resolution
{ toInstall :: Map PkgName Package
, satisfied :: Set PkgName }
deriving (Generic)
toInstallL :: Lens' Resolution (Map PkgName Package)
toInstallL f r = (\m -> r { toInstall = m }) <$> f (toInstall r)
satisfiedL :: Lens' Resolution (Set PkgName)
satisfiedL f r = (\s -> r { satisfied = s }) <$> f (satisfied r)
-- | Given some `Package`s, determine its full dependency graph.
-- The graph is collapsed into layers of packages which are not
-- interdependent, and thus can be built and installed as a group.
--
-- Deeper layers of the result list (generally) depend on the previous layers.
resolveDeps :: Repository -> NonEmpty Package -> RIO Env (NonEmpty (NonEmpty Package))
resolveDeps repo ps = do
ss <- asks settings
res <- liftIO $ (Just <$> resolveDeps' ss repo ps) `catchAny` const (pure Nothing)
Resolution m s <- maybe (throwM $ Failure connectFailure_1) pure res
unless (length ps == length m) $ putText "\n"
let de = conflicts ss m s
unless (null de) . throwM . Failure $ missingPkg_2 de
either throwM pure $ sortInstall m
-- | Solve dependencies for a set of `Package`s assumed to not be
-- installed/satisfied.
resolveDeps' :: Settings -> Repository -> NonEmpty Package -> IO Resolution
resolveDeps' ss repo ps = resolve (Resolution mempty mempty) ps
where
-- | Only searches for packages that we haven't checked yet.
resolve :: Resolution -> NonEmpty Package -> IO Resolution
resolve r@(Resolution m _) xs = maybe' (pure r) (NEL.nonEmpty goods) $ \goods' -> do
let m' = M.fromList . map (pname &&& id) $ toList goods'
r' = r & toInstallL %~ (<> m')
these (const $ pure r') (satisfy r') (const $ satisfy r') $ dividePkgs goods'
where
goods :: [Package]
goods = NEL.filter (\p -> not $ pname p `M.member` m) xs
-- | All dependencies from all potential `Buildable`s.
allDeps :: NonEmpty Buildable -> Set Dep
allDeps = foldMap1 (S.fromList . (^.. to bDeps . each))
-- | Deps which are not yet queued for install.
freshDeps :: Resolution -> Set Dep -> Set Dep
freshDeps (Resolution m s) = S.filter f
where
f :: Dep -> Bool
f d = let n = dName d in not $ M.member n m || S.member n s
-- | Consider only "unsatisfied" deps.
satisfy :: Resolution -> NonEmpty Buildable -> IO Resolution
satisfy r bs = maybe' (pure r) (nes . freshDeps r $ allDeps bs) $
areSatisfied >=> these (lookups r) (pure . r') (\uns sat -> lookups (r' sat) uns)
where
r' :: Satisfied -> Resolution
r' (Satisfied sat) = r & satisfiedL %~ (<> f sat)
-- | Unique names of some dependencies.
f :: NonEmpty Dep -> Set PkgName
f = S.fromList . NEL.toList . NEL.map dName
-- TODO What about if `repoLookup` reports deps that don't exist?
-- i.e. the left-hand side of the tuple.
-- | Lookup unsatisfied deps and recurse the entire lookup process.
lookups :: Resolution -> Unsatisfied -> IO Resolution
lookups r (Unsatisfied ds) = do
let names = NEL.map dName ds
repoLookup repo ss names >>= \case
Nothing -> throwString "AUR Connection Error"
Just (_, could) -> case nes could of
Nothing -> throwString "Non-existant deps"
Just goods -> resolve r goods
conflicts :: Settings -> Map PkgName Package -> Set PkgName -> [DepError]
conflicts ss m s = foldMap f m
where
pm :: Map PkgName Package
pm = M.fromList $ map (\p -> (provides $ pprov p, p)) $ toList m
f :: Package -> [DepError]
f (FromRepo _) = []
f (FromAUR b) = flip mapMaybe (bDeps b) $ \d ->
let dn = dName d
-- Don't do conflict checks for deps which are known to be satisfied on
-- the system.
in if S.member dn s then Nothing
else case M.lookup dn m <|> M.lookup dn pm of
Nothing -> Just . NonExistant dn $ bName b
Just p -> realPkgConflicts ss (bName b) p d
sortInstall :: Map PkgName Package -> Either Failure (NonEmpty (NonEmpty Package))
sortInstall m = case cycles depGraph of
[] -> note (Failure missingPkg_3) . NEL.nonEmpty . mapMaybe nes $ batch depGraph
cs -> Left . Failure . missingPkg_4 $ map (NEL.map pname . NAM.vertexList1) cs
where
f :: Package -> [(Package, Package)]
f (FromRepo _) = []
f p@(FromAUR b) = mapMaybe (\d -> fmap (p,) $ dName d `M.lookup` m)
$ bDeps b -- TODO handle "provides"?
depGraph = overlay connected singles
elems = M.elems m
connected = edges $ foldMap f elems
singles = overlays $ map vertex elems
cycles :: Ord a => AdjacencyMap a -> [NAM.AdjacencyMap a]
cycles = filter (not . isAcyclic) . vertexList . scc
-- | Find the vertices that have no dependencies.
-- O(n) complexity.
leaves :: Ord a => AdjacencyMap a -> Set a
leaves x = S.filter (null . flip postSet x) $ vertexSet x
-- | Split a graph into batches of mutually independent vertices.
-- Probably O(m * n * log(n)) complexity.
batch :: Ord a => AdjacencyMap a -> [Set a]
batch g | isEmpty g = []
| otherwise = ls : batch (induce (`S.notMember` ls) g)
where ls = leaves g
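-- For example (illustrative): with edges a -> b -> c, where an edge points from
-- a package to one of its dependencies, 'batch' yields [{c}, {b}, {a}], i.e. the
-- dependency-free vertices come first.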
-- | Questions to be answered in conflict checks:
-- 1. Is the package ignored in `pacman.conf`?
-- 2. Is the version requested different from the one provided by
-- the most recent version?
realPkgConflicts :: Settings -> PkgName -> Package -> Dep -> Maybe DepError
realPkgConflicts ss parent pkg dep
| pn `elem` toIgnore = Just $ Ignored failMsg1
| isVersionConflict reqVer curVer = Just $ VerConflict failMsg2
| otherwise = Nothing
where pn = pname pkg
curVer = pver pkg & release .~ []
reqVer = dDemand dep & _VersionDemand . release .~ []
lang = langOf ss
toIgnore = ignoresOf ss
failMsg1 = getRealPkgConflicts_2 pn lang
failMsg2 = getRealPkgConflicts_1 parent pn (prettyV curVer) (T.pack $ show reqVer) lang
-- | Compares a (r)equested version number with a (c)urrent up-to-date one.
-- Release metadata is stripped before the comparison. Ideally a dependency
-- demanding version 7.4 SHOULD match as `okay` against version 7.4, 7.4.0.1,
-- or even 7.4.0.1-2, but the `MustBe` case as written requires an exact match.
isVersionConflict :: VersionDemand -> Versioning -> Bool
isVersionConflict Anything _ = False
isVersionConflict (LessThan r) c = c >= r
isVersionConflict (MoreThan r) c = c <= r
isVersionConflict (MustBe r) c = c /= r
isVersionConflict (AtLeast r) c = c < r
|
bb010g/aura
|
aura/lib/Aura/Dependencies.hs
|
gpl-3.0
| 7,513 | 0 | 18 | 1,881 | 2,227 | 1,140 | 1,087 | -1 | -1 |
-----------------------------------------------------------------------------
--
-- Module : Cmd_arguments
-- Copyright : (c) hokum
-- License : GPL3
--
-- Maintainer :
-- Stability : experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Cmd_arguments (
flags ,
options,
tag_DMap,
list_arguments,
InputArguments(..),
inputArgs,
) where
import qualified Data.Map as DMap
data InputArguments = InputArguments {
json_import_file :: Maybe FilePath
,stl_export_file :: Maybe FilePath
,mesh_quality :: Maybe Double
,overall_union_rounding :: Maybe Double
}
{-- ================================================================================================
================================================================================================ --}
inputArgs :: DMap.Map String String -> InputArguments
inputArgs tm = InputArguments {
json_import_file = argument argument_json_import_file default_json_import_file
,stl_export_file = argument argument_stl_export_file default_stl_export_file
,mesh_quality = float_argument argument_mesh_quality default_mesh_quality
,overall_union_rounding = float_argument argument_overall_union_rounding default_overall_union_rounding
}
where
file_argument :: String -> String -> Maybe FilePath
file_argument argument_name default_value = argument argument_name default_value
--float_argument :: String -> String -> Maybe Float
float_argument argument_name default_value = try_read $ argument argument_name default_value
where
try_read Nothing = Nothing
try_read (Just "") = Nothing
try_read (Just s) = Just $ read s
argument :: String -> String -> Maybe String
argument argument_name default_value
-- |s/= default_value = Just s
|s== "" = Nothing
|otherwise = Just s
where
s = (DMap.findWithDefault default_value argument_name tm)
argument_json_import_file = "json-import-file"
default_json_import_file = ""
argument_stl_export_file = "stl-export-file"
default_stl_export_file = ""
argument_mesh_quality = "mesh-quality"
default_mesh_quality = "1"
argument_overall_union_rounding = "overall-union-rounding"
default_overall_union_rounding = "0"
flags = [
]
options = [
argument_json_import_file
,argument_stl_export_file
,argument_mesh_quality
,argument_overall_union_rounding
]
{-- ================================================================================================
================================================================================================ --}
tag_DMap:: [String] -> DMap.Map String String
tag_DMap [] = DMap.fromList [
--("",""),
(argument_json_import_file, default_json_import_file)
,(argument_stl_export_file, default_stl_export_file)
,(argument_mesh_quality, default_mesh_quality)
,(argument_overall_union_rounding, default_overall_union_rounding)
]----]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
tag_DMap lst = DMap.union (DMap.fromList $ map (\(Just x) -> x) $ list_arguments lst) $
tag_DMap []
----------------------------------------------------------------------
{-- ================================================================================================
================================================================================================ --}
list_arguments :: [String] -> [Maybe (String, String)]
list_arguments [] = []
list_arguments (tag:rest)
| take 2 tag == "--" && elem tag' flags =
(Just (tag', "true")) : list_arguments rest
| take 2 tag == "--" && elem tag' options =
(Just (tag', after_tag)) : list_arguments rest'
|otherwise = list_arguments rest
where
after_tag = head rest
tag' = (drop 2 tag)
rest'
|rest /= [] = tail rest
|otherwise = []
rest''
|rest' /= [] = tail rest'
|otherwise = []
----------------------------------------------------
|
Collocalini/GenImplicit
|
genimplicit/src/Cmd_arguments.hs
|
gpl-3.0
| 4,197 | 0 | 14 | 827 | 764 | 411 | 353 | 68 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Shoppingadsprogram.Requestreview
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Requests a review for Shopping Ads program in the provided country.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.shoppingadsprogram.requestreview@.
module Network.Google.Resource.Content.Shoppingadsprogram.Requestreview
(
-- * REST Resource
ShoppingadsprogramRequestreviewResource
-- * Creating a Request
, shoppingadsprogramRequestreview
, ShoppingadsprogramRequestreview
-- * Request Lenses
, srXgafv
, srMerchantId
, srUploadProtocol
, srAccessToken
, srUploadType
, srPayload
, srCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.shoppingadsprogram.requestreview@ method which the
-- 'ShoppingadsprogramRequestreview' request conforms to.
type ShoppingadsprogramRequestreviewResource =
"content" :>
"v2.1" :>
Capture "merchantId" (Textual Int64) :>
"shoppingadsprogram" :>
"requestreview" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] RequestReviewShoppingAdsRequest :>
Post '[JSON] ()
-- | Requests a review for Shopping Ads program in the provided country.
--
-- /See:/ 'shoppingadsprogramRequestreview' smart constructor.
data ShoppingadsprogramRequestreview =
ShoppingadsprogramRequestreview'
{ _srXgafv :: !(Maybe Xgafv)
, _srMerchantId :: !(Textual Int64)
, _srUploadProtocol :: !(Maybe Text)
, _srAccessToken :: !(Maybe Text)
, _srUploadType :: !(Maybe Text)
, _srPayload :: !RequestReviewShoppingAdsRequest
, _srCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ShoppingadsprogramRequestreview' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srXgafv'
--
-- * 'srMerchantId'
--
-- * 'srUploadProtocol'
--
-- * 'srAccessToken'
--
-- * 'srUploadType'
--
-- * 'srPayload'
--
-- * 'srCallback'
shoppingadsprogramRequestreview
:: Int64 -- ^ 'srMerchantId'
-> RequestReviewShoppingAdsRequest -- ^ 'srPayload'
-> ShoppingadsprogramRequestreview
shoppingadsprogramRequestreview pSrMerchantId_ pSrPayload_ =
ShoppingadsprogramRequestreview'
{ _srXgafv = Nothing
, _srMerchantId = _Coerce # pSrMerchantId_
, _srUploadProtocol = Nothing
, _srAccessToken = Nothing
, _srUploadType = Nothing
, _srPayload = pSrPayload_
, _srCallback = Nothing
}
-- | V1 error format.
srXgafv :: Lens' ShoppingadsprogramRequestreview (Maybe Xgafv)
srXgafv = lens _srXgafv (\ s a -> s{_srXgafv = a})
-- | Required. The ID of the account.
srMerchantId :: Lens' ShoppingadsprogramRequestreview Int64
srMerchantId
= lens _srMerchantId (\ s a -> s{_srMerchantId = a})
. _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
srUploadProtocol :: Lens' ShoppingadsprogramRequestreview (Maybe Text)
srUploadProtocol
= lens _srUploadProtocol
(\ s a -> s{_srUploadProtocol = a})
-- | OAuth access token.
srAccessToken :: Lens' ShoppingadsprogramRequestreview (Maybe Text)
srAccessToken
= lens _srAccessToken
(\ s a -> s{_srAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
srUploadType :: Lens' ShoppingadsprogramRequestreview (Maybe Text)
srUploadType
= lens _srUploadType (\ s a -> s{_srUploadType = a})
-- | Multipart request metadata.
srPayload :: Lens' ShoppingadsprogramRequestreview RequestReviewShoppingAdsRequest
srPayload
= lens _srPayload (\ s a -> s{_srPayload = a})
-- | JSONP
srCallback :: Lens' ShoppingadsprogramRequestreview (Maybe Text)
srCallback
= lens _srCallback (\ s a -> s{_srCallback = a})
instance GoogleRequest
ShoppingadsprogramRequestreview
where
type Rs ShoppingadsprogramRequestreview = ()
type Scopes ShoppingadsprogramRequestreview =
'["https://www.googleapis.com/auth/content"]
requestClient ShoppingadsprogramRequestreview'{..}
= go _srMerchantId _srXgafv _srUploadProtocol
_srAccessToken
_srUploadType
_srCallback
(Just AltJSON)
_srPayload
shoppingContentService
where go
= buildClient
(Proxy ::
Proxy ShoppingadsprogramRequestreviewResource)
mempty
|
brendanhay/gogol
|
gogol-shopping-content/gen/Network/Google/Resource/Content/Shoppingadsprogram/Requestreview.hs
|
mpl-2.0
| 5,602 | 0 | 19 | 1,298 | 810 | 469 | 341 | 118 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigtableAdmin.Projects.Instances.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets information about an instance.
--
-- /See:/ <https://cloud.google.com/bigtable/ Cloud Bigtable Admin API Reference> for @bigtableadmin.projects.instances.get@.
module Network.Google.Resource.BigtableAdmin.Projects.Instances.Get
(
-- * REST Resource
ProjectsInstancesGetResource
-- * Creating a Request
, projectsInstancesGet
, ProjectsInstancesGet
-- * Request Lenses
, pigXgafv
, pigUploadProtocol
, pigAccessToken
, pigUploadType
, pigName
, pigCallback
) where
import Network.Google.BigtableAdmin.Types
import Network.Google.Prelude
-- | A resource alias for @bigtableadmin.projects.instances.get@ method which the
-- 'ProjectsInstancesGet' request conforms to.
type ProjectsInstancesGetResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Instance
-- | Gets information about an instance.
--
-- /See:/ 'projectsInstancesGet' smart constructor.
data ProjectsInstancesGet =
ProjectsInstancesGet'
{ _pigXgafv :: !(Maybe Xgafv)
, _pigUploadProtocol :: !(Maybe Text)
, _pigAccessToken :: !(Maybe Text)
, _pigUploadType :: !(Maybe Text)
, _pigName :: !Text
, _pigCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsInstancesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pigXgafv'
--
-- * 'pigUploadProtocol'
--
-- * 'pigAccessToken'
--
-- * 'pigUploadType'
--
-- * 'pigName'
--
-- * 'pigCallback'
projectsInstancesGet
:: Text -- ^ 'pigName'
-> ProjectsInstancesGet
projectsInstancesGet pPigName_ =
ProjectsInstancesGet'
{ _pigXgafv = Nothing
, _pigUploadProtocol = Nothing
, _pigAccessToken = Nothing
, _pigUploadType = Nothing
, _pigName = pPigName_
, _pigCallback = Nothing
}
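-- A hypothetical usage sketch (project and instance ids are made up), added
-- for illustration and following the name format documented on 'pigName':
--
-- > projectsInstancesGet "projects/my-project/instances/my-instance"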
-- | V1 error format.
pigXgafv :: Lens' ProjectsInstancesGet (Maybe Xgafv)
pigXgafv = lens _pigXgafv (\ s a -> s{_pigXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pigUploadProtocol :: Lens' ProjectsInstancesGet (Maybe Text)
pigUploadProtocol
= lens _pigUploadProtocol
(\ s a -> s{_pigUploadProtocol = a})
-- | OAuth access token.
pigAccessToken :: Lens' ProjectsInstancesGet (Maybe Text)
pigAccessToken
= lens _pigAccessToken
(\ s a -> s{_pigAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pigUploadType :: Lens' ProjectsInstancesGet (Maybe Text)
pigUploadType
= lens _pigUploadType
(\ s a -> s{_pigUploadType = a})
-- | Required. The unique name of the requested instance. Values are of the
-- form \`projects\/{project}\/instances\/{instance}\`.
pigName :: Lens' ProjectsInstancesGet Text
pigName = lens _pigName (\ s a -> s{_pigName = a})
-- | JSONP
pigCallback :: Lens' ProjectsInstancesGet (Maybe Text)
pigCallback
= lens _pigCallback (\ s a -> s{_pigCallback = a})
instance GoogleRequest ProjectsInstancesGet where
type Rs ProjectsInstancesGet = Instance
type Scopes ProjectsInstancesGet =
'["https://www.googleapis.com/auth/bigtable.admin",
"https://www.googleapis.com/auth/bigtable.admin.cluster",
"https://www.googleapis.com/auth/bigtable.admin.instance",
"https://www.googleapis.com/auth/cloud-bigtable.admin",
"https://www.googleapis.com/auth/cloud-bigtable.admin.cluster",
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only"]
requestClient ProjectsInstancesGet'{..}
= go _pigName _pigXgafv _pigUploadProtocol
_pigAccessToken
_pigUploadType
_pigCallback
(Just AltJSON)
bigtableAdminService
where go
= buildClient
(Proxy :: Proxy ProjectsInstancesGetResource)
mempty
|
brendanhay/gogol
|
gogol-bigtableadmin/gen/Network/Google/Resource/BigtableAdmin/Projects/Instances/Get.hs
|
mpl-2.0
| 5,052 | 0 | 15 | 1,128 | 714 | 419 | 295 | 106 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionInstanceGroups.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of instance group resources contained within the
-- specified region.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionInstanceGroups.list@.
module Network.Google.Resource.Compute.RegionInstanceGroups.List
(
-- * REST Resource
RegionInstanceGroupsListResource
-- * Creating a Request
, regionInstanceGroupsList
, RegionInstanceGroupsList
-- * Request Lenses
, riglOrderBy
, riglProject
, riglFilter
, riglRegion
, riglPageToken
, riglMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionInstanceGroups.list@ method which the
-- 'RegionInstanceGroupsList' request conforms to.
type RegionInstanceGroupsListResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"instanceGroups" :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] RegionInstanceGroupList
-- | Retrieves the list of instance group resources contained within the
-- specified region.
--
-- /See:/ 'regionInstanceGroupsList' smart constructor.
data RegionInstanceGroupsList = RegionInstanceGroupsList'
{ _riglOrderBy :: !(Maybe Text)
, _riglProject :: !Text
, _riglFilter :: !(Maybe Text)
, _riglRegion :: !Text
, _riglPageToken :: !(Maybe Text)
, _riglMaxResults :: !(Textual Word32)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RegionInstanceGroupsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'riglOrderBy'
--
-- * 'riglProject'
--
-- * 'riglFilter'
--
-- * 'riglRegion'
--
-- * 'riglPageToken'
--
-- * 'riglMaxResults'
regionInstanceGroupsList
:: Text -- ^ 'riglProject'
-> Text -- ^ 'riglRegion'
-> RegionInstanceGroupsList
regionInstanceGroupsList pRiglProject_ pRiglRegion_ =
RegionInstanceGroupsList'
{ _riglOrderBy = Nothing
, _riglProject = pRiglProject_
, _riglFilter = Nothing
, _riglRegion = pRiglRegion_
, _riglPageToken = Nothing
, _riglMaxResults = 500
}
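-- A hypothetical usage sketch (project and region are made up), added for
-- illustration:
--
-- > regionInstanceGroupsList "my-project" "us-central1"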
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- orderBy=\"creationTimestamp desc\". This sorts results based on the
-- creationTimestamp field in reverse chronological order (newest result
-- first). Use this to sort resources like operations so that the newest
-- operation is returned first. Currently, only sorting by name or
-- creationTimestamp desc is supported.
riglOrderBy :: Lens' RegionInstanceGroupsList (Maybe Text)
riglOrderBy
= lens _riglOrderBy (\ s a -> s{_riglOrderBy = a})
-- | Project ID for this request.
riglProject :: Lens' RegionInstanceGroupsList Text
riglProject
= lens _riglProject (\ s a -> s{_riglProject = a})
-- | Sets a filter expression for filtering listed resources, in the form
-- filter={expression}. Your {expression} must be in the format: field_name
-- comparison_string literal_string. The field_name is the name of the
-- field you want to compare. Only atomic field types are supported
-- (string, number, boolean). The comparison_string must be either eq
-- (equals) or ne (not equals). The literal_string is the string value to
-- filter to. The literal value must be valid for the type of field you are
-- filtering by (string, number, boolean). For string fields, the literal
-- value is interpreted as a regular expression using RE2 syntax. The
-- literal value must match the entire field. For example, to filter for
-- instances that do not have a name of example-instance, you would use
-- filter=name ne example-instance. You can filter on nested fields. For
-- example, you could filter on instances that have set the
-- scheduling.automaticRestart field to true. Use filtering on nested
-- fields to take advantage of labels to organize and search for results
-- based on label values. To filter on multiple expressions, provide each
-- separate expression within parentheses. For example,
-- (scheduling.automaticRestart eq true) (zone eq us-central1-f). Multiple
-- expressions are treated as AND expressions, meaning that resources must
-- match all expressions to pass the filters.
riglFilter :: Lens' RegionInstanceGroupsList (Maybe Text)
riglFilter
= lens _riglFilter (\ s a -> s{_riglFilter = a})
-- | Name of the region scoping this request.
riglRegion :: Lens' RegionInstanceGroupsList Text
riglRegion
= lens _riglRegion (\ s a -> s{_riglRegion = a})
-- | Specifies a page token to use. Set pageToken to the nextPageToken
-- returned by a previous list request to get the next page of results.
riglPageToken :: Lens' RegionInstanceGroupsList (Maybe Text)
riglPageToken
= lens _riglPageToken
(\ s a -> s{_riglPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than maxResults, Compute Engine
-- returns a nextPageToken that can be used to get the next page of results
-- in subsequent list requests.
riglMaxResults :: Lens' RegionInstanceGroupsList Word32
riglMaxResults
= lens _riglMaxResults
(\ s a -> s{_riglMaxResults = a})
. _Coerce
instance GoogleRequest RegionInstanceGroupsList where
type Rs RegionInstanceGroupsList =
RegionInstanceGroupList
type Scopes RegionInstanceGroupsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient RegionInstanceGroupsList'{..}
= go _riglProject _riglRegion _riglOrderBy
_riglFilter
_riglPageToken
(Just _riglMaxResults)
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy RegionInstanceGroupsListResource)
mempty
|
rueshyna/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/RegionInstanceGroups/List.hs
|
mpl-2.0
| 7,338 | 0 | 19 | 1,615 | 753 | 452 | 301 | 110 | 1 |
module Betty.Signup.MailText
(
verHeaders,
verText,
verHtml
)
where
import ClassyPrelude.Yesod
import Network.Mail.Mime (Encoding (None), Part (..))
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
import Text.Shakespeare.Text (stext)
import Yesod.Auth.Email (VerUrl)
------------------------------------------------------------------------
verHeaders :: (IsString t, IsString t1) => [(t, t1)]
verHeaders = [("Subject", "Please verify your email address")]
------------------------------------------------------------------------
-- TODO: externalize the message itself.
verText :: VerUrl -> Part
verText verurl = Part { partType = "text/plain; charset=utf-8"
, partEncoding = None
, partFilename = Nothing
, partContent = encodeUtf8 [stext|
Hello,
Someone (possibly you?) has requested an account with our service
using your email address.
If it was indeed you, please confirm your email address by visiting the
link below.
#{verurl}
If you did not create an account with us, please ignore this email.
Thank you!
|]
, partHeaders = []
}
------------------------------------------------------------------------
-- TODO: externalize the message itself.
verHtml :: VerUrl -> Part
verHtml verurl = Part { partType = "text/html; charset=utf-8"
, partEncoding = None
, partFilename = Nothing
, partContent = renderHtml [shamlet|
<p>Hello,
<p>Someone (possibly you?) has requested an account with our service using your email address.
<p>If it was indeed you, please confirm your email address by visiting the link below.
<p>
<a href=#{verurl}>#{verurl}
<p>If you did not request an account with us, please ignore this email.
<p>Thank you!
|]
, partHeaders = []
}
------------------------------------------------------------------------
|
sajith/betty-web
|
Betty/Signup/MailText.hs
|
agpl-3.0
| 2,066 | 0 | 7 | 543 | 245 | 157 | 88 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
--
-- Copyright (c) 2009-2012 Stefan Wehr - http://www.stefanwehr.de
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
--
{- |
This module defines functions for running a set of tests. Furthermore,
it provides functionality for organizing tests into a hierarchical
structure. This functionality is mainly used internally in the code
generated by the @htfpp@ pre-processor.
-}
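-- A minimal usage sketch (added for illustration; 'test_foo' is a
-- hypothetical assertion): tests can also be organized by hand with
-- 'makeTestSuite' and run via 'htfMain':
--
-- > main :: IO ()
-- > main = htfMain $ makeTestSuite "example-suite"
-- >          [ makeUnitTest "foo" unknownLocation test_foo ]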
module Test.Framework.TestManager (
-- * Re-exports
module Test.Framework.TestTypes,
-- * Running tests
htfMain, htfMainWithArgs, runTest, runTest', runTestWithArgs, runTestWithArgs',
runTestWithOptions, runTestWithOptions', runTestWithConfig, runTestWithConfig',
  -- * Organizing tests
TestableHTF,
makeQuickCheckTest, makeUnitTest, makeBlackBoxTest, makeTestSuite,
makeAnonTestSuite,
addToTestSuite, testSuiteAsTest,
) where
import Control.Monad.RWS
import System.Exit (ExitCode(..), exitWith)
import System.Environment (getArgs)
import qualified Control.Exception as Exc
import Data.Maybe
import Data.Time
import qualified Data.List as List
import qualified Data.ByteString as BS
import Data.IORef
import Control.Concurrent
import System.IO
import Test.Framework.Utils
import Test.Framework.TestInterface
import Test.Framework.TestTypes
import Test.Framework.CmdlineOptions
import Test.Framework.TestReporter
import Test.Framework.Location
import Test.Framework.Colors
import Test.Framework.ThreadPool
import Test.Framework.History
-- | Construct a test where the given 'Assertion' checks a QuickCheck property.
-- Mainly used internally by the htfpp preprocessor.
makeQuickCheckTest :: TestID -> Location -> Assertion -> Test
makeQuickCheckTest id loc ass = BaseTest QuickCheckTest id (Just loc) defaultTestOptions ass
-- | Construct a unit test from the given 'IO' action.
-- Mainly used internally by the htfpp preprocessor.
makeUnitTest :: AssertionWithTestOptions a => TestID -> Location -> a -> Test
makeUnitTest id loc ass =
BaseTest UnitTest id (Just loc) (testOptions ass) (assertion ass)
-- | Construct a black box test from the given 'Assertion'.
-- Mainly used internally.
makeBlackBoxTest :: TestID -> Assertion -> Test
makeBlackBoxTest id ass = BaseTest BlackBoxTest id Nothing defaultTestOptions ass
-- | Create a named 'TestSuite' from a list of 'Test' values.
makeTestSuite :: TestID -> [Test] -> TestSuite
makeTestSuite = TestSuite
-- | Create an unnamed 'TestSuite' from a list of 'Test' values.
makeAnonTestSuite :: [Test] -> TestSuite
makeAnonTestSuite = AnonTestSuite
-- | Turn a 'TestSuite' into a proper 'Test'.
testSuiteAsTest :: TestSuite -> Test
testSuiteAsTest = CompoundTest
-- | Extend a 'TestSuite' with a list of 'Test' values
addToTestSuite :: TestSuite -> [Test] -> TestSuite
addToTestSuite (TestSuite id ts) ts' = TestSuite id (ts ++ ts')
addToTestSuite (AnonTestSuite ts) ts' = AnonTestSuite (ts ++ ts')
-- | A type class for things that can be run as tests.
-- Mainly used internally.
class TestableHTF t where
flatten :: t -> [FlatTest]
instance TestableHTF Test where
flatten = flattenTest
instance TestableHTF TestSuite where
flatten = flattenTestSuite
instance TestableHTF t => TestableHTF [t] where
flatten = concatMap flatten
instance TestableHTF (IO a) where
flatten action = flatten (makeUnitTest "unnamed test" unknownLocation action)
flattenTest :: Test -> [FlatTest]
flattenTest (BaseTest sort id mloc opts x) =
[FlatTest sort (TestPathBase id) mloc (WithTestOptions opts x)]
flattenTest (CompoundTest ts) =
flattenTestSuite ts
flattenTestSuite :: TestSuite -> [FlatTest]
flattenTestSuite (TestSuite id ts) =
let fts = concatMap flattenTest ts
in map (\ft -> ft { ft_path = TestPathCompound (Just id) (ft_path ft) }) fts
flattenTestSuite (AnonTestSuite ts) =
let fts = concatMap flattenTest ts
in map (\ft -> ft { ft_path = TestPathCompound Nothing (ft_path ft) }) fts
maxRunTime :: TestConfig -> FlatTest -> Maybe Milliseconds
maxRunTime tc ft =
let mt1 = tc_maxSingleTestTime tc
mt2 =
case tc_prevFactor tc of
Nothing -> Nothing
Just d ->
case max (fmap htr_timeMs (findHistoricSuccessfulTestResult (historyKey ft) (tc_history tc)))
(fmap htr_timeMs (findHistoricTestResult (historyKey ft) (tc_history tc)))
of
Nothing -> Nothing
Just t -> Just $ ceiling (fromInteger (toInteger t) * d)
in case (mt1, mt2) of
(Just t1, Just t2) -> Just (min t1 t2)
(_, Nothing) -> mt1
(Nothing, _) -> mt2
-- | HTF uses this function to execute the given assertion as a HTF test.
performTestHTF :: Assertion -> IO FullTestResult
performTestHTF action =
do action
return (mkFullTestResult Pass Nothing)
`Exc.catches`
[Exc.Handler (\(HTFFailure res) -> return res)
,Exc.Handler handleUnexpectedException]
where
handleUnexpectedException exc =
case Exc.fromException exc of
Just (async :: Exc.AsyncException) ->
case async of
Exc.StackOverflow -> exceptionAsError exc
_ -> Exc.throwIO exc
_ -> exceptionAsError exc
exceptionAsError exc =
return (mkFullTestResult Error (Just $ show (exc :: Exc.SomeException)))
data TimeoutResult a
= TimeoutResultOk a
| TimeoutResultException Exc.SomeException
| TimeoutResultTimeout
timeout :: Int -> IO a -> IO (Maybe a)
timeout microSecs action
| microSecs < 0 = fmap Just action
| microSecs == 0 = return Nothing
| otherwise =
do resultChan <- newChan
finishedVar <- newIORef False
workerTid <- forkIO (wrappedAction resultChan finishedVar)
_ <- forkIO (threadDelay microSecs >> writeChan resultChan TimeoutResultTimeout)
res <- readChan resultChan
case res of
TimeoutResultTimeout ->
do atomicModifyIORef finishedVar (\_ -> (True, ()))
killThread workerTid
return Nothing
TimeoutResultOk x ->
return (Just x)
TimeoutResultException exc ->
Exc.throwIO exc
where
wrappedAction resultChan finishedVar =
Exc.mask $ \restore ->
(do x <- restore action
writeChan resultChan (TimeoutResultOk x))
`Exc.catch`
(\(exc::Exc.SomeException) ->
do b <- shouldReraiseException exc finishedVar
if b then Exc.throwIO exc else writeChan resultChan (TimeoutResultException exc))
shouldReraiseException exc finishedVar =
case Exc.fromException exc of
Just (async :: Exc.AsyncException) ->
case async of
Exc.ThreadKilled -> atomicModifyIORef finishedVar (\old -> (old, old))
_ -> return False
_ -> return False
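-- For illustration (added, not part of the original source):
-- @timeout 500000 (threadDelay 1000000)@ is expected to yield 'Nothing',
-- @timeout 500000 (return 42)@ yields @Just 42@, a negative timeout runs the
-- action to completion, and a timeout of 0 yields 'Nothing' immediately.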
data PrimTestResult
= PrimTestResultNoTimeout FullTestResult
| PrimTestResultTimeout
mkFlatTestRunner :: TestConfig -> FlatTest -> ThreadPoolEntry TR () (PrimTestResult, Milliseconds)
mkFlatTestRunner tc ft = (pre, action, post)
where
pre = reportTestStart ft
action _ =
let run = performTestHTF (wto_payload (ft_payload ft))
in case maxRunTime tc ft of
Nothing ->
do (res, time) <- measure run
return (PrimTestResultNoTimeout res, time)
Just maxMs ->
do mx <- timeout (1000 * maxMs) $ measure run
case mx of
Nothing -> return (PrimTestResultTimeout, maxMs)
Just (res, time) ->
return (PrimTestResultNoTimeout res, time)
post excOrResult =
let (testResult, time) =
case excOrResult of
Left exc ->
(FullTestResult
{ ftr_location = Nothing
, ftr_callingLocations = []
, ftr_message = Just $ noColor ("Running test unexpectedly failed: " ++ show exc)
, ftr_result = Just Error
}
,(-1))
Right (res, time) ->
case res of
PrimTestResultTimeout ->
(FullTestResult
{ ftr_location = Nothing
, ftr_callingLocations = []
, ftr_message = Just $ colorize warningColor "timeout"
, ftr_result = Nothing
}
,time)
PrimTestResultNoTimeout res ->
let res' =
if isNothing (ftr_message res) && isNothing (ftr_result res)
then res { ftr_message = Just (colorize warningColor "timeout") }
else res
in (res', time)
(sumRes, isTimeout) =
case ftr_result testResult of
Just x -> (x, False)
Nothing -> (if tc_timeoutIsSuccess tc then Pass else Error, True)
rr = FlatTest
{ ft_sort = ft_sort ft
, ft_path = ft_path ft
, ft_location = ft_location ft
, ft_payload = RunResult sumRes (ftr_location testResult)
(ftr_callingLocations testResult)
(fromMaybe emptyColorString (ftr_message testResult))
time isTimeout
}
in do modify (\s -> s { ts_results = rr : ts_results s })
reportTestResult rr
return (stopFlag sumRes)
stopFlag result =
if not (tc_failFast tc)
then DoNotStop
else case result of
Pass -> DoNotStop
Pending -> DoNotStop
Fail -> DoStop
Error -> DoStop
runAllFlatTests :: TestConfig -> [FlatTest] -> TR ()
runAllFlatTests tc tests' =
do reportGlobalStart tests
tc <- ask
case tc_threads tc of
Nothing ->
let entries = map (mkFlatTestRunner tc) tests
in tp_run sequentialThreadPool entries
Just i ->
let (ptests, stests) = List.partition (\t -> to_parallel (wto_options (ft_payload t))) tests
pentries' = map (mkFlatTestRunner tc) ptests
sentries = map (mkFlatTestRunner tc) stests
in do tp <- parallelThreadPool i
pentries <- if tc_shuffle tc
then liftIO (shuffleIO pentries')
else return pentries'
tp_run tp pentries
tp_run sequentialThreadPool sentries
where
tests = sortTests tests'
sortTests ts =
if not (tc_sortByPrevTime tc)
then ts
else map snd $ List.sortBy compareTests (map (\t -> (historyKey t, t)) ts)
compareTests (t1, _) (t2, _) =
case (max (fmap htr_timeMs (findHistoricSuccessfulTestResult t1 (tc_history tc)))
(fmap htr_timeMs (findHistoricTestResult t1 (tc_history tc)))
,max (fmap htr_timeMs (findHistoricSuccessfulTestResult t2 (tc_history tc)))
(fmap htr_timeMs (findHistoricTestResult t2 (tc_history tc))))
of
(Just t1, Just t2) -> compare t1 t2
(Just _, Nothing) -> GT
(Nothing, Just _) -> LT
(Nothing, Nothing) -> EQ
-- | Run something testable using the 'Test.Framework.TestConfig.defaultCmdlineOptions'.
runTest :: TestableHTF t => t -- ^ Testable thing
-> IO ExitCode -- ^ See 'runTestWithOptions' for a specification of the 'ExitCode' result
runTest = runTestWithOptions defaultCmdlineOptions
-- | Run something testable using the 'Test.Framework.TestConfig.defaultCmdlineOptions'.
runTest' :: TestableHTF t => t -- ^ Testable thing
-> IO (IO (), ExitCode) -- ^ 'IO' action for printing the overall test results, and exit code for the test run. See 'runTestWithOptions' for a specification of the 'ExitCode' result
runTest' = runTestWithOptions' defaultCmdlineOptions
-- | Run something testable, parse the 'CmdlineOptions' from the given commandline arguments.
runTestWithArgs :: TestableHTF t => [String] -- ^ Commandline arguments
-> t -- ^ Testable thing
-> IO ExitCode -- ^ See 'runTestWithConfig' for a specification of the 'ExitCode' result.
runTestWithArgs args t =
do (printSummary, ecode) <- runTestWithArgs' args t
printSummary
return ecode
-- | Run something testable, parse the 'CmdlineOptions' from the given commandline arguments.
-- Does not print the overall test results but returns an 'IO' action for doing so.
runTestWithArgs' :: TestableHTF t => [String] -- ^ Commandline arguments
-> t -- ^ Testable thing
-> IO (IO (), ExitCode) -- ^ 'IO' action for printing the overall test results, and exit code for the test run. See 'runTestWithConfig' for a specification of the 'ExitCode' result.
runTestWithArgs' args t =
case parseTestArgs args of
Left err ->
do hPutStrLn stderr err
return $ (return (), ExitFailure 1)
Right opts ->
runTestWithOptions' opts t
-- | Runs something testable with the given 'CmdlineOptions'.
-- See 'runTestWithConfig' for a specification of the 'ExitCode' result.
runTestWithOptions :: TestableHTF t => CmdlineOptions -> t -> IO ExitCode
runTestWithOptions opts t =
do (printSummary, ecode) <- runTestWithOptions' opts t
printSummary
return ecode
-- | Runs something testable with the given 'CmdlineOptions'. Does not
-- print the overall test results but returns an 'IO' action for doing so.
-- See 'runTestWithConfig' for a specification of the 'ExitCode' result.
runTestWithOptions' :: TestableHTF t => CmdlineOptions -> t -> IO (IO (), ExitCode)
runTestWithOptions' opts t =
if opts_help opts
then do hPutStrLn stderr helpString
return $ (return (), ExitFailure 1)
else do tc <- testConfigFromCmdlineOptions opts
(printSummary, ecode) <-
(if opts_listTests opts
then let fts = filter (opts_filter opts) (flatten t)
in return (runRWST (reportAllTests fts) tc initTestState >> return (), ExitSuccess)
else do (printSummary, ecode, history) <- runTestWithConfig' tc t
storeHistory (tc_historyFile tc) history
return (printSummary, ecode))
return (printSummary `Exc.finally` cleanup tc, ecode)
where
cleanup tc =
case tc_output tc of
TestOutputHandle h True -> hClose h
_ -> return ()
storeHistory file history =
BS.writeFile file (serializeTestHistory history)
-- | Runs something testable with the given 'TestConfig'.
-- The result is 'ExitSuccess' if all tests were executed successfully,
-- 'ExitFailure' otherwise. In the latter case, an error code of @1@ indicates
-- that failures but no errors occurred, otherwise the error code @2@ is used.
--
-- A test is /successful/ if the test terminates and no assertion fails.
-- A test is said to /fail/ if an assertion fails but no other error occur.
runTestWithConfig :: TestableHTF t => TestConfig -> t -> IO (ExitCode, TestHistory)
runTestWithConfig tc t =
do (printSummary, ecode, history) <- runTestWithConfig' tc t
printSummary
return (ecode, history)
-- | Runs something testable with the given 'TestConfig'. Does not
-- print the overall test results but returns an 'IO' action for doing so.
-- See 'runTestWithConfig' for a specification of the 'ExitCode' result.
runTestWithConfig' :: TestableHTF t => TestConfig -> t -> IO (IO (), ExitCode, TestHistory)
runTestWithConfig' tc t =
do let allTests = flatten t
activeTests = filter (tc_filter tc) allTests
filteredTests = filter (not . tc_filter tc) allTests
startTime <- getCurrentTime
((_, s, _), time) <-
measure $
runRWST (runAllFlatTests tc activeTests) tc initTestState
let results = reverse (ts_results s)
passed = filter (\ft -> (rr_result . ft_payload) ft == Pass) results
pending = filter (\ft -> (rr_result . ft_payload) ft == Pending) results
failed = filter (\ft -> (rr_result . ft_payload) ft == Fail) results
error = filter (\ft -> (rr_result . ft_payload) ft == Error) results
timedOut = filter (\ft -> (rr_timeout . ft_payload) ft) results
arg = ReportGlobalResultsArg
{ rgra_timeMs = time
, rgra_passed = passed
, rgra_pending = pending
, rgra_failed = failed
, rgra_errors = error
, rgra_timedOut = timedOut
, rgra_filtered = filteredTests
}
let printSummary =
runRWST (reportGlobalResults arg) tc (TestState [] (ts_index s)) -- keep index from run
!newHistory = updateHistory startTime results (tc_history tc)
return (printSummary >> return (),
case () of
_| length failed == 0 && length error == 0 -> ExitSuccess
| length error == 0 -> ExitFailure 1
| otherwise -> ExitFailure 2
,newHistory)
where
updateHistory :: UTCTime -> [FlatTestResult] -> TestHistory -> TestHistory
updateHistory time results history =
let runHistory = mkTestRunHistory time (map (\res -> HistoricTestResult {
htr_testId = historyKey res
, htr_result = rr_result (ft_payload res)
, htr_timedOut = rr_timeout (ft_payload res)
, htr_timeMs = rr_wallTimeMs (ft_payload res)
})
results)
in updateTestHistory runHistory history
-- | Runs something testable by parsing the commandline arguments as test options
-- (using 'parseTestArgs'). Exits with the exit code returned by 'runTestWithArgs'.
-- This function is the main entry point for running tests.
htfMain :: TestableHTF t => t -> IO ()
htfMain tests =
do args <- getArgs
htfMainWithArgs args tests
-- | Runs something testable by parsing the commandline arguments as test options
-- (using 'parseTestArgs'). Exits with the exit code returned by 'runTestWithArgs'.
htfMainWithArgs :: TestableHTF t => [String] -> t -> IO ()
htfMainWithArgs args tests =
do ecode <- runTestWithArgs args tests
exitWith ecode
|
ekarayel/HTF
|
Test/Framework/TestManager.hs
|
lgpl-2.1
| 20,400 | 0 | 26 | 6,558 | 4,378 | 2,258 | 2,120 | 337 | 12 |
module Terminology.Roots (quizByRoot, quizByMeaning) where
import Colors
questions = [("aer","air")
,("aero","gas")
,("acous, audi","hearing")
,("acusis","hearing condition")
,("aden","gland")
,("adip","fat")
,("adrena","adrenal")
,("alges","pain")
,("albumin","protein")
,("ambly","dim, dull")
,("andr","male")
,("aneur","widening")
,("angi","vessel")
,("aort","aorta")
,("arter","artery")
,("ather","plaque")
,("arthr","joint")
,("balan","glans penis")
,("bacter","bacteria")
,("blephar","eyelid")
,("burs","bursia")
,("bronch","bronchus")
,("capn","carbon dioxide")
,("card","heart")
,("carp","wrist")
,("cerebr","cerebrum")
,("cerumin","wax-like")
,("cervic","cervix")
,("chol","bile")
,("chondr","cartilage")
,("col","colon")
,("condyle","knob, knuckle")
,("corne","cornea")
,("conjunctiv","conjuctiva")
,("crani","skull")
,("crine","to secrete")
,("cutane","skin")
,("cyan", "blue")
,("cyt, cyte","cell")
,("cyst, vesic","bladder")
,("crypt","hidden")
,("dactyl","finger or toe")
,("derma","skin")
,("dont","teeth")
,("duoden","duodenum")
,("eclamps","shining forth")
,("embol","embolus")
,("encephal","brain")
,("enter","intestine")
,("esophag","esophagus")
,("esthes","sensation")
,("femor","thigh bone")
,("fibr","fiber")
,("fibul","small lower leg bone")
,("gastr","stomach")
,("glauc","gray")
,("glyc","sugar")
,("glycos","glucose")
,("gynec","woman")
,("hemat","blood")
,("hemo","blood")
,("hepat","liver")
,("herni","hernia")
,("hist","tissue")
,("humer","upper arm bone")
,("irid, ir","iris")
,("keto","ketones")
,("kerat","hard")
,("labyrinth","inner ear")
,("lact","milk")
,("lacrim","tear duct")
,("laryng","larynx")
,("lapar","abdomen")
,("leuk","white")
,("lingu","tongue")
,("lipid", "fat")
,("lith","stone")
,("lord","curve")
,("lymph","lymph")
,("mast, mamm","breast")
,("melan","black")
,("mening","meninges")
,("men", "menstruation")
,("ment","mind")
,("metr, hyster","uterus")
,("my","muscle")
,("myel","spinal cord")
,("myring, tympan","eardrum")
,("myx","mucus")
,("nas, rhin","nose")
,("necr","death")
,("nephr, ren, ur","kidney")
,("neur","nerve")
,("ocul, ophthalm","eye")
,("onych","nail")
,("opt, opia","vision (or eye)")
,("orexia","appetite")
,("orch, orchid","testis")
,("oste","bone")
,("ot","ear")
,("ovari","ovary")
,("ox, oxy","oxygen")
,("pancreat","pancreas")
,("partum","bring forth")
,("patell","knee cap")
,("pector","chest")
,("ped, pod","foot")
,("pelv","pelvis")
,("pepsia","digestion")
,("phag","swallow")
,("phalang","bones of fingers and toes")
,("phas","speech")
,("phleb","vein")
,("plas","development")
,("pneumon, pulmon","lung, air")
,("prostat","prostate gland")
,("psycho","mind")
,("rachi, spondyl","vertebrae")
,("respir","breath")
,("retin","retina")
,("rhabdo","rod")
,("salping","fallopian tube, eustachian tube")
,("scoli","crooked, bent")
,("semin","semen")
,("sepsis","to putrefy")
,("sinus","sinus")
,("stenosis","narrowing")
,("stigmat","point(ed)")
,("somat","body")
,("sperm","sperm")
,("splen","spleen")
,("stern","sternum, breastbone")
,("tendin","tendon")
,("test","testis, testicle")
,("tetan","tetanus")
,("thorax", "chest")
,("thromb","clot")
,("thyroid","thyroid")
,("thym","thymus")
,("thyroid","thyroid")
,("toc","birth")
,("tibi","large, lower leg bone")
,("uter","uterine")
,("uria, uresis","urination")
,("ureter","ureter")
,("urethr","urethra")
,("vagin","vagina")
,("varic","varicose veins")
,("vas, vascu","vessel or duct")
,("ven","vein")
]
quizByMeaning = [("What is the meaning of " ++ white x ++ "?", y) | (x,y) <- questions]
quizByRoot = [("What is the root of " ++ white y ++ "?", x) | (x,y) <- questions]
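-- For illustration (added; the colour codes produced by 'white' are elided):
-- the ("aer","air") entry yields ("What is the meaning of aer?", "air") in
-- quizByMeaning and ("What is the root of air?", "aer") in quizByRoot.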
|
kohabi/rxquiz
|
src/Terminology/Roots.hs
|
unlicense
| 5,399 | 0 | 9 | 2,063 | 1,450 | 958 | 492 | 154 | 1 |
-- Copyright 2017 Google Inc.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Language.Haskell.Indexer.Backend.GhcArgs
( GhcArgs(..)
, ToolOverride(..)
, defaultGhcArgs
) where
-- | Args to call GHC with, to perform the compilation.
data GhcArgs = GhcArgs
{ gaToolOverride :: !ToolOverride
, gaArgs :: [String]
, gaLibdirPrefix :: !FilePath
-- ^ Gets prepended to libdir path reported by ghc-paths. Needed if
-- runtime location of libdir differs from the reported one.
}
-- | Allows selective overriding of the various tools used during compilation.
data ToolOverride = ToolOverride
{ overridePgmP :: !(Maybe FilePath)
-- ^ Override the preprocessor if needed. Only overrides the binary, but
-- keeps whatever flags were set on it originally (from the GHC command
-- line).
}
defaultGhcArgs :: GhcArgs
defaultGhcArgs = GhcArgs (ToolOverride Nothing) [] ""
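-- A hypothetical usage sketch (the cpp path is made up): override only the
-- preprocessor binary while keeping all other defaults:
--
-- > defaultGhcArgs { gaToolOverride = ToolOverride (Just "/usr/local/bin/cpp") }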
|
robinp/haskell-indexer
|
haskell-indexer-backend-ghc/src/Language/Haskell/Indexer/Backend/GhcArgs.hs
|
apache-2.0
| 1,448 | 0 | 11 | 299 | 127 | 84 | 43 | 18 | 1 |
module Main where
import SIL.Serializer.C
import SIL.Serializer
import SIL
import Foreign.Marshal.Alloc
import Test.Hspec
import Test.QuickCheck
import Common
serializerSpec :: Spec
serializerSpec = do
describe "C dynamic representation" $ do
it "Serializing to C dynamic representation and back will give the same result" $ do
property (\test_iexpr -> do
let (TestIExpr iexpr) = test_iexpr
c_rep <- toC iexpr
hs_rep <- fromC c_rep
sil_free c_rep
hs_rep `shouldBe` iexpr
)
describe "Vector serialization" $ do
it "Serializing to Vector Word8 and back will give the same result" $ do
property (\test_iexpr -> do
let (TestIExpr iexpr) = test_iexpr
serialized = serialize iexpr
deserialized = unsafeDeserialize serialized
deserialized `shouldBe` iexpr
)
describe "C FFI and Haskell" $ do
it "IExpr -> Vector Word8 -> SIL_Serialized -> Vector Word8 -> IExpr: IExprs will be the same" $ do
property (\test_iexpr -> do
let (TestIExpr iexpr) = test_iexpr
serialized = serialize iexpr
ptr_serialized <- serializedToC serialized
serialized2 <- serializedFromC ptr_serialized
let deserialized = unsafeDeserialize serialized2
free ptr_serialized
deserialized `shouldBe` iexpr
)
it "IExpr -> CRep -> SIL_Serialized -> CRep -> IExpr: IExprs will be the same" $ do
property (\test_iexpr -> do
let (TestIExpr iexpr) = test_iexpr
c_rep <- toC iexpr
c_serialized <- sil_serialize c_rep
c_deserialized <- sil_deserialize c_serialized
hs_rep <- fromC c_deserialized
sil_free c_deserialized
free c_serialized
hs_rep `shouldBe` iexpr
)
it "IExpr -> Vector Word8 -> SIL_Serialized -> CRep -> IExpr: IExprs will be the same" $ do
property (\test_iexpr -> do
let (TestIExpr iexpr) = test_iexpr
serialized = serialize iexpr
ptr_serialized <- serializedToC serialized
c_deserialized <- sil_deserialize ptr_serialized
hs_rep <- fromC c_deserialized
sil_free c_deserialized
free ptr_serialized
hs_rep `shouldBe` iexpr
)
it "IExpr -> CRep -> SIL_Serialized -> Vector Word8 -> IExpr: IExprs will be the same" $ do
property (\test_iexpr -> do
let (TestIExpr iexpr) = test_iexpr
c_rep <- toC iexpr
ptr_serialized <- sil_serialize c_rep
serialized2 <- serializedFromC ptr_serialized
let deserialized = unsafeDeserialize serialized2
sil_free c_rep
free ptr_serialized
deserialized `shouldBe` iexpr
)
main :: IO ()
main = hspec serializerSpec
|
sfultong/stand-in-language
|
test/SerializerSpec.hs
|
apache-2.0
| 3,221 | 0 | 22 | 1,239 | 633 | 287 | 346 | 67 | 1 |
module RecursiveContents (getRecursiveContents) where
import Control.Monad (forM)
import System.Directory (doesDirectoryExist, getDirectoryContents)
import System.FilePath ((</>))
getRecursiveContents :: FilePath -> IO [FilePath]
getRecursiveContents topDir = do
names <- getDirectoryContents topDir
let properNames = filter (`notElem` [".", ".."]) names
-- paths <- forM properNames $ \name -> do
paths <- mapM (\name -> do
let path = topDir </> name
isDirectory <- doesDirectoryExist path
if isDirectory
then getRecursiveContents path
else return [path]) properNames
return $ concat paths
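-- A small usage sketch (added for illustration; the directory name is made up):
--
-- > main :: IO ()
-- > main = getRecursiveContents "src" >>= mapM_ putStrLn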
|
EricYT/Haskell
|
src/real_haskell/chapter-9/RecursiveContents.hs
|
apache-2.0
| 648 | 0 | 16 | 131 | 177 | 94 | 83 | 15 | 2 |
-- 101524
import Data.List(genericLength, group, partition, sort, transpose)
-- XXX community chest and chance are only shuffled once, not a random draw
-- XXX current markov state can't handle that accurately
nn = 3
dd = 4 -- XXX figure out why this doesn't give the right result for 6
-- matrix converges to correct answer after 20 iterations
ii = 50
-- square names
squareNames = ["GO", "A1", "CC1", "A2", "T1",
"R1", "B1", "CH1", "B2", "B3",
"JAIL", "C1", "U1", "C2", "C3",
"R2", "D1", "CC2", "D2", "D3",
"FP", "E1", "CH2", "E2", "E3",
"R3", "F1", "F2", "U2", "F3",
"G2J", "G1", "G2", "CC3", "G3",
"R4", "CH3", "H1", "T2", "H2"]
nameToSquare n = case lookup n $ zip squareNames [0..] of
Nothing -> error "nameToSquare: invalid name"
Just i -> i
-- given a starting square, distance to advance,
-- whether it was doubles, and probability
-- return a list of visited squares, their probabilities,
-- and whether the visited square the last
advance s ((r,d),p)
| s2 == nameToSquare "G2J" = [((jail, p), True)]
| s2 `elem` (map nameToSquare ["CC1","CC2","CC3"]) = chest
| s2 `elem` [ch1,ch2,ch3] = chance
| otherwise = [((s2, p), d)]
where s2 = (s + r) `mod` length squareNames
chest = [((go, p/16), d),
((jail, p/16), True),
((s2, 14*p/16), d)]
chance = [((go, p/16), d),
((jail, p/16), True),
((nameToSquare "C1", p/16), d),
((nameToSquare "E3", p/16), d),
((nameToSquare "H2", p/16), d),
((r1, p/16), d),
((nextRailroad, 2*p/16), d),
((nextUtility, p/16), d),
((s2-3, p/16), d), -- cannot wrap
((s2, 6*p/16), d)]
nextRailroad = if s2 == ch1 then nameToSquare "R2" else
if s2 == ch2 then nameToSquare "R3" else r1
nextUtility = if s2 == ch1 || s2 == ch3 then
nameToSquare "U1" else nameToSquare "U2"
[jail,go,ch1,ch2,ch3,r1] = map nameToSquare
["JAIL","GO","CH1","CH2","CH3","R1"]
-- given two dice with n sides
-- return a list of ((distance, is doubles), probability)
rollProbs n p = map listToProb $ group $ sort ds
where ds = [(a+b, a==b) | a <- [1..n], b <- [1..n]]
listToProb xs = (head xs, p * genericLength xs / genericLength ds)
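-- Worked example (added for illustration, easy to check by hand): for two
-- 2-sided dice,
--   rollProbs 2 1 == [((2,True),0.25), ((3,False),0.5), ((4,True),0.25)]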
-- simulate a single turn from the given starting square
-- produce a non-unique list of (ending square, probability) values
oneRoll n (s,p) = (map fst d, map fst m)
where (d,m) = partition snd $ concatMap (advance s) (rollProbs n p)
manyRolls n xs = (concat d, concat m)
where (d,m) = unzip $ map (oneRoll n) xs
oneTurn n s = m1 ++ m2 ++ m3 ++ mj
where (d1,m1) = manyRolls n [(s,1)]
(d2,m2) = manyRolls n d1
(js,m3) = manyRolls n d2
mj = [(nameToSquare "JAIL", sum $ map snd js)]
probsForSquare d s = map getProb [0..length squareNames - 1]
where getProb x = sum $ map snd $ filter (\(v,_) -> x==v) $ oneTurn d s
-- build an initial markov state space, and run it for a number of iterations
-- XXX detect rather than presume convergence
runMarkov d t = iterate matrixMult m !! t
where m = map (probsForSquare d) [0..length squareNames - 1]
matrixMult a = [[sum $ zipWith (*) (m !! i) (transpose a !! j) |
j <- [0..length m-1]] | i <- [0..length m-1]]
-- sort the converged markov state space by square
-- extract the top n squares, and combine them into a single number
-- JAIL is 10 and always the most popular, so ignore zero-padding
popularSquares n d i = sum $ zipWith (*) (iterate (*100) 1) $
reverse $ take n $ reverse $ map snd $
sort $ zip (head $ runMarkov d i) [0..]
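-- With the parameters above (nn = 3, dd = 4, ii = 50) this should combine
-- squares 10 (JAIL), 15 (R2) and 24 (E3) into 101524, the answer noted in
-- the header comment.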
main = putStrLn $ show $ popularSquares nn dd ii
|
higgsd/euler
|
hs/84.hs
|
bsd-2-clause
| 4,011 | 0 | 15 | 1,248 | 1,430 | 807 | 623 | 62 | 4 |
{-# LANGUAGE OverloadedStrings #-}
-- | 'GenericPackageDescription' Field descriptions
module Distribution.PackageDescription.Parsec.FieldDescr (
-- * Package description
pkgDescrFieldDescrs,
storeXFieldsPD,
-- * Library
libFieldDescrs,
storeXFieldsLib,
-- * Foreign library
foreignLibFieldDescrs,
storeXFieldsForeignLib,
-- * Executable
executableFieldDescrs,
storeXFieldsExe,
-- * Test suite
TestSuiteStanza (..),
emptyTestStanza,
testSuiteFieldDescrs,
storeXFieldsTest,
validateTestSuite,
-- * Benchmark
BenchmarkStanza (..),
emptyBenchmarkStanza,
benchmarkFieldDescrs,
storeXFieldsBenchmark,
validateBenchmark,
-- * Flag
flagFieldDescrs,
-- * Source repository
sourceRepoFieldDescrs,
-- * Setup build info
setupBInfoFieldDescrs,
) where
import Prelude ()
import Distribution.Compat.Prelude
import qualified Data.ByteString as BS
import Data.List (dropWhileEnd)
import qualified Distribution.Compat.Parsec as Parsec
import Distribution.Compiler (CompilerFlavor (..))
import Distribution.ModuleName (ModuleName)
import Distribution.Package
import Distribution.PackageDescription
import Distribution.Types.ForeignLib
import Distribution.Parsec.Class
import Distribution.Parsec.Types.Common
import Distribution.Parsec.Types.FieldDescr
import Distribution.Parsec.Types.ParseResult
import Distribution.PrettyUtils
import Distribution.Simple.Utils (fromUTF8BS)
import Distribution.Text (disp, display)
import Text.PrettyPrint (vcat)
-------------------------------------------------------------------------------
-- common FieldParsers
-------------------------------------------------------------------------------
-- | This is /almost/ @'many' 'Distribution.Compat.Parsec.anyChar'@, but it
--
-- * trims whitespace from ends of the lines,
--
-- * converts lines with only single dot into empty line.
--
freeTextFieldParser :: FieldParser String
freeTextFieldParser = dropDotLines <$ Parsec.spaces <*> many Parsec.anyChar
where
-- Example package with dot lines
-- http://hackage.haskell.org/package/copilot-cbmc-0.1/copilot-cbmc.cabal
dropDotLines "." = "."
dropDotLines x = intercalate "\n" . map dotToEmpty . lines $ x
dotToEmpty x | trim' x == "." = ""
dotToEmpty x = trim x
trim' = dropWhileEnd (`elem` (" \t" :: String))
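    -- For illustration (added, not from the original source): a field body of
    --   "First paragraph\n.\nSecond paragraph"
    -- becomes
    --   "First paragraph\n\nSecond paragraph"
    -- while a field consisting of a single "." is left unchanged.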
-------------------------------------------------------------------------------
-- PackageDescription
-------------------------------------------------------------------------------
-- TODO: other-files isn't used in any cabal file on Hackage.
pkgDescrFieldDescrs :: [FieldDescr PackageDescription]
pkgDescrFieldDescrs =
[ simpleField "name"
disp parsec
packageName (\name pkg -> pkg{package=(package pkg){pkgName=name}})
, simpleField "version"
disp parsec
packageVersion (\ver pkg -> pkg{package=(package pkg){pkgVersion=ver}})
, simpleField "cabal-version"
(either disp disp) (Left <$> parsec <|> Right <$> parsec)
specVersionRaw (\v pkg -> pkg{specVersionRaw=v})
, simpleField "build-type"
(maybe mempty disp) (Just <$> parsec)
buildType (\t pkg -> pkg{buildType=t})
, simpleField "license"
disp (parsecMaybeQuoted parsec)
license (\l pkg -> pkg{license=l})
, simpleField "license-file"
showFilePath parsecFilePath
(\pkg -> case licenseFiles pkg of
[x] -> x
_ -> "")
(\l pkg -> pkg{licenseFiles=licenseFiles pkg ++ [l]})
-- We have both 'license-file' and 'license-files' fields.
-- Rather than declaring license-file to be deprecated, we will continue
-- to allow both. The 'license-file' will continue to only allow single
-- tokens, while 'license-files' allows multiple. On pretty-printing, we
-- will use 'license-file' if there's just one, and use 'license-files'
-- otherwise.
, listField "license-files"
showFilePath parsecFilePath
(\pkg -> case licenseFiles pkg of
[_] -> []
xs -> xs)
(\ls pkg -> pkg{licenseFiles=ls})
, simpleField "copyright"
showFreeText freeTextFieldParser
copyright (\val pkg -> pkg{copyright=val})
, simpleField "maintainer"
showFreeText freeTextFieldParser
maintainer (\val pkg -> pkg{maintainer=val})
, simpleField "stability"
showFreeText freeTextFieldParser
stability (\val pkg -> pkg{stability=val})
, simpleField "homepage"
showFreeText freeTextFieldParser
homepage (\val pkg -> pkg{homepage=val})
, simpleField "package-url"
showFreeText freeTextFieldParser
pkgUrl (\val pkg -> pkg{pkgUrl=val})
, simpleField "bug-reports"
showFreeText freeTextFieldParser
bugReports (\val pkg -> pkg{bugReports=val})
, simpleField "synopsis"
showFreeText freeTextFieldParser
synopsis (\val pkg -> pkg{synopsis=val})
, simpleField "description"
showFreeText freeTextFieldParser
description (\val pkg -> pkg{description=val})
, simpleField "category"
showFreeText freeTextFieldParser
category (\val pkg -> pkg{category=val})
, simpleField "author"
showFreeText freeTextFieldParser
author (\val pkg -> pkg{author=val})
, listField "tested-with"
showTestedWith parsecTestedWith
testedWith (\val pkg -> pkg{testedWith=val})
, listFieldWithSep vcat "data-files"
showFilePath parsecFilePath
dataFiles (\val pkg -> pkg{dataFiles=val})
, simpleField "data-dir"
showFilePath parsecFilePath
dataDir (\val pkg -> pkg{dataDir=val})
, listFieldWithSep vcat "extra-source-files"
showFilePath parsecFilePath
extraSrcFiles (\val pkg -> pkg{extraSrcFiles=val})
, listFieldWithSep vcat "extra-tmp-files"
showFilePath parsecFilePath
extraTmpFiles (\val pkg -> pkg{extraTmpFiles=val})
, listFieldWithSep vcat "extra-doc-files"
showFilePath parsecFilePath
extraDocFiles (\val pkg -> pkg{extraDocFiles=val})
]
-- | Store any fields beginning with "x-" in the customFields field of
-- a PackageDescription. All other fields will generate a warning.
storeXFieldsPD :: UnknownFieldParser PackageDescription
storeXFieldsPD f val pkg | beginsWithX f =
Just pkg { customFieldsPD = customFieldsPD pkg ++ [(fromUTF8BS f, trim val)] }
storeXFieldsPD _ _ _ = Nothing
-------------------------------------------------------------------------------
-- Library
-------------------------------------------------------------------------------
libFieldDescrs :: [FieldDescr Library]
libFieldDescrs =
[ listFieldWithSep vcat "exposed-modules" disp (parsecMaybeQuoted parsec)
exposedModules (\mods lib -> lib{exposedModules=mods})
, commaListFieldWithSep vcat "reexported-modules" disp parsec
reexportedModules (\mods lib -> lib{reexportedModules=mods})
, listFieldWithSep vcat "signatures" disp (parsecMaybeQuoted parsec)
signatures (\mods lib -> lib{signatures=mods})
, boolField "exposed"
libExposed (\val lib -> lib{libExposed=val})
] ++ map biToLib binfoFieldDescrs
where
biToLib = liftField libBuildInfo (\bi lib -> lib{libBuildInfo=bi})
storeXFieldsLib :: UnknownFieldParser Library
storeXFieldsLib f val l@Library { libBuildInfo = bi } | beginsWithX f =
Just $ l {libBuildInfo =
bi{ customFieldsBI = customFieldsBI bi ++ [(fromUTF8BS f, trim val)]}}
storeXFieldsLib _ _ _ = Nothing
-------------------------------------------------------------------------------
-- Foreign library
-------------------------------------------------------------------------------
foreignLibFieldDescrs :: [FieldDescr ForeignLib]
foreignLibFieldDescrs =
[ simpleField "type"
disp parsec
foreignLibType (\x flib -> flib { foreignLibType = x })
, listField "options"
disp parsec
foreignLibOptions (\x flib -> flib { foreignLibOptions = x })
, simpleField "lib-version-info"
(maybe mempty disp) (Just <$> parsec)
foreignLibVersionInfo (\x flib -> flib { foreignLibVersionInfo = x })
, simpleField "lib-version-linux"
(maybe mempty disp) (Just <$> parsec)
foreignLibVersionLinux (\x flib -> flib { foreignLibVersionLinux = x })
, listField "mod-def-file"
showFilePath parsecFilePath
foreignLibModDefFile (\x flib -> flib { foreignLibModDefFile = x })
] ++ map biToFLib binfoFieldDescrs
where
biToFLib = liftField foreignLibBuildInfo (\bi flib -> flib{foreignLibBuildInfo=bi})
storeXFieldsForeignLib :: UnknownFieldParser ForeignLib
storeXFieldsForeignLib f val l@ForeignLib { foreignLibBuildInfo = bi } | beginsWithX f =
Just $ l {foreignLibBuildInfo =
bi{ customFieldsBI = customFieldsBI bi ++ [(fromUTF8BS f, trim val)]}}
storeXFieldsForeignLib _ _ _ = Nothing
-------------------------------------------------------------------------------
-- Executable
-------------------------------------------------------------------------------
executableFieldDescrs :: [FieldDescr Executable]
executableFieldDescrs =
[ -- note ordering: configuration must come first, for
-- showPackageDescription.
simpleField "executable"
disp parsec
exeName (\xs exe -> exe{exeName=xs})
, simpleField "main-is"
showFilePath parsecFilePath
modulePath (\xs exe -> exe{modulePath=xs})
, simpleField "scope"
disp parsec
exeScope (\sc exe -> exe{exeScope=sc})
]
++ map biToExe binfoFieldDescrs
where
biToExe = liftField buildInfo (\bi exe -> exe{buildInfo=bi})
storeXFieldsExe :: UnknownFieldParser Executable
storeXFieldsExe f val e@Executable { buildInfo = bi } | beginsWithX f =
Just $ e {buildInfo = bi{ customFieldsBI = (fromUTF8BS f, trim val) : customFieldsBI bi}}
storeXFieldsExe _ _ _ = Nothing
-------------------------------------------------------------------------------
-- TestSuite
-------------------------------------------------------------------------------
-- | An intermediate type just used for parsing the test-suite stanza.
-- After validation it is converted into the proper 'TestSuite' type.
data TestSuiteStanza = TestSuiteStanza
{ testStanzaTestType :: Maybe TestType
, testStanzaMainIs :: Maybe FilePath
, testStanzaTestModule :: Maybe ModuleName
, testStanzaBuildInfo :: BuildInfo
}
emptyTestStanza :: TestSuiteStanza
emptyTestStanza = TestSuiteStanza Nothing Nothing Nothing mempty
testSuiteFieldDescrs :: [FieldDescr TestSuiteStanza]
testSuiteFieldDescrs =
[ simpleField "type"
(maybe mempty disp) (Just <$> parsec)
testStanzaTestType (\x suite -> suite { testStanzaTestType = x })
, simpleField "main-is"
(maybe mempty showFilePath) (Just <$> parsecFilePath)
testStanzaMainIs (\x suite -> suite { testStanzaMainIs = x })
, simpleField "test-module"
(maybe mempty disp) (Just <$> parsecMaybeQuoted parsec)
testStanzaTestModule (\x suite -> suite { testStanzaTestModule = x })
]
++ map biToTest binfoFieldDescrs
where
biToTest = liftField
testStanzaBuildInfo
(\bi suite -> suite { testStanzaBuildInfo = bi })
storeXFieldsTest :: UnknownFieldParser TestSuiteStanza
storeXFieldsTest f val t@TestSuiteStanza { testStanzaBuildInfo = bi }
| beginsWithX f =
Just $ t {testStanzaBuildInfo = bi{ customFieldsBI = (fromUTF8BS f,val):customFieldsBI bi}}
storeXFieldsTest _ _ _ = Nothing
validateTestSuite :: Position -> TestSuiteStanza -> ParseResult TestSuite
validateTestSuite pos stanza = case testStanzaTestType stanza of
Nothing -> return $
emptyTestSuite { testBuildInfo = testStanzaBuildInfo stanza }
Just tt@(TestTypeUnknown _ _) ->
pure emptyTestSuite
{ testInterface = TestSuiteUnsupported tt
, testBuildInfo = testStanzaBuildInfo stanza
}
Just tt | tt `notElem` knownTestTypes ->
pure emptyTestSuite
{ testInterface = TestSuiteUnsupported tt
, testBuildInfo = testStanzaBuildInfo stanza
}
Just tt@(TestTypeExe ver) -> case testStanzaMainIs stanza of
Nothing -> do
parseFailure pos (missingField "main-is" tt)
pure emptyTestSuite
Just file -> do
when (isJust (testStanzaTestModule stanza)) $
parseWarning pos PWTExtraBenchmarkModule (extraField "test-module" tt)
pure emptyTestSuite
{ testInterface = TestSuiteExeV10 ver file
, testBuildInfo = testStanzaBuildInfo stanza
}
Just tt@(TestTypeLib ver) -> case testStanzaTestModule stanza of
Nothing -> do
parseFailure pos (missingField "test-module" tt)
pure emptyTestSuite
Just module_ -> do
when (isJust (testStanzaMainIs stanza)) $
parseWarning pos PWTExtraMainIs (extraField "main-is" tt)
pure emptyTestSuite
{ testInterface = TestSuiteLibV09 ver module_
, testBuildInfo = testStanzaBuildInfo stanza
}
where
missingField name tt = "The '" ++ name ++ "' field is required for the "
++ display tt ++ " test suite type."
extraField name tt = "The '" ++ name ++ "' field is not used for the '"
++ display tt ++ "' test suite type."
-------------------------------------------------------------------------------
-- Benchmark
-------------------------------------------------------------------------------
-- | An intermediate type just used for parsing the benchmark stanza.
-- After validation it is converted into the proper 'Benchmark' type.
data BenchmarkStanza = BenchmarkStanza
{ benchmarkStanzaBenchmarkType :: Maybe BenchmarkType
, benchmarkStanzaMainIs :: Maybe FilePath
, benchmarkStanzaBenchmarkModule :: Maybe ModuleName
, benchmarkStanzaBuildInfo :: BuildInfo
}
emptyBenchmarkStanza :: BenchmarkStanza
emptyBenchmarkStanza = BenchmarkStanza Nothing Nothing Nothing mempty
benchmarkFieldDescrs :: [FieldDescr BenchmarkStanza]
benchmarkFieldDescrs =
[ simpleField "type"
(maybe mempty disp) (Just <$> parsec)
benchmarkStanzaBenchmarkType
(\x suite -> suite { benchmarkStanzaBenchmarkType = x })
, simpleField "main-is"
(maybe mempty showFilePath) (Just <$> parsecFilePath)
benchmarkStanzaMainIs
(\x suite -> suite { benchmarkStanzaMainIs = x })
]
++ map biToBenchmark binfoFieldDescrs
where
biToBenchmark = liftField benchmarkStanzaBuildInfo
(\bi suite -> suite { benchmarkStanzaBuildInfo = bi })
storeXFieldsBenchmark :: UnknownFieldParser BenchmarkStanza
storeXFieldsBenchmark f val t@BenchmarkStanza { benchmarkStanzaBuildInfo = bi } | beginsWithX f =
Just $ t {benchmarkStanzaBuildInfo =
bi{ customFieldsBI = (fromUTF8BS f, trim val):customFieldsBI bi}}
storeXFieldsBenchmark _ _ _ = Nothing
validateBenchmark :: Position -> BenchmarkStanza -> ParseResult Benchmark
validateBenchmark pos stanza = case benchmarkStanzaBenchmarkType stanza of
Nothing -> pure emptyBenchmark
{ benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza }
Just tt@(BenchmarkTypeUnknown _ _) -> pure emptyBenchmark
{ benchmarkInterface = BenchmarkUnsupported tt
, benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza
}
Just tt | tt `notElem` knownBenchmarkTypes -> pure emptyBenchmark
{ benchmarkInterface = BenchmarkUnsupported tt
, benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza
}
Just tt@(BenchmarkTypeExe ver) -> case benchmarkStanzaMainIs stanza of
Nothing -> do
parseFailure pos (missingField "main-is" tt)
pure emptyBenchmark
Just file -> do
when (isJust (benchmarkStanzaBenchmarkModule stanza)) $
parseWarning pos PWTExtraBenchmarkModule (extraField "benchmark-module" tt)
pure emptyBenchmark
{ benchmarkInterface = BenchmarkExeV10 ver file
, benchmarkBuildInfo = benchmarkStanzaBuildInfo stanza
}
where
missingField name tt = "The '" ++ name ++ "' field is required for the "
++ display tt ++ " benchmark type."
extraField name tt = "The '" ++ name ++ "' field is not used for the '"
++ display tt ++ "' benchmark type."
-------------------------------------------------------------------------------
-- BuildInfo
-------------------------------------------------------------------------------
binfoFieldDescrs :: [FieldDescr BuildInfo]
binfoFieldDescrs =
[ boolField "buildable"
buildable (\val binfo -> binfo{buildable=val})
, commaListField "build-tools"
disp parsec
buildTools (\xs binfo -> binfo{buildTools=xs})
, commaListField "build-tool-depends"
disp parsec
buildToolDepends (\xs binfo -> binfo{buildToolDepends=xs})
, commaListFieldWithSep vcat "build-depends"
disp parsec
targetBuildDepends (\xs binfo -> binfo{targetBuildDepends=xs})
, commaListFieldWithSep vcat "mixins"
disp parsec
mixins (\xs binfo -> binfo{mixins=xs})
, spaceListField "cpp-options"
showToken parsecToken'
cppOptions (\val binfo -> binfo{cppOptions=val})
, spaceListField "cc-options"
showToken parsecToken'
ccOptions (\val binfo -> binfo{ccOptions=val})
, spaceListField "ld-options"
showToken parsecToken'
ldOptions (\val binfo -> binfo{ldOptions=val})
, commaListField "pkgconfig-depends"
disp parsec
pkgconfigDepends (\xs binfo -> binfo{pkgconfigDepends=xs})
, listField "frameworks"
showToken parsecToken
frameworks (\val binfo -> binfo{frameworks=val})
, listField "extra-framework-dirs"
showToken parsecFilePath
extraFrameworkDirs (\val binfo -> binfo{extraFrameworkDirs=val})
, listFieldWithSep vcat "c-sources"
showFilePath parsecFilePath
cSources (\paths binfo -> binfo{cSources=paths})
, listFieldWithSep vcat "js-sources"
showFilePath parsecFilePath
jsSources (\paths binfo -> binfo{jsSources=paths})
, simpleField "default-language"
(maybe mempty disp) (Parsec.optionMaybe $ parsecMaybeQuoted parsec)
defaultLanguage (\lang binfo -> binfo{defaultLanguage=lang})
, listField "other-languages"
disp (parsecMaybeQuoted parsec)
otherLanguages (\langs binfo -> binfo{otherLanguages=langs})
, listField "default-extensions"
disp (parsecMaybeQuoted parsec)
defaultExtensions (\exts binfo -> binfo{defaultExtensions=exts})
, listField "other-extensions"
disp (parsecMaybeQuoted parsec)
otherExtensions (\exts binfo -> binfo{otherExtensions=exts})
, listField "extensions"
        -- TODO: this is a deprecated field, isn't it?
disp (parsecMaybeQuoted parsec)
oldExtensions (\exts binfo -> binfo{oldExtensions=exts})
, listFieldWithSep vcat "extra-libraries"
showToken parsecToken
extraLibs (\xs binfo -> binfo{extraLibs=xs})
, listFieldWithSep vcat "extra-ghci-libraries"
showToken parsecToken
extraGHCiLibs (\xs binfo -> binfo{extraGHCiLibs=xs})
, listField "extra-lib-dirs"
showFilePath parsecFilePath
extraLibDirs (\xs binfo -> binfo{extraLibDirs=xs})
, listFieldWithSep vcat "includes"
showFilePath parsecFilePath
includes (\paths binfo -> binfo{includes=paths})
, listFieldWithSep vcat "install-includes"
showFilePath parsecFilePath
installIncludes (\paths binfo -> binfo{installIncludes=paths})
, listField "include-dirs"
showFilePath parsecFilePath
includeDirs (\paths binfo -> binfo{includeDirs=paths})
, listField "hs-source-dirs"
showFilePath parsecFilePath
hsSourceDirs (\paths binfo -> binfo{hsSourceDirs=paths})
, deprecatedField "hs-source-dirs" $ listField "hs-source-dir"
showFilePath parsecFilePath
(const []) (\paths binfo -> binfo{hsSourceDirs=paths})
, listFieldWithSep vcat "other-modules"
disp (parsecMaybeQuoted parsec)
otherModules (\val binfo -> binfo{otherModules=val})
, listFieldWithSep vcat "autogen-modules"
disp (parsecMaybeQuoted parsec)
autogenModules (\val binfo -> binfo{autogenModules=val})
, optsField "ghc-prof-options" GHC
profOptions (\val binfo -> binfo{profOptions=val})
, optsField "ghcjs-prof-options" GHCJS
profOptions (\val binfo -> binfo{profOptions=val})
, optsField "ghc-shared-options" GHC
sharedOptions (\val binfo -> binfo{sharedOptions=val})
, optsField "ghcjs-shared-options" GHCJS
sharedOptions (\val binfo -> binfo{sharedOptions=val})
, optsField "ghc-options" GHC
options (\path binfo -> binfo{options=path})
, optsField "ghcjs-options" GHCJS
options (\path binfo -> binfo{options=path})
, optsField "jhc-options" JHC
options (\path binfo -> binfo{options=path})
-- NOTE: Hugs and NHC are not supported anymore, but these fields are kept
-- around for backwards compatibility.
--
-- TODO: deprecate?
, optsField "hugs-options" Hugs
options (const id)
, optsField "nhc98-options" NHC
options (const id)
]
{-
storeXFieldsBI :: UnknownFieldParser BuildInfo
--storeXFieldsBI (f@('x':'-':_),val) bi = Just bi{ customFieldsBI = (f,val):customFieldsBI bi }
storeXFieldsBI _ _ = Nothing
-}
-------------------------------------------------------------------------------
-- Flag
-------------------------------------------------------------------------------
flagFieldDescrs :: [FieldDescr Flag]
flagFieldDescrs =
[ simpleField "description"
showFreeText freeTextFieldParser
flagDescription (\val fl -> fl{ flagDescription = val })
, boolField "default"
flagDefault (\val fl -> fl{ flagDefault = val })
, boolField "manual"
flagManual (\val fl -> fl{ flagManual = val })
]
-------------------------------------------------------------------------------
-- SourceRepo
-------------------------------------------------------------------------------
sourceRepoFieldDescrs :: [FieldDescr SourceRepo]
sourceRepoFieldDescrs =
[ simpleField "type"
(maybe mempty disp) (Just <$> parsec)
repoType (\val repo -> repo { repoType = val })
, simpleField "location"
(maybe mempty showFreeText) (Just <$> freeTextFieldParser)
repoLocation (\val repo -> repo { repoLocation = val })
, simpleField "module"
(maybe mempty showToken) (Just <$> parsecToken)
repoModule (\val repo -> repo { repoModule = val })
, simpleField "branch"
(maybe mempty showToken) (Just <$> parsecToken)
repoBranch (\val repo -> repo { repoBranch = val })
, simpleField "tag"
(maybe mempty showToken) (Just <$> parsecToken)
repoTag (\val repo -> repo { repoTag = val })
, simpleField "subdir"
(maybe mempty showFilePath) (Just <$> parsecFilePath)
repoSubdir (\val repo -> repo { repoSubdir = val })
]
-------------------------------------------------------------------------------
-- SetupBuildInfo
-------------------------------------------------------------------------------
setupBInfoFieldDescrs :: [FieldDescr SetupBuildInfo]
setupBInfoFieldDescrs =
[ commaListFieldWithSep vcat "setup-depends"
disp parsec
setupDepends (\xs binfo -> binfo{setupDepends=xs})
]
-------------------------------------------------------------------------------
-- Utilities
-------------------------------------------------------------------------------
-- | Predicate to test field names beginning with "x-"
beginsWithX :: FieldName -> Bool
beginsWithX bs = BS.take 2 bs == "x-"
-- | Mark the field as deprecated.
deprecatedField
:: FieldName -- ^ alternative field
-> FieldDescr a
-> FieldDescr a
deprecatedField newFieldName fd = FieldDescr
{ fieldName = oldFieldName
, fieldPretty = const mempty -- we don't print deprecated field
, fieldParser = \x -> do
parsecWarning PWTDeprecatedField $
"The field " <> show oldFieldName <>
" is deprecated, please use " <> show newFieldName
fieldParser fd x
}
where
oldFieldName = fieldName fd
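-- For instance, 'binfoFieldDescrs' above wraps the old "hs-source-dir" field as
-- @deprecatedField "hs-source-dirs" (listField "hs-source-dir" ...)@: the old
-- spelling still parses, but a 'PWTDeprecatedField' warning suggests
-- "hs-source-dirs" instead, and the deprecated field is never pretty-printed.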
-- Used to trim x-fields
trim :: String -> String
trim = dropWhile isSpace . dropWhileEnd isSpace
|
mydaum/cabal
|
Cabal/Distribution/PackageDescription/Parsec/FieldDescr.hs
|
bsd-3-clause
| 26,793 | 0 | 18 | 7,379 | 5,562 | 3,054 | 2,508 | 454 | 7 |
{-# LANGUAGE OverloadedStrings #-}
module ReadXLSX.Internal
where
import Codec.Xlsx
import Codec.Xlsx.Formatted
import Control.Lens
import Data.Aeson.Types (Value, Value (Number), Value (String),
Value (Bool), Value (Null))
import qualified Data.ByteString.Lazy as L
import Data.Either.Extra (fromLeft', fromRight', isRight)
import Data.List (elemIndex, elemIndices)
import Data.Map (Map)
import qualified Data.Map as DM
import Data.Maybe (fromJust, fromMaybe, isJust, isNothing)
import Data.Scientific (floatingOrInteger, fromFloatDigits)
import qualified Data.Set as DS
import Data.Text (Text, pack)
import qualified Data.Text as T
import Empty (emptyCell, emptyFormattedCell)
import ExcelDates (intToDate)
import TextShow (showt)
showInt :: Int -> Text
showInt = pack . show
type FormattedCellMap = Map (Int, Int) FormattedCell
cellValueToValue :: Maybe CellValue -> Value
cellValueToValue cellvalue =
case cellvalue of
Just (CellDouble x) -> Number (fromFloatDigits x)
Just (CellText x) -> String x
Just (CellBool x) -> Bool x
Nothing -> Null
Just (CellRich x) -> String (T.concat $ _richTextRunText <$> x)
cellToCellValue :: Cell -> Value
cellToCellValue = cellValueToValue . _cellValue
hasDateFormat :: FormattedCell -> Bool
hasDateFormat fcell =
case view formatNumberFormat $ view formattedFormat fcell of
Just (StdNumberFormat x) -> x `elem` [NfMmDdYy, NfDMmmYy, NfDMmm,
NfMmmYy, NfHMm12Hr, NfHMmSs12Hr,
NfHMm, NfHMmSs, NfMdyHMm]
Just (UserNumberFormat x) -> x `elem` ["yyyy\\-mm\\-dd;@", "[$-F800]dddd\\,\\ mmmm\\ dd\\,\\ yyyy",
"d/mm/yyyy;@", "d/mm/yy;@",
"dd\\.mm\\.yy;@", "yy/mm/dd;@",
"dd\\-mm\\-yy;@", "dd/mm/yyyy;@",
"[$-80C]dddd\\ d\\ mmmm\\ yyyy;@",
"[$-80C]d\\ mmmm\\ yyyy;@",
"[$-80C]dd\\-mmm\\-yy;@", "m/d;@", "m/d/yy;@", "mm/dd/yy;@",
"[$-409]d\\-mmm\\-yy;@", "[$-409]dd\\-mmm\\-yy;@",
"[$-409]mmm\\-yy;@", "[$-409]mmmm\\-yy;@", "[$-409]mmmm\\ d\\,\\ yyyy;@",
"[$-409]m/d/yy\\ h:mm\\ AM/PM;@", "m/d/yy\\ h:mm;@", "m/d/yy\\ h:mm;@",
"[$-409]d\\-mmm\\-yyyy;@", "[$-409]mmmmm\\-yy;@", "[$-409]mmmmm;@"]
Nothing -> False
isValidDateCell :: FormattedCell -> Bool
isValidDateCell fcell =
if hasDateFormat fcell
then
case (_cellValue . _formattedCell) fcell of
Just (CellDouble _) -> True
Nothing -> True
_ -> False
else
False
fcellToCellFormat :: FormattedCell -> Value
fcellToCellFormat fcell =
case view formatNumberFormat $ view formattedFormat fcell of
Just (StdNumberFormat x) -> String (pack (show x))
Just (UserNumberFormat x) -> String x
Nothing -> Null
fcellToCellType :: FormattedCell -> Value
fcellToCellType fcell
| isNothing cellvalue = Null
| isValidDateCell fcell = String "date"
| otherwise =
case cellvalue of
Just (CellDouble _) -> String "number"
Just (CellText _) -> String "text"
Just (CellBool _) -> String "boolean"
Just (CellRich _) -> String "richtext"
where cellvalue = (_cellValue . _formattedCell) fcell
fcellToCellValue :: FormattedCell -> Value
fcellToCellValue fcell =
if hasDateFormat fcell
then
case _cellValue cell of
Just (CellDouble x) -> String (intToDate $ round x)
_ -> cellToCellValue cell
-- Nothing -> Null
      -- _ -> String "anomalous date detected!" -- problem in Walter's file
else
cellToCellValue cell
where cell = _formattedCell fcell
--
-- COMMENTS
--
commentTextAsValue :: XlsxText -> Value
commentTextAsValue comment =
case comment of
XlsxText text -> String text
XlsxRichText richtextruns -> String (T.concat $ _richTextRunText <$> richtextruns)
cellToCommentValue :: Cell -> Value
cellToCommentValue cell =
case _cellComment cell of
Just comment -> commentTextAsValue $ _commentText comment
Nothing -> Null
fcellToCellComment :: FormattedCell -> Value
fcellToCellComment = cellToCommentValue . _formattedCell
--
-- filters
--
cleanCellMap :: CellMap -> CellMap
cleanCellMap = DM.filter (\cell -> (isJust . _cellValue) cell || (isJust . _cellComment) cell)
isNonEmptyWorksheet :: Worksheet -> Bool
isNonEmptyWorksheet ws = cleanCellMap (_wsCells ws) /= DM.empty
getNonEmptySheets :: Xlsx -> Map Text Worksheet
getNonEmptySheets xlsx = DM.fromList $ filter (\sheet -> isNonEmptyWorksheet (snd sheet)) (_xlSheets xlsx)
filterCellMap :: Maybe Int -> Maybe Int -> CellMap -> CellMap
filterCellMap firstRow lastRow = DM.filterWithKey f
where f (i,j) cell = i >= fr && i <= lr && (isJust . _cellValue) cell
fr = fromMaybe 1 firstRow
lr = fromMaybe (maxBound::Int) lastRow
filterFormattedCellMap :: Maybe Int -> Maybe Int -> FormattedCellMap -> FormattedCellMap
filterFormattedCellMap firstRow lastRow = DM.filterWithKey f
where f (i,j) fcell = i >= fr && i <= lr && ((isJust . _cellValue) cell || (isJust . _cellComment) cell)
where cell = _formattedCell fcell
fr = fromMaybe 1 firstRow
lr = fromMaybe (maxBound::Int) lastRow
cleanFormattedCellMap :: FormattedCellMap -> FormattedCellMap
cleanFormattedCellMap = DM.filter (\fcell -> (isJust . _cellValue . _formattedCell) fcell || (isJust . _cellComment . _formattedCell) fcell)
--
-- read files
--
getXlsxAndStyleSheet :: FilePath -> IO (Xlsx, StyleSheet)
getXlsxAndStyleSheet file =
do
bs <- L.readFile file
let xlsx = toXlsx bs
let stylesheet = fromRight' $ parseStyleSheet $ _xlStyles xlsx
return (xlsx, stylesheet)
--
-- headers
--
valueToText :: Value -> Maybe Text
valueToText value =
case value of
(Number x) -> Just y
where y = if isRight z then showInt (fromRight' z) else showt (fromLeft' z)
z = floatingOrInteger x :: Either Float Int
(String a) -> Just a
(Bool a) -> Just (showt a)
Null -> Nothing
cellsRange :: Map (Int, Int) a -> ([Int], Int, Int)
cellsRange cells = (colRange, firstCol, firstRow)
where colRange = [firstCol .. maximum colCoords]
colCoords = map snd keys
firstCol = minimum colCoords
firstRow = minimum $ map fst keys
keys = DM.keys cells
getHeader :: CellMap -> Int -> Int -> Int -> Text
getHeader cells firstRow firstCol j =
fromMaybe (T.concat [pack "X", showInt (j-firstCol+1)]) $
valueToText . cellToCellValue $
fromMaybe emptyCell (DM.lookup (firstRow, j) cells)
colHeaders :: CellMap -> [Text]
colHeaders cells = map (getHeader cells firstRow firstCol) colRange
where (colRange, firstCol, firstRow) = cellsRange cells
colHeadersAsMap :: CellMap -> Map Int Text
colHeadersAsMap cells = DM.fromSet (getHeader cells firstRow firstCol) (DS.fromList colRange)
where (colRange, firstCol, firstRow) = cellsRange cells
getHeader2 :: FormattedCellMap -> Int -> Int -> Int -> Text
getHeader2 cells firstRow firstCol j =
fromMaybe (T.concat [pack "X", showInt (j-firstCol+1)]) $
valueToText . fcellToCellValue $
fromMaybe emptyFormattedCell (DM.lookup (firstRow, j) cells)
colHeaders2 :: FormattedCellMap -> Bool -> [Text]
colHeaders2 cells fix =
if fix
then
fixHeaders headers
else
headers
where headers = map (getHeader2 cells firstRow firstCol) colRange
where (colRange, firstCol, firstRow) = cellsRange cells
-- I should ALWAYS apply fixHeaders, because Aeson drops duplicate keys!!
fixHeaders :: [Text] -> [Text]
fixHeaders colnames =
case colnames of
[ _ ] -> colnames
_ -> (head out) : (fixHeaders $ tail out)
where out = map append $ zip colnames [0 .. length colnames - 1]
append (name,index) =
case index of
0 -> if indices /= [] then T.concat [name, pack "_", showInt 1] else name
_ -> if index `elem` indices
then
T.concat [name, pack "_", showInt $ 2 + fromJust (elemIndex index indices)]
else
name
where indices = map (+1) $ elemIndices x xs
x:xs = colnames
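-- For instance, @fixHeaders ["a", "a", "b"]@ yields @["a_1", "a_2", "b"]@,
-- while a single-element list is returned unchanged.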
|
stla/jsonxlsx
|
src/ReadXLSX/Internal.hs
|
bsd-3-clause
| 9,082 | 0 | 16 | 2,808 | 2,494 | 1,315 | 1,179 | 175 | 5 |
{-|
This module contains the datatype definitions of the Abstract Syntax Tree
-}
module NScriptParser.AST where
data Program
= DefStmt String Exp
| ExpStmt Exp
data Exp
= Let String Exp Exp
| Lambda String Exp
| FunApp Exp Exp
| If Exp Exp Exp
| Arith ArithOp Exp Exp
| Compare CompareOp Exp Exp
| IntLit Integer
| BoolLit Bool
| StrLit String
| Var String
deriving (Show, Eq)
data ArithOp
= Plus
| Minus
| Mul
| Div
deriving (Show, Eq)
data CompareOp
= Less
| LessEq
| Greater
| GreaterEq
| Equal
| NotEqual
deriving (Show, Eq)
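-- For example, a source expression such as @let x = 1 in x + 2@ would be
-- represented (up to the concrete syntax accepted by the parser, which lives
-- in a separate module) as
-- @Let "x" (IntLit 1) (Arith Plus (Var "x") (IntLit 2))@; a top-level
-- definition is wrapped in 'DefStmt', while a bare expression becomes 'ExpStmt'.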
|
grievejia/njuscript
|
src/NScriptParser/AST.hs
|
bsd-3-clause
| 695 | 0 | 6 | 266 | 168 | 98 | 70 | 30 | 0 |
module Main where
-- We should separate the actual algorithms
-- from the conditions that decide how long to iterate.
-- That would make everything much simpler, because
-- the termination conditions are only a side issue.
--
-- That way the algorithms themselves can be written
-- independently of the termination conditions.
--
-- The evaluation strategy (how lazy) would be implemented
-- in the driver functions.
-- 0. Data structures
data Optimisation = Optimisation { outerLoop :: [OuterLoop] } deriving (Show)
data OuterLoop =
OuterLoop { outerLoopResult :: Integer,
innerLoop :: InnerLoop } deriving (Show)
data InnerLoop = InnerLoop { items :: [Integer] } deriving (Show)
-- 1. The actual algorithms (dummies)
outerAlgorithm :: Integer -> Integer
outerAlgorithm x = 503*x `mod` 101 + 1
innerAlgorithm :: Integer -> Integer
innerAlgorithm x = 2*x + 1
extractQuintessenceForInnerLoop :: Integer -> Integer
extractQuintessenceForInnerLoop x = x + 1
-- 2. The termination condition
condition :: OuterLoop -> OuterLoop
condition (OuterLoop ores (InnerLoop xs)) =
OuterLoop ores (InnerLoop $ take maxStep $ takeWhile p xs)
where
    -- The condition goes here:
p x = True
    -- Maximum number of steps, in case p alone does not stop the iteration.
maxStep = 10
-- 3. The driving algorithm
-- These functions never need to be touched again,
-- except to control the evaluation strategy (laziness).
mkInnerLoop :: Integer -> InnerLoop
mkInnerLoop n = InnerLoop $ iterate innerAlgorithm n
mkOuterLoop :: Integer -> OuterLoop
mkOuterLoop n =
let res = outerAlgorithm n
in OuterLoop res (mkInnerLoop (extractQuintessenceForInnerLoop res))
-- This driver function is the one to call
-- to start the optimisation.
optimisation :: Optimisation
optimisation =
let start = 0
loop0 = condition (mkOuterLoop start)
f (OuterLoop ores (InnerLoop xs)) = condition (mkOuterLoop (last xs))
in Optimisation $ iterate f loop0
----------------------------------------------------------------
-- For illustration purposes:
-- the first 6 steps as above, written out explicitly.
optimisation_explicit :: Optimisation
optimisation_explicit =
let start = 0
lastInner (OuterLoop _ (InnerLoop xs)) = last xs
loop0 = condition (mkOuterLoop start)
loop1 = condition (mkOuterLoop (lastInner loop0))
loop2 = condition (mkOuterLoop (lastInner loop1))
loop3 = condition (mkOuterLoop (lastInner loop2))
loop4 = condition (mkOuterLoop (lastInner loop3))
loop5 = condition (mkOuterLoop (lastInner loop4))
loop6 = condition (mkOuterLoop (lastInner loop5))
in Optimisation [loop0, loop1, loop2, loop3, loop4, loop5, loop6]
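-- A minimal 'main' so the module can be run directly (the step count below is
-- arbitrary). Printing the whole 'Optimisation' value would never terminate,
-- because the list of outer loops is infinite, so only a prefix is shown.
main :: IO ()
main = mapM_ print (take 3 (outerLoop optimisation))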
|
energyflowanalysis/efa-2.1
|
sandbox/loops/Main.hs
|
bsd-3-clause
| 2,728 | 0 | 13 | 512 | 611 | 330 | 281 | 41 | 1 |
{-# LANGUAGE TypeSynonymInstances, MultiParamTypeClasses, FlexibleInstances, FlexibleContexts, UndecidableInstances #-}
module TypeError(TypeError(..), TypeErrorMonad, untypable, notRank1Types, tooMuchTypes) where
import Control.Monad.Error
data TypeError = TooMuchTypes [String]
| NotRank1Types [String]
| Untypable
| UnknownError String
instance Show TypeError where
  show (TooMuchTypes lst) = show lst ++ " are neither free nor act and are " ++
                            "not allowed to have user types"
show Untypable = "Term is untypable"
  show (NotRank1Types x) = "Variables " ++ show x ++ " are given non-rank-1 types"
show (UnknownError str) = "Unknown error: " ++ str
instance Error TypeError where
strMsg = UnknownError
type TypeErrorMonad = Either TypeError
untypable :: (MonadError TypeError m, Monad m) => m a
untypable = throwError Untypable
notRank1Types :: (MonadError TypeError m, Monad m) => [String] -> m a
notRank1Types = throwError . NotRank1Types
tooMuchTypes :: (MonadError TypeError m) => [String] -> m a
tooMuchTypes = throwError . TooMuchTypes
|
projedi/type-inference-rank2
|
src/TypeError.hs
|
bsd-3-clause
| 1,107 | 0 | 8 | 220 | 275 | 150 | 125 | 22 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
--------------------------------------------------------------------------
-- |
-- Module: Game.Waddle.ExportJS
-- Copyright: (c) 2015 Martin Grabmueller
-- License: BSD3
--
-- Maintainer: [email protected]
-- Stability: provisional
-- Portability: portable
--
-- The function 'exportJS' exports a WAD file into several JavaScript
-- files, where each file defines an object:
--
-- * One file for each level, called @level-E1M1.js@, @level-MAP03.js@
-- etc. Each file defines an object for the level, called
-- e.g. @level_E1M1@. Example:
--
-- > var level_E1M1 = {
-- > things: [ ... ],
-- > ...
-- > vertices: [
-- > {x:1088,y:-3680},
-- > ...
-- > ],
-- > ...
-- > };
--
-- * One file @levels.js@, which includes all levels and defines an
-- object called @levels@. Example:
--
-- > var levels = {"MAP01": level_MAP01, ..., "MAP32": level_MAP32};
--
-- * One file for textures, called @textures.js@, defining an object
-- @textures@. Example:
--
-- > var textures = {
-- > "AASHITTY": {name:"AASHITTY",width:64,height:64,patches:[
-- > {xoffset:0,yoffset:0,pname:0,stepdir:1,colormap:0}
-- > ]},
-- > ...
-- > };
--
-- * One file for flats (floors and ceilings), called @flats.js@,
-- defining an object @flats@. Example:
--
-- > var flats = {
-- > "BLOOD1":{name:"BLOOD1",data:[46,46,45,...]},
-- > ...
-- > };
--
-- * One file for sprites, called @sprites.js@, defining an object
-- @sprites@. No example, I think you get the idea!
--
-- * One file for patches, called @patches.js@, defining an object
-- @patches@.
--
-- * One file for pnames, called @pnames.js@, defining a list @pnames@.
--
-- * One file for palettes, called @palettes.js@, defining a list of
-- lists @palettes@.
--
-- * One file for colormaps, called @colormap.js@, defining a list
-- @colormap@.
--
-- To see how this data can be used, have a look at the HTML5 view
-- included in the distribution in directory "visualize".
----------------------------------------------------------------------------
module Game.Waddle.ExportJS
(exportJS) where
import Game.Waddle.Types
import System.IO
import Data.List
import Text.Printf
import Data.Bits
import Data.Word
import Data.CaseInsensitive(CI)
import Data.Map(Map)
import qualified Data.Map as Map
import Control.Monad
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
-- | Exports a WAD structure into several JavaScript files:
--
-- * One file for each level, called like @level-E1M1.js@ or @level-MAP13.js@
-- * @levels.js@
-- * @textures.js@
-- * @flats.js@
-- * @sprites.js@
-- * @patches.js@
-- * @pnames.js@
-- * @palettes.js@
-- * @colormap.js@
--
exportJS :: Wad -> FilePath -> IO ()
exportJS Wad{..} dir = do
printf "levels: %d\n" (Map.size wadLevels)
forM_ (Map.elems wadLevels) $ \ Level{..} -> do
printf "Level %s:\n" (BS8.unpack levelName)
printf " vertices: %d\n" (length levelVertices)
printf " sideDefs: %d\n" (length levelSideDefs)
printf " lineDefs: %d\n" (length levelLineDefs)
printf " sectors: %d\n" (length levelSectors)
printf " reject: %d\n" (maybe 0 (BS.length . rejectBytes) levelReject)
printf " blockmap: %d\n"
(case levelBlockmap of
Just Blockmap{..} -> (blockmapColumns * blockmapRows)
Nothing -> 0)
printf " things: %d\n" (length levelThings)
printf "flats: %d\n" (Map.size wadFlats)
printf "sprites: %d\n" (Map.size wadSprites)
printf "textures: %d\n" (Map.size wadTextures)
printf "patches: %d\n" (Map.size wadPatches)
printf "pnames: %d\n" (Map.size wadPNames)
forM_ (Map.elems wadLevels) $ \ level@Level{..} -> do
withFile (printf "%s/level-%s.js" dir (BS8.unpack levelName)) WriteMode $ \ h ->
exportLevel h level
withFile (printf "%s/levels.js" dir) WriteMode $ \ h ->
exportLevels h wadLevels
withFile (printf "%s/sprites.js" dir) WriteMode $ \ h ->
exportSprites h wadSprites
withFile (printf "%s/patches.js" dir) WriteMode $ \ h ->
exportPatches h wadPatches
withFile (printf "%s/textures.js" dir) WriteMode $ \ h ->
exportTextures h wadTextures
withFile (printf "%s/pnames.js" dir) WriteMode $ \ h ->
exportPNames h wadPNames
withFile (printf "%s/flats.js" dir) WriteMode $ \ h ->
exportFlats h wadFlats
withFile (printf "%s/colormap.js" dir) WriteMode $ \ h ->
exportColormap h wadColormap
withFile (printf "%s/palettes.js" dir) WriteMode $ \ h ->
exportPalettes h wadPalettes
return ()
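-- Given an already decoded 'Wad' value (how it is loaded is up to the caller;
-- this module only consumes it), the whole export is a single call, e.g.
--
-- > exportJS wad "visualize/data"
--
-- where "visualize/data" is just a sample output directory that must already exist.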
exportThing :: Handle -> (String, Thing) -> IO ()
exportThing h (comma, Thing{..}) = do
hPrintf h " %s{x:%d,y:%d,angle:%d,type:\"%s\",flags:%d}\n" comma
thingX thingY thingAngle (show thingType) thingFlags
exportVertex :: Handle -> (String, Vertex) -> IO ()
exportVertex h (comma, Vertex{..}) = do
hPrintf h " %s{x:%d,y:%d}\n" comma
vertexX vertexY
exportLineDef :: Handle -> (String, LineDef) -> IO ()
exportLineDef h (comma, LineDef{..}) = do
hPrintf h " %s{start:%d,end:%d,flags:%d,effect:%d,tag:%d,right:%d,left:%s}\n" comma
lineDefStartVertex lineDefEndVertex lineDefFlags lineDefEffect lineDefTag
lineDefRightSideDef (maybe "null" show lineDefLeftSideDef)
exportSideDef :: Handle -> (String, SideDef) -> IO ()
exportSideDef h (comma, SideDef{..}) = do
hPrintf h " %s{xofs:%d,yofs:%d,upperTexture:%s,lowerTexture:%s,middleTexture:%s,sector:%d}\n" comma
sideDefXOffset sideDefYOffset (show sideDefUpperTextureName)
(show sideDefLowerTextureName) (show sideDefMiddleTextureName)
sideDefSector
exportNode :: Handle -> (String, Node) -> IO ()
exportNode h (comma, Node{..}) = do
hPrintf h " %s{x:%d,y:%d,dx:%d,dy:%d,rbbuy:%d,rbbly:%d,rbblx:%d,rbbux:%d,lbbuy:%d,lbbly:%d,lbblx:%d,lbbux:%d,rightNodeOrSSector:%d,leftNodeOrSSector:%d}\n" comma
nodeX nodeY nodeDX nodeDY
nodeRightBBUY nodeRightBBLY nodeRightBBLX nodeRightBBUX
nodeLeftBBUY nodeLeftBBLY nodeLeftBBLX nodeLeftBBUX
((either fromIntegral ((.|. 0x8000) . fromIntegral) nodeRightNodeOrSSector) :: Word16)
((either fromIntegral ((.|. 0x8000) . fromIntegral) nodeLeftNodeOrSSector) :: Word16)
exportSector :: Handle -> (String, Sector) -> IO ()
exportSector h (comma, Sector{..}) = do
hPrintf h " %s{floorHeight:%d,ceilingHeight:%d,floorFlat:%s,ceilingFlat:%s,lightLevel:%d,special:%d,tag:%d}\n" comma
sectorFloorHeight sectorCeilingHeight (show sectorFloorFlat) (show sectorCeilingFlat)
sectorLightLevel sectorSpecial sectorTag
exportSeg :: Handle -> (String, Seg) -> IO ()
exportSeg h (comma, Seg{..}) = do
hPrintf h " %s{start:%d,end:%d,angle:%d,lineDef:%d,direction:%d,offset:%d}\n" comma
segStartVertex segEndVertex segAngle segLineDef segDirection segOffset
exportSSector :: Handle -> (String, SSector) -> IO ()
exportSSector h (comma, SSector{..}) = do
hPrintf h " %s{segCount:%d,segStart:%d}\n" comma
ssectorSegCount ssectorSegStart
exportBlockmap :: Handle -> Maybe Blockmap -> IO ()
exportBlockmap h Nothing = hPrintf h "null"
exportBlockmap h (Just Blockmap{..}) = do
hPrintf h " {originX:%d,originY:%d,columns:%d,rows:%d,\n"
blockmapOriginX blockmapOriginY blockmapColumns blockmapRows
hPrintf h " lists:%s}" (show blockmapBlocklists)
exportLevel :: Handle -> Level -> IO ()
exportLevel h Level{..} = do
hPrintf h "var level_%s = {\n" (BS8.unpack levelName)
hPrintf h " things: [\n"
mapM_ (exportThing h) (zip (" ":repeat ",") levelThings)
hPrintf h " ],\n"
hPrintf h " vertices: [\n"
mapM_ (exportVertex h) (zip (" ":repeat ",") levelVertices)
hPrintf h " ],\n"
hPrintf h " linedefs: [\n"
mapM_ (exportLineDef h) (zip (" ":repeat ",") levelLineDefs)
hPrintf h " ],\n"
hPrintf h " sidedefs: [\n"
mapM_ (exportSideDef h) (zip (" ":repeat ",") levelSideDefs)
hPrintf h " ],\n"
hPrintf h " segs: [\n"
mapM_ (exportSeg h) (zip (" ":repeat ",") levelSegs)
hPrintf h " ],\n"
hPrintf h " ssectors: [\n"
mapM_ (exportSSector h) (zip (" ":repeat ",") levelSSectors)
hPrintf h " ],\n"
hPrintf h " sectors: [\n"
mapM_ (exportSector h) (zip (" ":repeat ",") levelSectors)
hPrintf h " ],\n"
hPrintf h " nodes: [\n"
mapM_ (exportNode h) (zip (" ":repeat ",") levelNodes)
hPrintf h " ],\n"
hPrintf h " reject: %s,\n" (maybe "[]" (show . BS.unpack . rejectBytes) levelReject)
hPrintf h " blockmap: \n"
exportBlockmap h levelBlockmap
hPrintf h "\n"
hPrintf h "};\n"
exportLevels :: Handle -> Map (CI LumpName) Level -> IO ()
exportLevels h mp = do
hPrintf h "var levels = {%s};\n"
(intercalate (","::String) $ (map (\ (_, Level{..}) ->
printf "%s: level_%s" (show levelName) (BS8.unpack levelName))
(zip ((" "::String):repeat",") $ Map.elems mp)))
exportPicture :: Handle -> Picture -> IO ()
exportPicture h Picture{..} = do
hPrintf h " width:%d,height:%d,leftOffset:%d,topOffset:%d,columns:[\n" pictureWidth pictureHeight pictureLeftOffset pictureTopOffset
forM_ (zip (" ":repeat ",") picturePosts) $ \ (comma1, plist) -> do
hPrintf h " %s[" (comma1 :: String)
forM_ (zip (" ":repeat ",") plist) $ \ (comma, Post{..}) -> do
hPrintf h "%s{top:%d,pixels:%s}" (comma::String) postTop (show (BS.unpack postPixels))
hPrintf h "]\n"
hPrintf h " ]"
exportSprite :: Handle -> (String, (CI LumpName, Sprite)) -> IO ()
exportSprite h (comma1, (_, Sprite{..})) = do
hPrintf h " %s%s: {name: %s,\n" comma1 (show spriteName) (show spriteName)
exportPicture h spritePicture
hPrintf h "\n }\n"
exportSprites :: Handle -> Map (CI LumpName) Sprite -> IO ()
exportSprites h mp = do
hPrintf h "var sprites = {\n"
forM_ (zip (" ":repeat ",") $ Map.toList mp) (exportSprite h)
hPrintf h " };\n"
exportPatch :: Handle -> (String, (CI LumpName, Patch)) -> IO ()
exportPatch h (comma1, (_, Patch{..})) = do
hPrintf h " %s%s: {name: %s,\n" comma1 (show patchName) (show patchName)
exportPicture h patchPicture
hPrintf h "\n }\n"
exportPatches :: Handle -> Map (CI LumpName) Patch -> IO ()
exportPatches h mp = do
hPrintf h "var patches = {\n"
forM_ (zip (" ":repeat ",") $ Map.toList mp) (exportPatch h)
hPrintf h "};\n"
exportTexture :: Handle -> (String, (CI LumpName, Texture)) -> IO ()
exportTexture h (comma1, (_, Texture{..})) = do
hPrintf h " %s%s: {name:%s,width:%d,height:%d,patches:[\n" comma1 (show textureName) (show textureName) textureWidth textureHeight
forM_ (zip (" ":repeat ",") texturePatchDescriptors) $ \ (comma, PatchDescriptor{..}) ->
hPrintf h " %s{xoffset:%d,yoffset:%d,pname:%d,stepdir:%d,colormap:%d}\n" (comma :: String)
patchDescriptorXOffset patchDescriptorYOffset patchDescriptorPNameIndex patchDescriptorStepDir patchDescriptorColorMap
hPrintf h " ]}\n"
exportTextures :: Handle -> Map (CI LumpName) Texture -> IO ()
exportTextures h mp = do
hPrintf h "var textures = {\n"
forM_ (zip (" ":repeat ",") $ Map.toList mp) (exportTexture h)
hPrintf h "};\n"
exportPNames :: Handle -> Map Int LumpName -> IO ()
exportPNames h mp = do
hPrintf h "var pnames = [\n"
forM_ (zip (" " : repeat ",") $ Map.elems mp) $ \ (comma, ln) -> do
hPrintf h "%s%s" (comma :: String) (show ln)
hPrintf h " ];\n"
exportFlats :: Handle -> Map (CI LumpName) Flat -> IO ()
exportFlats h mp = do
hPrintf h "var flats = {\n"
forM_ (zip (" ":repeat ",") (Map.elems mp)) $ \ (comma, Flat{..}) -> do
hPrintf h " %s%s:{name:%s,data:%s}\n" (comma :: String)
(show flatName) (show flatName) (show (BS.unpack flatData))
hPrintf h " }\n"
exportColormap :: Handle -> Maybe Colormap -> IO ()
exportColormap _ Nothing = return ()
exportColormap h (Just (Colormap bs)) = do
hPrintf h "var colormap = \n"
hPrintf h " %s\n" (show (map BS.unpack bs))
hPrintf h " ;\n"
exportPalettes :: Handle -> Maybe Palettes -> IO ()
exportPalettes _ Nothing = return ()
exportPalettes h (Just (Palettes pals)) = do
hPrintf h "var palettes = [\n"
forM_ (zip (" ":repeat ",") pals) $ \ (comma, pal) -> do
hPrintf h " %s[" (comma :: String)
forM_ (zip (" ":repeat ",") pal) $ \ (comma', (r,g,b)) -> do
hPrintf h "%s[%d,%d,%d]" (comma' :: String) r g b
hPrintf h "]\n"
hPrintf h " ];\n"
|
mgrabmueller/waddle
|
Game/Waddle/ExportJS.hs
|
bsd-3-clause
| 12,220 | 0 | 20 | 2,229 | 3,628 | 1,807 | 1,821 | 210 | 2 |
{-# LANGUAGE DeriveGeneric, KindSignatures, TemplateHaskell,
QuasiQuotes, FlexibleInstances, TypeOperators, TypeSynonymInstances,
MultiParamTypeClasses, FunctionalDependencies, OverlappingInstances,
ScopedTypeVariables, EmptyDataDecls, DefaultSignatures, ViewPatterns,
UndecidableInstances, FlexibleContexts, StandaloneDeriving, IncoherentInstances,
DeriveDataTypeable, RankNTypes #-}
module Main where
import Test.Framework (defaultMain, testGroup, defaultMainWithArgs)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
import Test.HUnit
import Debug.Trace.Helpers
import Debug.Trace
import Test.QuickCheck.Checkers
import Data.List
--import Language.C.Simple.CType.Build
--import Language.C.Simple.CType
import Language.C.Simple.CValue
import Foreign.C.Types
import GHC.Generics
import Data.DeriveTH
import Control.Applicative
--main = quickCheck propertyToCValue_3
main = defaultMainWithArgs tests ["-a 100", "-o 5"]
tests = [
testGroup "ToCValue" [
testProperty "propertyToCValue_0" propertyToCValue_0,
testProperty "propertyToCValue_1" propertyToCValue_1,
testProperty "propertyToCValue_2" propertyToCValue_2,
testProperty "propertyToCValue_3" propertyToCValue_3,
testProperty "propertyToCValue_4" propertyToCValue_4,
testProperty "propertyToCValue_5" propertyToCValue_5,
testProperty "propertyToCValue_6" propertyToCValue_6,
testProperty "propertyToCValue_7" propertyToCValue_7,
testProperty "propertyToCValue_8" propertyToCValue_8,
testProperty "propertyToCValue_9" propertyToCValue_9,
testProperty "propertyToCValue_10" propertyToCValue_10,
testProperty "propertyToCValue_11" propertyToCValue_11,
testProperty "propertyToCValue_12" propertyToCValue_12,
testProperty "propertyToCValue_13" propertyToCValue_13,
testProperty "propertyToCValue_14" propertyToCValue_14,
testCase "testFromCValuePrimitive_0" testFromCValuePrimitive_0,
testCase "testFromCValuePrimitive_1" testFromCValuePrimitive_1,
testCase "testFromCValueTest0" testFromCValueTest0,
testCase "testFromCValueTest1" testFromCValueTest1,
testCase "testFromCValueTest2" testFromCValueTest2,
testCase "testFromCValueTest3" testFromCValueTest3,
testCase "testFromCValueTest4" testFromCValueTest4
]
]
roundTrip :: (ToCValue a, FromCValue a, Eq a) => a -> Bool
roundTrip x = case (fromCValue $ toCValue x) of
Right y -> x == y
Left y -> error y
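-- For example, @roundTrip (Test0 1)@ holds exactly when
-- @fromCValue (toCValue (Test0 1))@ gives back @Right (Test0 1)@.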
propertyToCValue_0 :: Test0 -> Bool
propertyToCValue_0 = roundTrip
propertyToCValue_1 :: Test1 -> Bool
propertyToCValue_1 = roundTrip
propertyToCValue_2 :: Test2 -> Bool
propertyToCValue_2 = roundTrip
propertyToCValue_3 :: Test3 -> Bool
propertyToCValue_3 = roundTrip
propertyToCValue_4 :: Test4 -> Bool
propertyToCValue_4 = roundTrip
propertyToCValue_5 :: Test5 -> Bool
propertyToCValue_5 = roundTrip
propertyToCValue_6 :: Test6 -> Bool
propertyToCValue_6 = roundTrip
propertyToCValue_7 :: Test7 -> Bool
propertyToCValue_7 = roundTrip
propertyToCValue_8 :: Test8 -> Bool
propertyToCValue_8 = roundTrip
propertyToCValue_9 :: Test9 -> Bool
propertyToCValue_9 = roundTrip
propertyToCValue_10 :: Test10 -> Bool
propertyToCValue_10 = roundTrip
propertyToCValue_11 :: Test11 -> Bool
propertyToCValue_11 = roundTrip
propertyToCValue_12 :: Test12 -> Bool
propertyToCValue_12 = roundTrip
propertyToCValue_13 :: Test13 -> Bool
propertyToCValue_13 = roundTrip
propertyToCValue_14 :: Test14 -> Bool
propertyToCValue_14 = roundTrip
testFromCValuePrimitive_0 = actual @?= Right expected where
actual = fromCValue initial
expected = PChar $ fromIntegral 1
initial = VPrimitive $ expected
testFromCValuePrimitive_1 = actual @?= Right expected where
actual = fromCValue initial
expected = PInt $ fromIntegral 1
initial = VPrimitive $ expected
testFromCValueTest0 = actual @?= Right expected where
actual = fromCValue initial
expected = Test0 $ fromIntegral 1
initial = VUnion [Lft] (VPrimitive (PInt (1)))
testFromCValueTest1 = actual @?= Right expected where
actual = fromCValue initial
expected = Test1 (fromIntegral 1) (fromIntegral 2)
initial = VUnion [Lft] $ VStruct [
VMember $ VPrimitive $ PInt 1,
VMember $ VPrimitive $ PInt 2]
testFromCValueTest2 = actual @?= Right expected where
actual = fromCValue initial
expected = Option0 $ Test0 $ fromIntegral 1
initial = VUnion [Lft] $ VUnion [Lft] $ VPrimitive $ PInt $ 1
testFromCValueTest3 = actual @?= Right expected where
actual = fromCValue initial
expected = Option1 $ Test1 (fromIntegral 1) (fromIntegral 2)
initial = VUnion [Rght] $ VUnion [Lft] $ VStruct [
VMember $ VPrimitive $ PInt 1,
VMember $ VPrimitive $ PInt 2]
testFromCValueTest4 = actual @?= Right expected where
actual = fromCValue initial
expected = OptionB (Test1 0 (-1))
initial = VUnion [Lft, Rght] (VUnion [Lft] (VStruct [VMember (VPrimitive (PInt 0)),
VMember (VPrimitive (PInt (-1)))]))
data Test0 = Test0 CInt
deriving(Eq, Show, Generic)
instance (ToCValue a, ToCValue b) => ToCValue (a, b)
instance (FromCValue a, FromCValue b) => FromCValue (a, b)
instance ToCValue Test0
instance FromCValue Test0
instance Arbitrary CInt where
arbitrary = fromIntegral <$> (arbitrary :: Gen Int)
instance Arbitrary Test0 where
arbitrary = Test0 <$> arbitrary
data Test1 = Test1 CInt CInt
deriving(Eq, Show, Generic)
instance ToCValue Test1
instance FromCValue Test1
instance Arbitrary Test1 where
arbitrary = Test1 <$> arbitrary <*> arbitrary
data Test2 = Test2 (CInt, CInt)
deriving(Eq, Show, Generic)
instance ToCValue Test2
instance FromCValue Test2
instance Arbitrary Test2 where
arbitrary = Test2 <$> arbitrary
data Test3 = Option0 Test0
| Option1 Test1
deriving(Eq, Show, Generic)
instance ToCValue Test3
instance FromCValue Test3
instance Arbitrary Test3 where
arbitrary = do
test <- arbitrary
if test
then Option0 <$> arbitrary
else Option1 <$> arbitrary
data Test4 = OptionA Test0
| OptionB Test1
| OptionC Test2
| OptionD Test3
deriving(Eq, Show, Generic)
instance ToCValue Test4
instance FromCValue Test4
instance Arbitrary Test4 where
arbitrary = do
l <- choose(0, 3 :: Int)
case l of
0 -> OptionA <$> arbitrary
1 -> OptionB <$> arbitrary
2 -> OptionC <$> arbitrary
3 -> OptionD <$> arbitrary
data Test5 = Test5 [CInt]
deriving(Eq, Show, Generic)
instance ToCValue Test5
instance FromCValue Test5
instance Arbitrary Test5 where
arbitrary = Test5 <$> arbitrary
newtype Test6 = Test6 Test5
deriving(Eq, Show, Generic)
instance ToCValue Test6
instance FromCValue Test6
instance Arbitrary Test6 where
arbitrary = Test6 <$> arbitrary
data Test7 = Test7
{
test0 :: Test0,
test1 :: Test1,
test2 :: Test2,
test3 :: Test3,
test4 :: Test4,
test5 :: Test5,
test6 :: Test6
}
deriving(Eq, Show, Generic)
instance ToCValue Test7
instance FromCValue Test7
instance Arbitrary Test7 where
arbitrary = Test7 <$> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary
data Test8 = Test8 {run :: CInt}
deriving(Eq, Show, Generic)
instance ToCValue Test8
instance FromCValue Test8
instance Arbitrary Test8 where
arbitrary = Test8 <$> arbitrary
data Test9 = Test9
{
testA :: Test0,
testB :: Test1,
testC :: Test2
}
deriving(Eq, Show, Generic)
instance ToCValue Test9
instance FromCValue Test9
instance Arbitrary Test9 where
arbitrary = Test9 <$> arbitrary <*> arbitrary <*> arbitrary
data Test10 = Test10
{
testA1 :: Test0
}
deriving(Eq, Show, Generic)
instance ToCValue Test10
instance FromCValue Test10
instance Arbitrary Test10 where
arbitrary = Test10 <$> arbitrary
data Test11 = Test11
{
testA1A :: Test0,
testA1B :: Test1
}
deriving(Eq, Show, Generic)
instance ToCValue Test11
instance FromCValue Test11
instance Arbitrary Test11 where
arbitrary = Test11 <$> arbitrary <*> arbitrary
data Test12 = Test12
{
testAA :: Test2
}
deriving(Eq, Show, Generic)
instance ToCValue Test12
instance FromCValue Test12
instance Arbitrary Test12 where
arbitrary = Test12 <$> arbitrary
data Test13 = Test13
{
testAAA :: Test1,
testAAB :: Test2
}
deriving(Eq, Show, Generic)
instance ToCValue Test13
instance FromCValue Test13
instance Arbitrary Test13 where
arbitrary = Test13 <$> arbitrary <*> arbitrary
data Test14 = Test14 CInt CInt CInt
deriving(Eq, Show, Generic)
instance ToCValue Test14
instance FromCValue Test14
instance Arbitrary Test14 where
arbitrary = Test14 <$> arbitrary <*> arbitrary <*> arbitrary
data Test15 = Thing0
| Thing1
| Thing2
deriving(Eq, Show, Generic)
data Test16 = Blah0
| Blah1
| Blah2
| Blah3
| Blah4
| Blah5
deriving(Eq, Show, Generic)
|
jfischoff/simple-c-value
|
tests/Main.hs
|
bsd-3-clause
| 9,666 | 0 | 19 | 2,353 | 2,369 | 1,259 | 1,110 | 250 | 2 |
import Control.Monad
import LispError
import LispEval
import LispVal
import LispParser
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck as QC
import Text.Parsec
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "Tests" [properties, unitTests]
properties :: TestTree
properties = testGroup "Properties" [qcProps]
pAtom = parse parseAtom "atom err"
pChr = parse parseChar "character err"
pStr = parse parseString "string err"
pList = parse parseList "list err"
pPair = parse parseDottedList "pair err"
pNum = parse parseNumber "number err"
pRadixNum = parse parseRadixNumber "radix number err"
pExpr = parse parseExpr "expression err"
showExpr :: String -> String
showExpr s = case pExpr s of
Left err -> show err
Right val -> show val
evalExpr :: String -> Either LispError LispVal
evalExpr e = readExpr e >>= eval
unitTests = testGroup "Unit tests"
[ testCase "Simple atom is parsed" $
(Right (Atom "foo")) @=? (pAtom "foo")
, testCase "Character '#\\a' is parsed" $
(Right (Character 'a')) @=? (pChr "#\\a ")
, testCase "Character '#\\A' is parsed" $
(Right (Character 'A')) @=? (pChr "#\\A ")
, testCase "Character '#\\(' is parsed" $
(Right (Character '(')) @=? (pChr "#\\( ")
, testCase "Character '#\\ ' is parsed" $
(Right (Character ' ')) @=? (pChr "#\\ ")
-- TODO: fix parsing of these
-- , testCase "Character '#\\space' is parsed" $
-- (Right (Character ' ')) @=? (pChr "#\\space")
-- , testCase "Character '#\\newline' is parsed" $
-- (Right (Character '\n')) @=? (pChr "#\\newline")
, testCase "Boolean true is parsed" $
(Right (Bool True)) @=? (pAtom "#t")
, testCase "Boolean false is parsed" $
(Right (Bool False)) @=? (pAtom "#f")
, testCase "Simple string is parsed" $
(Right (String "str")) @=? (pStr "\"str\"")
, testCase "String with escaped quote is parsed" $
(Right (String "st\"r")) @=? (pStr "\"st\\\"r\"")
, testCase "Number is parsed" $
(Right (Number 1337)) @=? (pNum "1337")
, testCase "Base 16 number is parsed" $
(Right (Number 255)) @=? (pRadixNum "#xff")
, testCase "List (no parens) is parsed" $
(Right (List [Atom "f", Atom "x"])) @=? (pList "f x")
, testCase "List (parens) is parsed" $
(Right (List [Atom "f", Atom "x"])) @=? (pExpr "(f x)")
, testCase "Dotted list is parsed" $
(Right (DottedList [Atom "id"] $ Number 7)) @=? (pPair "id . 7")
, testCase "Expression is parsed" $
(Right (List [Atom "e", Number 2, Atom "x"])) @=? (pExpr "(e 2 x)")
, testCase "Y-combinator is parsed" $
(Right (List [Atom "define", Atom "Y",
List [Atom "lambda", List[Atom "le"],
List [List [Atom "lambda", List [Atom "f"], List [Atom "f", Atom "f"]],
List [Atom "lambda", List [Atom "f"],
List [Atom "le", List [Atom "lambda", List [Atom "x"],
List [List [Atom "f", Atom "f"], Atom "x"]]]]]]]))
@=?
(pExpr $
"(define Y \
\ (lambda (le) \
\ ((lambda (f) (f f)) \
\ (lambda (f) \
\ (le (lambda (x) \
\ ((f f) x)))))))")
, testCase "Expression is parsed and showed" $
"(quote (1 3 (\"this\" \"one\")))" @=? (showExpr "'(1 3 (\"this\" \"one\"))")
, testCase "String equality" $
(Right (Bool True)) @=? (evalExpr "(string=? \"test\" \"test\")")
, testCase "Less than is true" $
(Right (Bool True)) @=? (evalExpr "(< 2 3)")
, testCase "Greater than is true" $
(Right (Bool True)) @=? (evalExpr "(> 3 2)")
, testCase "Greater or equal than is true" $
(Right (Bool True)) @=? (evalExpr "(>= 3 3)")
, testCase "String less than is true" $
(Right (Bool True)) @=? (evalExpr "(string<? \"abc\" \"bba\")")
, testCase "'if' false branch" $
(Right (String "yes")) @=? (evalExpr "(if (> 2 3) \"no\" \"yes\")")
, testCase "'if' true branch" $
(Right (Number 9)) @=? (evalExpr "(if (= 3 3) (+ 2 3 (- 5 1)) \"unequal\")")
, testCase "'car' returns first element of list" $
(Right (Atom "a")) @=? (evalExpr "(car '(a b c))")
, testCase "'car' returns element from singleton list" $
(Right (Atom "a")) @=? (evalExpr "(car '(a))")
, testCase "'car' returns first element from dotted list" $
(Right (Atom "a")) @=? (evalExpr "(car '(a b . c))")
, testCase "'car' throws an error for non list argument" $
(Left (TypeMismatch "pair" (Atom "a"))) @=? (evalExpr "(car 'a)")
, testCase "'car' throws error for more that one argument" $
(Left (NumArgs 1 [Atom "a", Atom "b"])) @=? (evalExpr "(car 'a 'b)")
, testCase "'cdr' returns rest of list" $
(Right (List [Atom "b", Atom "c"])) @=? (evalExpr "(cdr '(a b c))")
, testCase "'cdr' returns singleton list with last element from list of two elements" $
(Right (List [Atom "b"])) @=? (evalExpr "(cdr '(a b))")
, testCase "'cdr' returns NIL from singleton list" $
(Right (List [])) @=? (evalExpr "(cdr '(a))")
, testCase "'cdr' returns second element of pair" $
(Right (Atom "b")) @=? (evalExpr "(cdr '(a . b))")
, testCase "'cdr' returns dotted list from dotted list" $
(Right (DottedList [Atom "b"] (Atom "c"))) @=? (evalExpr "(cdr '(a b . c))")
, testCase "'cdr' throws an error for non list argument" $
(Left (TypeMismatch "pair" (Atom "a"))) @=? (evalExpr "(cdr 'a)") -- = error – not a list
, testCase "'cdr' throws error for more that one argument " $
(Left (NumArgs 1 [Atom "a", Atom "b"])) @=? (evalExpr "(cdr 'a 'b)") -- = error – too many arguments
, testCase "'cons' builds a pair" $
(Right (DottedList [List [Atom "this", Atom "is"]] $ Atom "test")) @=? (evalExpr "(cons '(this is) 'test)")
, testCase "'cons' builds a list" $
(Right (List [List [Atom "this", Atom "is"]])) @=? (evalExpr "(cons '(this is) '())")
, testCase "'eqv?' returns #f for unequal numbers" $
(Right (Bool False)) @=? (evalExpr "(eqv? 1 3)")
, testCase "'eqv?' returns #t for equal numbers" $
(Right (Bool True)) @=? (evalExpr "(eqv? 3 3)")
, testCase "'eqv?' returns #t for equal atoms" $
(Right (Bool True)) @=? (evalExpr "(eqv? 'atom 'atom)")
, testCase "'eqv?' returns #f for different types" $
(Right (Bool False)) @=? (evalExpr "(eqv? 2 \"2\")")
, testCase "'equal?' returns #t for different types" $
(Right (Bool True)) @=? (evalExpr "(equal? 2 \"2\")")
]
qcProps = testGroup "(checked by QuickCheck)"
[ QC.testProperty "An atom is parsed" $
forAll atomString (\s -> pAtom s == Right (Atom s))
, QC.testProperty "A string is parsed" $
forAll stringString (\s -> pStr ("\"" ++ s ++ "\"") == Right (String s))
]
-- from https://wiki.haskell.org/QuickCheck_as_a_test_set_generator
neStringOf :: [a] -> [a] -> Gen [a]
neStringOf charsStart charsRest =
do s <- elements charsStart
r <- listOf' $ elements charsRest
return (s:r)
listOf' :: Gen a -> Gen [a]
listOf' gen = sized $ \n ->
do k <- choose (0, n)
vectorOf' k gen
vectorOf' :: Int -> Gen a -> Gen [a]
vectorOf' k gen = sequence [ gen | _ <- [1..k] ]
instance Arbitrary LispVal where
arbitrary = liftM Atom atomString
-- n <- choose $ oneof "abc"
-- return $ Atom n
atomString :: Gen String
atomString = neStringOf letters $ letters ++ symbols
-- TODO: expand to include escaped characters
stringString :: Gen String
stringString = listOf $ elements $ letters ++ symbols
--prop_number n = parse parseNumber "number" n == Right (Number n)
-- quickCheck (prop_atom :: String -> Bool)
-- generate arbitrary :: IO LispVal
-- sample atomString
|
rashack/scheme-in-48h
|
test/Spec.hs
|
bsd-3-clause
| 7,809 | 0 | 28 | 1,926 | 2,348 | 1,186 | 1,162 | 146 | 2 |
{-# LANGUAGE OverloadedStrings, StandaloneDeriving #-}
module Web.Neo.Internal where
import Web.Rest (
RestT,rest,
runRestT,Hostname,Port,RestError,
Request(Request),Method(POST,PUT,GET),Location,ContentType,Body,
Response(code,responseType,responseBody),
ResponseCode)
import Control.Error (EitherT,runEitherT,left,readErr)
import Data.Aeson (
Value,
ToJSON,object,(.=),encode,
FromJSON(parseJSON),
withObject,withText,
(.:),eitherDecode)
import Data.Aeson.Types (Parser)
import Control.Monad (when,(>=>))
import Control.Monad.Trans (lift)
import Control.Monad.IO.Class (MonadIO)
import Data.Function (on)
import Data.Text (Text,append,pack,unpack,isPrefixOf)
import qualified Data.Text as Text (takeWhile,reverse)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as BL (toStrict,fromStrict)
import Data.HashMap.Strict (HashMap)
-- | Run the given neo commands against the given hostname and port.
runNeoT :: (MonadIO m) => Hostname -> Port -> NeoT m a -> m (Either RestError (Either NeoError a))
runNeoT hostname port = runRestT hostname port . runEitherT
-- | Run the given neo commands against hostname "localhost" and port 7474.
defaultRunNeoT :: (MonadIO m) => NeoT m a -> m (Either RestError (Either NeoError a))
defaultRunNeoT = runNeoT "localhost" 7474
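-- A small usage sketch (the label and property values are made up):
--
-- > example :: IO (Either RestError (Either NeoError Edge))
-- > example = defaultRunNeoT $ do
-- >   alice <- newNode
-- >   bob   <- newNode
-- >   addNodeLabel "Person" alice
-- >   setNodeProperty "name" ("Alice" :: Text) alice
-- >   newEdge "KNOWS" alice bob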
-- | The neo monad transformer. Catches errors and uses REST calls.
type NeoT m = EitherT NeoError (RestT m)
-- | Type of things that can go wrong when talking to neo4j via the REST API.
data NeoError = ResponseCodeError ResponseCode Body
| ResponseTypeError (Maybe ContentType) Body
| ResponseParseError String
| ExtractIdError Text
deriving instance Show NeoError
-- | A neo4j node.
data Node = Node {
nodeId :: Integer,
nodeData :: Properties}
deriving instance Show Node
instance Eq Node where
(==) = (==) `on` nodeId
instance Ord Node where
(<=) = (<=) `on` nodeId
instance FromJSON Node where
parseJSON = withObject "NodeObject" (\o -> do
selfid <- o .: "self" >>= parseSelfId
nodedata <- o .: "data"
return (Node selfid nodedata))
-- | A neo4j edge.
data Edge = Edge {
edgeId :: Integer,
edgeStart :: Integer,
edgeEnd :: Integer,
edgeType :: Label,
edgeData :: Properties}
deriving instance Show Edge
instance Eq Edge where
(==) = (==) `on` edgeId
instance Ord Edge where
(<=) = (<=) `on` edgeId
instance FromJSON Edge where
parseJSON = withObject "EdgeObject" (\o -> do
selfid <- o .: "self" >>= parseSelfId
startid <- o .: "start" >>= parseSelfId
endid <- o .: "end" >>= parseSelfId
label <- o .: "type"
edgedata <- o .: "data"
return (Edge selfid startid endid label edgedata))
-- | The properties of either a node or an edge. A map from 'Text' keys to
-- json values.
type Properties = HashMap Text Value
-- | A label of either a node or an edge.
type Label = Text
-- | Create a new node.
newNode :: (Monad m) => NeoT m Node
newNode = call (jsonRequest POST "/db/data/node" "") (2,0,1)
-- | Set the property of the given node with the given key to the given value.
setNodeProperty :: (Monad m,ToJSON value) => Text -> value -> Node -> NeoT m ()
setNodeProperty key value node = setProperty key value (nodeURI node)
-- | Add a label to a node.
addNodeLabel :: (Monad m) => Label -> Node -> NeoT m ()
addNodeLabel label node = emptyCall addNodeLabelRequest where
addNodeLabelRequest = jsonRequest POST (nodeURI node `append` "/labels") (strictEncode label)
-- | Create a new edge.
newEdge :: (Monad m) => Label -> Node -> Node -> NeoT m Edge
newEdge label sourcenode targetnode = call newEdgeRequest (2,0,1) where
newEdgeRequest = jsonRequest POST sourceuri (strictEncode payload)
sourceuri = (nodeURI sourcenode `append` "/relationships")
payload = object [
"to" .= nodeURI targetnode,
"type" .= label]
-- | Set the property of the given edge with the given key to the given value.
setEdgeProperty :: (Monad m,ToJSON value) => Text -> value -> Edge -> NeoT m ()
setEdgeProperty key value edge = setProperty key value (edgeURI edge)
-- | Get the node with the given neo4j internal ID.
nodeById :: (Monad m) => Integer -> NeoT m Node
nodeById nodeid = call (jsonGetRequest ("/db/data/node/" `append` (pack (show nodeid)))) (2,0,0)
-- | Get all nodes with the given label.
nodesByLabel :: (Monad m) => Label -> NeoT m [Node]
nodesByLabel label = call (jsonGetRequest ("/db/data/label/" `append` label `append` "/nodes")) (2,0,0)
-- | Get the edge with the given neo4j internal ID.
edgeById :: (Monad m) => Integer -> NeoT m Edge
edgeById edgeid = call (jsonGetRequest ("/db/data/relationship/" `append` (pack (show edgeid)))) (2,0,0)
-- | Get all edges (outgoing as well as incoming) of the given node.
allEdges :: (Monad m) => Node -> NeoT m [Edge]
allEdges node = call (jsonGetRequest (nodeURI node `append` "/relationships/all")) (2,0,0)
-- | Get all incoming edges of the given node.
incomingEdges :: (Monad m) => Node -> NeoT m [Edge]
incomingEdges node = call (jsonGetRequest (nodeURI node `append` "/relationships/in")) (2,0,0)
-- | Get all outgoing edges of the given node.
outgoingEdges :: (Monad m) => Node -> NeoT m [Edge]
outgoingEdges node = call (jsonGetRequest (nodeURI node `append` "/relationships/out")) (2,0,0)
-- | Get all labels of the given node.
nodeLabels :: (Monad m) => Node -> NeoT m [Label]
nodeLabels node = call (jsonGetRequest (nodeURI node `append` "/labels")) (2,0,0)
-- | Get the properties of the given node.
nodeProperties :: (Monad m) => Node -> NeoT m Properties
nodeProperties = nodeById . nodeId >=> return . nodeData
-- | Get the source node of the given edge.
source :: (Monad m) => Edge -> NeoT m Node
source = nodeById . edgeStart
-- | Get the target node of the given edge.
target :: (Monad m) => Edge -> NeoT m Node
target = nodeById . edgeEnd
-- | Get the label of the given edge.
edgeLabel :: (Monad m) => Edge -> NeoT m Label
edgeLabel = edgeById . edgeId >=> return . edgeType
-- | Get the properties of the given edge.
edgeProperties :: (Monad m) => Edge -> NeoT m Properties
edgeProperties = edgeById . edgeId >=> return . edgeData
-- | Cypher query text.
type CypherQuery = Text
-- | Cypher query parameters. Should be an object.
type CypherParameters = Value
-- | Result of a Cypher query. A list of column headers and a list of rows.
-- Each row is itself a list of json values, exactly one for each column
-- header. Each value in a row might have a very different shape.
data CypherResult = CypherResult {columnHeaders :: [Text], rowValues :: [[Value]]}
deriving instance Show CypherResult
instance FromJSON CypherResult where
parseJSON = withObject "CypherResultObject" (\o -> do
columnheaders <- o .: "columns"
rowvalues <- o .: "data"
return (CypherResult columnheaders rowvalues))
-- | Execute the given cypher query text with the given parameters. Returns
-- the result of the query.
cypher :: (Monad m) => CypherQuery -> CypherParameters -> NeoT m CypherResult
cypher cypherQuery cypherParameters = call cypherRequest (2,0,0) where
cypherRequest = jsonRequest POST "/db/data/cypher" (strictEncode payload)
payload = object [
"query" .= cypherQuery,
"params" .= cypherParameters]
-- | When the given expected 'ResponseCode' and the one of the given 'Response'
-- are not equal throw a 'ResponseCodeError'.
assertResponseCode :: (Monad m) => ResponseCode -> Response -> NeoT m ()
assertResponseCode expectedCode response = when
(expectedCode /= code response)
(left (ResponseCodeError (code response) (responseBody response)))
-- | When the 'ContentType' of the given 'Response' does not match the
-- given expected 'ContentType' or is not present at all throw a
-- 'ResponseTypeError'.
assertResponseType :: (Monad m) => ContentType -> Response -> NeoT m ()
assertResponseType expectedType response = case responseType response of
Nothing -> return ()
Just givenType -> when (not (expectedType `isPrefixOf` givenType))
(left (ResponseTypeError (responseType response) (responseBody response)))
-- | The content type of a json request or a json response.
jsoncontent :: ContentType
jsoncontent = "application/json"
-- | Encode the given value into a strict 'ByteString'
strictEncode :: (ToJSON a) => a -> ByteString
strictEncode = BL.toStrict . encode
-- | Decode the given strict 'ByteString'.
strictEitherDecode :: (FromJSON a) => ByteString -> Either String a
strictEitherDecode = eitherDecode . BL.fromStrict
-- | Make a REST call. Expect the given 'ResponseCode' and the response
-- to be json encoded.
call :: (FromJSON a,Monad m) => Request -> ResponseCode -> NeoT m a
call request expectedCode = do
response <- lift (rest request)
assertResponseCode expectedCode response
assertResponseType jsoncontent response
parseResponse response
-- | Make a REST call and expect an empty response.
emptyCall :: (Monad m) => Request -> NeoT m ()
emptyCall request = lift (rest request) >>= assertResponseCode (2,0,4)
-- | Make a json request where the given request body as well as the expected response
-- body are both json encoded.
jsonRequest :: Method -> Location -> Body -> Request
jsonRequest method location body = Request method location jsoncontent jsoncontent body
-- | Make a get request to the given location. The expected response body should be
-- json encoded.
jsonGetRequest :: Location -> Request
jsonGetRequest location = jsonRequest GET location ""
-- | Find a node's URI.
nodeURI :: Node -> Location
nodeURI node = "/db/data/node/" `append` (pack (show (nodeId node)))
-- | Find an edge's URI.
edgeURI :: Edge -> Location
edgeURI edge = "/db/data/relationship/" `append` (pack (show (edgeId edge)))
-- | Set the property of either an edge or a node at the given location.
setProperty :: (Monad m,ToJSON value) => Text -> value -> Location -> NeoT m ()
setProperty key value uri = emptyCall setPropertyRequest where
requestUri = uri `append` "/properties/" `append` key
setPropertyRequest = jsonRequest PUT requestUri (strictEncode value)
-- | Parse the body of a response but fail in the 'NeoT' monad instead of
-- returning an 'Either'.
parseResponse :: (FromJSON a,Monad m) => Response -> NeoT m a
parseResponse response = case strictEitherDecode (responseBody response) of
Left errormessage -> left (ResponseParseError errormessage)
Right result -> return result
-- | Given a json value that should represent a URI parse the last part of
-- it as a number.
parseSelfId :: Value -> Parser Integer
parseSelfId = withText "URI" (\s -> case idSlug s of
Left errormessage -> fail errormessage
Right idslug -> return idslug)
-- | Extract the last part of the given URI.
idSlug :: Text -> Either String Integer
idSlug uri = readErr ("Reading URI slug failed: " ++ uriSlug) uriSlug where
uriSlug = unpack (Text.reverse (Text.takeWhile (/= '/') (Text.reverse uri)))
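-- For example, @idSlug "/db/data/node/42"@ yields @Right 42@, while a URI whose
-- last segment is not a number yields a @Left@ error message.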
|
phischu/haskell-neo
|
src/Web/Neo/Internal.hs
|
bsd-3-clause
| 11,125 | 0 | 14 | 2,158 | 2,965 | 1,626 | 1,339 | -1 | -1 |
module Language.Haskell.Liquid.TH (
lq
) where
import Control.Monad
import Data.List
import Data.Maybe
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Language.Fixpoint.Types (val)
import Language.Haskell.Liquid.Misc
import Language.Haskell.Liquid.RefType (freeTyVars, quantifyRTy)
import Language.Haskell.Liquid.TH.Magic
import Language.Haskell.Liquid.TH.Misc
import Language.Haskell.Liquid.TH.Parse
import Language.Haskell.Liquid.TH.Simplify
import Language.Haskell.Liquid.TH.Types
import Language.Haskell.Liquid.TH.WiredIns
lq :: Bool -> QuasiQuoter
lq simplified =
QuasiQuoter
{ quoteType = lqType simplified
, quoteDec = lqDec simplified
, quoteExp = lqInvalid "expression"
, quotePat = lqInvalid "pattern"
}
lqInvalid :: String -> String -> Q a
lqInvalid ctxt _ = fail $
"`lq` quasiquoter cannot be used in the " ++ ctxt ++ " context"
lqDec :: Bool -> String -> Q [Dec]
lqDec = parseDecs
lqType :: Bool -> String -> Q Type
lqType simplified src = do
(names, src') <- extractContext
ty <- parseType simplified src'
newTVs <- filterM (fmap isNothing . lookupTypeName) (freeTyVars $ val ty)
ann <- dataToExpQ' $ LiquidVar False False ty $ locatedSpan ty
mapM_ (`forceAddAnnotation` ann) (map ValueAnnotation names)
return $ simplifyRTy $ quantifyRTy newTVs $ val ty
where
extractContext
| simplified =
return ([], src)
| lqTypeParsePrefix `isPrefixOf` src = do
let (names, _:src') = break (== '|') $ drop (length lqTypeParsePrefix) src
return (mkName <$> split ',' names, src')
| otherwise = fail contextError
contextError = unlines
[ "The `lq` quasiquoter cannot generally be used in the type context."
, " Exceptions are type signatures for local variables, methods in class and"
, " instance declarations, constructors in GADTs, and fields in record "
, " constructors, to work around restrictions in the current version of GHC."
, " In these cases, the quasiquotation must appear directly after the `::` in"
, " the type signature."
]
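-- A rough usage sketch (illustrative; the refinement syntax shown is the
-- usual LiquidHaskell notation and is not defined in this module):
--
-- > {-# LANGUAGE QuasiQuotes #-}
-- > [lq| incr :: {v:Int | v >= 0} -> {v:Int | v > 0} |]
-- > incr x = x + 1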
|
spinda/liquidhaskell
|
src/Language/Haskell/Liquid/TH.hs
|
bsd-3-clause
| 2,339 | 0 | 16 | 621 | 537 | 296 | 241 | 51 | 1 |
module Try
(
displayHelp
, echoHelp
, dec2binHelp
, bin2decHelp
, googleHelp
, gmt_timeHelp
, local_timeHelp
, cowsNbullsHelp
, guesssHelp
, randomCoinHelp
, rDieHelp
, weatherHelp
) where
import System.Random
import System.IO
import Data.List
displayHelp :: String
displayHelp =
"Implemented Commands:- !commands, !echo, !dec2bin, !bin2dec, !google, !gmt_time, !local_time, !cowsNbulls, !cowsNbullsInst, !guess, !randomCoin, !rDie, !weather. For More Information about each command, type command@help "
commandsHelp :: String
commandsHelp =
"!commands: It displays the list of all commands available.Syntax: !commands"
weatherHelp :: String
weatherHelp = "!weather: It displays the forecast of the place at that particular point of time.Syntax: !weather"
echoHelp :: String
echoHelp =
"!echo: It repeats the same text again.Syntax: !echo string. Example: !echo Hi! How are you? "
dec2binHelp :: String
dec2binHelp =
"!dec2bin: It displays the binary form of the given decimal number.Note:Only positive integers are accepted.Syntax: !dec2bin number. Example: !dec2bin 17\n"
bin2decHelp :: String
bin2decHelp =
"!bin2dec: It displays the decimal form of the given binary number.Syntax: !bin2dec binary. Example: !bin2dec 101110010\n"
googleHelp :: String
googleHelp =
"!google: It search the given text on google and displays the first link from search result. Syntax: !google text Example: !google Learn Haskell\n"
gmt_timeHelp :: String
gmt_timeHelp =
"!gmt_time: It displays the GMT Time in UTC Format. Syntax: !gmt_time"
local_timeHelp :: String
local_timeHelp =
"!local_time: It displays the Indian Standard Time in IST Format. Syntax: !local_time"
cowsNbullsHelp :: String
cowsNbullsHelp =
"!cowsNbulls: It starts a Cows N Bulls game. Enjoy!! Syntax: !cowsNbulls"
guesssHelp :: String
guesssHelp =
"!guess: It is used to enter a number as input in Cows N Bulls game.Syntax: !guess number. Example: !guess 4325"
randomCoinHelp :: String
randomCoinHelp =
"!randomCoin: It flips a coin and show what appears-Head or Tail.Syntax: !randomCoin\n"
rDieHelp :: String
rDieHelp =
"Command: !rDie.Function: It rolls a dice and show the number that appears.Syntax: !randomCoin\n"
cowsNbullsInstHelp :: String
cowsNbullsInstHelp =
"Command: !cowsNbullsInstructions\n"++
"Function: It displays instructions for the Cows N Bulls game.\n"++
"Syntax: !cowsNbullsInstructions\n"
cowsNbullsInst :: String
cowsNbullsInst =
""
|
Sahil-yerawar/IRChbot
|
src/Try.hs
|
bsd-3-clause
| 2,474 | 40 | 6 | 389 | 255 | 151 | 104 | 63 | 1 |
{-# LANGUAGE OverloadedStrings, RecordWildCards, DeriveDataTypeable, ConstraintKinds, FlexibleContexts #-}
module Test.WebDriver.Exceptions.Internal
( InvalidURL(..), HTTPStatusUnknown(..), HTTPConnError(..)
, UnknownCommand(..), ServerError(..)
, FailedCommand(..), failedCommand, mkFailedCommandInfo
, FailedCommandType(..), FailedCommandInfo(..), StackFrame(..)
) where
import Test.WebDriver.Session
import Test.WebDriver.JSON
import Data.Aeson
import Data.Aeson.Types (Parser, typeMismatch)
import Data.ByteString.Lazy.Char8 (ByteString)
import Data.Text (Text)
import qualified Data.Text.Lazy.Encoding as TLE
import Control.Exception (Exception)
import Control.Exception.Lifted (throwIO)
import Control.Applicative
import Data.Typeable (Typeable)
import Data.Maybe (fromMaybe, catMaybes)
import Data.Word
import Prelude -- hides some "unused import" warnings
instance Exception InvalidURL
-- |An invalid URL was given
newtype InvalidURL = InvalidURL String
deriving (Eq, Show, Typeable)
instance Exception HTTPStatusUnknown
-- |An unexpected HTTP status was sent by the server.
data HTTPStatusUnknown = HTTPStatusUnknown Int String
deriving (Eq, Show, Typeable)
instance Exception HTTPConnError
-- |HTTP connection errors.
data HTTPConnError = HTTPConnError String Int
deriving (Eq, Show, Typeable)
instance Exception UnknownCommand
-- |A command was sent to the WebDriver server that it didn't recognize.
newtype UnknownCommand = UnknownCommand String
deriving (Eq, Show, Typeable)
instance Exception ServerError
-- |A server-side exception occurred
newtype ServerError = ServerError String
deriving (Eq, Show, Typeable)
instance Exception FailedCommand
-- |This exception encapsulates a broad variety of exceptions that can
-- occur when a command fails.
data FailedCommand = FailedCommand FailedCommandType FailedCommandInfo
deriving (Show, Typeable)
-- |The type of failed command exception that occurred.
data FailedCommandType = NoSuchElement
| NoSuchFrame
| UnknownFrame
| StaleElementReference
| ElementNotVisible
| InvalidElementState
| UnknownError
| ElementIsNotSelectable
| JavascriptError
| XPathLookupError
| Timeout
| NoSuchWindow
| InvalidCookieDomain
| UnableToSetCookie
| UnexpectedAlertOpen
| NoAlertOpen
| ScriptTimeout
| InvalidElementCoordinates
| IMENotAvailable
| IMEEngineActivationFailed
| InvalidSelector
| SessionNotCreated
| MoveTargetOutOfBounds
| InvalidXPathSelector
| InvalidXPathSelectorReturnType
deriving (Eq, Ord, Enum, Bounded, Show)
-- |Detailed information about the failed command provided by the server.
data FailedCommandInfo =
FailedCommandInfo { -- |The error message.
errMsg :: String
-- |The session associated with
-- the exception.
, errSess :: Maybe WDSession
-- |A screen shot of the focused window
                      -- when the exception occurred,
-- if provided.
, errScreen :: Maybe ByteString
-- |The "class" in which the exception
-- was raised, if provided.
, errClass :: Maybe String
-- |A stack trace of the exception.
, errStack :: [StackFrame]
}
-- |Provides a readable printout of the error information, useful for
-- logging.
instance Show FailedCommandInfo where
show i = showChar '\n'
. showString "Session: " . sess
. showChar '\n'
. showString className . showString ": " . showString (errMsg i)
. showChar '\n'
. foldl (\f s-> f . showString " " . shows s) id (errStack i)
$ ""
where
className = fromMaybe "<unknown exception>" . errClass $ i
sess = case errSess i of
Nothing -> showString "None"
Just WDSession{..} ->
let sessId = maybe "<no session id>" show wdSessId
in showString sessId . showString " at "
. shows wdSessHost . showChar ':' . shows wdSessPort
-- |Constructs a FailedCommandInfo from only an error message.
mkFailedCommandInfo :: (WDSessionState s) => String -> s FailedCommandInfo
mkFailedCommandInfo m = do
sess <- getSession
return $ FailedCommandInfo { errMsg = m
, errSess = Just sess
, errScreen = Nothing
, errClass = Nothing
, errStack = [] }
-- |Convenience function to throw a 'FailedCommand' locally with no server-side
-- info present.
failedCommand :: (WDSessionStateIO s) => FailedCommandType -> String -> s a
failedCommand t m = throwIO . FailedCommand t =<< mkFailedCommandInfo m
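-- A usage sketch (illustrative; 'orFail' is a hypothetical helper, not part
-- of this module):
--
-- > orFail :: (WDSessionStateIO s) => Maybe a -> s a
-- > orFail (Just x) = return x
-- > orFail Nothing  = failedCommand NoSuchElement "unable to locate the element"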
-- |An individual stack frame from the stack trace provided by the server
-- during a FailedCommand.
data StackFrame = StackFrame { sfFileName :: String
, sfClassName :: String
, sfMethodName :: String
, sfLineNumber :: Int
}
deriving (Eq)
instance Show StackFrame where
show f = showString (sfClassName f) . showChar '.'
. showString (sfMethodName f) . showChar ' '
. showParen True ( showString (sfFileName f) . showChar ':'
. shows (sfLineNumber f))
$ "\n"
instance FromJSON FailedCommandInfo where
parseJSON (Object o) =
FailedCommandInfo <$> (req "message" >>= maybe (return "") return)
<*> pure Nothing
<*> (fmap TLE.encodeUtf8 <$> opt "screen" Nothing)
<*> opt "class" Nothing
<*> (catMaybes <$> opt "stackTrace" [])
where req :: FromJSON a => Text -> Parser a
req = (o .:) --required key
opt :: FromJSON a => Text -> a -> Parser a
opt k d = o .:?? k .!= d --optional key
parseJSON v = typeMismatch "FailedCommandInfo" v
instance FromJSON StackFrame where
parseJSON (Object o) = StackFrame <$> reqStr "fileName"
<*> reqStr "className"
<*> reqStr "methodName"
<*> req "lineNumber"
where req :: FromJSON a => Text -> Parser a
req = (o .:) -- all keys are required
reqStr :: Text -> Parser String
reqStr k = req k >>= maybe (return "") return
parseJSON v = typeMismatch "StackFrame" v
|
zerobuzz/hs-webdriver
|
src/Test/WebDriver/Exceptions/Internal.hs
|
bsd-3-clause
| 7,272 | 0 | 17 | 2,587 | 1,369 | 747 | 622 | 129 | 1 |
{-# LANGUAGE DeriveGeneric, KindSignatures, TemplateHaskell,
QuasiQuotes, FlexibleInstances, TypeOperators, TypeSynonymInstances,
MultiParamTypeClasses, FunctionalDependencies, OverlappingInstances,
ScopedTypeVariables, EmptyDataDecls, DefaultSignatures, ViewPatterns,
UndecidableInstances, FlexibleContexts, StandaloneDeriving, IncoherentInstances,
DeriveDataTypeable #-}
module MRP.CommandsC where
import MRP.Commands
import MRP.QuasiQuoter
import qualified Data.ByteString as BS
import Language.C.Simple.CType
import Language.C.Simple.CType.Build
import Language.C.Simple.CType.Build.TH
import Language.C.Simple.Evaluator
import TypeLevel.NaturalNumber
import Language.C
import Data.Loc
import Language.C.Syntax
import Data.Symbol
instance ToCType ResourceEnv where
toCType = const $ TStruct "ResourceEnv" [
TMember "resources" $ TPointer $ TNamed "Resource",
TMember "count" $ TPrimitive TInt,
TMember "capacity" $ TPrimitive TInt,
TMember "in_use" $ TPointer $ TPrimitive TInt,
TMember "evalutor_env" $ TPointer $ TPrimitive TVoid,
TMember "evalutor_eval" $ TFuncPointer [TPointer $ TPrimitive TVoid,
TPointer $ TPrimitive TVoid]]
resource_c = TStruct "Resource" [
TMember "id" $ TPrimitive TInt,
TMember "bytes" $ TNamed "ByteString"
]
instance ToCType BS.ByteString where
toCType = const $ TStruct "ByteString" [
TMember "bytes" $ TVariable $ TPrimitive TChar
]
$(mk_c_type_instance' [("a", ["ToCType"]), ("b", ["ToCType"])] ''Either)
$(mk_c_type_instance' [("a", ["ToCType"]), ("b", ["ToCType"]), ("c", ["ToCType"])] ''Command)
$(mk_c_type_instance' [] ''IdMissing)
$(mk_c_type_instance' [] ''CreateInput )
$(mk_c_type_instance' [] ''CreateOutput )
$(mk_c_type_instance' [] ''CreateError )
$(mk_c_type_instance' [] ''DeleteInput )
$(mk_c_type_instance' [] ''DeleteOutput )
$(mk_c_type_instance' [] ''PutInput )
$(mk_c_type_instance' [] ''PutOutput )
$(mk_c_type_instance' [] ''GetInput )
$(mk_c_type_instance' [] ''GetOutput )
$(mk_c_named_members ''ResourceCommand)
--I need to make the evaluator
--first I need to generalize the evaluator code I already have to include an environment
--and take in handlers
--I need to make the
--environment and the handlers
copy_bytestring = [cfun| void copy_bytestring(const ByteString* input, ByteString* output) {
assert(input->size == output->size);
memcpy(output->bytes, input->bytes, input->size);
}|];
handle_create = [cfun| void create(ResourceEnv* env, CreateInput* input, CreateOutput* output) {
assert(env->capacity > env->count);
ByteString byte_string = {malloc(input->size), input->size};
Resource resource = {input->id, byte_string};
for(int i = 0; i < env->capacity; i++) {
if(env->in_use[i] == 0) {
env->resources[i] = resource;
env->in_use[i] = 1;
}
}
env->count++;
}|]
handle_delete = [cfun| void delete(ResourceEnv* env, const DeleteInput* input, DeleteOutput* output) {
assert(env->capacity > env->count);
for(int i = 0; i < env->capacity; i++) {
if(env->in_use[i] == 1) {
if(env->resources[i].id == input->id) {
free(env->resources.bytes.bytes);
env->in_use[i] = 0;
}
}
}
env->count--;
}|]
handle_get = [cfun| void get(ResourceEnv* env, const GetInput* input, GetOutput* output) {
assert(env->in_use[input->id]);
for(int i = 0; i < env->capacity; i++) {
if(env->resources[i].id == input->id) {
output->bytestring = &input->bytes;
return;
}
}
}|]
handle_put = [cfun| void put(ResourceEnv* env, const PutInput* input, PutOutput* output) {
assert(env->in_use[input->id]);
for(int i = 0; i < env->capacity; i++) {
if(env->resources[i].id == input->id) {
copy_bytestring(input->bytes, output->bytestring);
return;
}
}
}|]
--get the evaluator-creating code
--then actually write the code out
--and compile it
--make that a main function
--then test it by running it from haskell
--test, test, and release online
--get the inputs
{-
fixup_offset = [cfun| void fixup_offset(ResourceEnv* env, int i, RunInput* input) {
int offset = input->offset[i];
char* command = input->command;
int id = *(int*)&command[offset];
((int*)(&command[offset]))[0] = env->resources[id].bytes.bytes;
} |]
--call the eval
handle_run = [cfun| void run(ResourceEnv* env, const RunInput* input, RunOutput* output) {
for(int i = 0; i < input->fixup_count; i++) {
fixup_offset(env, i, input);
}
env->evaluator_eval(env->evaluator-env, input->command);
}
|]
-}
|
jfischoff/minimal-resource-protocol
|
src/MRP/CommandsC.hs
|
bsd-3-clause
| 5,932 | 0 | 13 | 2,138 | 610 | 332 | 278 | 52 | 1 |
-- Solution to Stanford Compilers Course.
-- (c) Copyright 2012 Michael Starzinger. All Rights Reserved.
module Main (main) where
import Assembler.Printer (prettyAsm)
import CoolCodegen (generate)
import CoolSemant (Result(..),semant)
import Data.List (find)
import Data.Maybe (fromJust,isJust)
import System.Console.GetOpt
import System.Environment (getArgs)
import System.IO (IOMode(WriteMode),withFile)
-- This is a description of the command line options to this program.
options :: [OptDescr (Maybe String)]
options = [
Option ['g'] ["garbage"] (NoArg Nothing) "gabage collection",
Option ['o'] ["output"] (ReqArg Just "FILE") "output file"
]
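-- An invocation looks roughly like this (illustrative; the actual binary name
-- depends on how this file is built, and options must precede the input file
-- because of 'RequireOrder'):
--
-- > coolc program.cl          # writes program.cl.s
-- > coolc -o out.s program.cl # writes out.s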
-- This is the main program entry for the code generator used as a
-- standalone program that reads input from a file and ...
main = do
args <- getArgs
(filename1, filename2) <- do
case getOpt RequireOrder options args of
(o,[x],[]) -> do
let def = x ++ ".s"
let out = maybe def fromJust (find isJust o)
return (x, out)
(_,___,__) -> error $ usageInfo "Invalid usage." options
input <- readFile filename1
case semant input of
(Program p) -> withFile filename2 WriteMode $ prettyAsm $ generate p
(LexerError _ m) -> error $ "Lexical Error: " ++ m
(ParserError _ m) -> error $ "Syntax Error: " ++ m
(SemanticError _ m) -> error $ "Semantical Error: " ++ m
(GenericError m) -> error $ "Generic Error: " ++ m
|
mstarzinger/coolc
|
generator.hs
|
bsd-3-clause
| 1,411 | 6 | 20 | 269 | 447 | 240 | 207 | 29 | 6 |
{-#LANGUAGE OverloadedStrings#-}
-- convert sam file to fastq file
-- Min Zhang
-- 10-10-14
-- Version 0.0.1
import qualified System.Environment as Sys
import IO
import qualified Safe
main = do
instructions
inputArguments <- interactive
test inputArguments
process inputArguments
instructions = putStrLn "\nsamToFastq: Convert unmapped sam files to fastq files.\n\
\ usage: ./samToFastq [input sam file path] [output fastq file path] \n"
interactive = do
paths <- Sys.getArgs
let input = Just (head paths)
let output = Just (last paths)
if length paths /= 2
then do return (Nothing, Nothing)
else do return (input, output)
test (input, output)
| and [input == Nothing, output == Nothing] = putStrLn "The input needs to be two files, input sam and output fastq. \n"
  | fmap (take 4 . reverse) input /= Just "mas." = putStrLn "The input needs to be a Sam file. \n"
  | fmap (take 6 . reverse) output /= Just "qtsaf." = putStrLn "The output needs to be a Fastq file. \n"
| otherwise = putStrLn "Everything looks good. \n"
process (input, output)
| (input, output) == (Nothing, Nothing) = putStrLn "Error: please check your input.\n"
| otherwise = samToFastq input' output'
where input' = toPath input
output' = toPath output
toPath = \(Just x)->x::FilePath
|
Min-/fourseq
|
src/utils/SamToFastq.hs
|
bsd-3-clause
| 1,349 | 0 | 12 | 305 | 358 | 177 | 181 | 28 | 2 |
--
-- Copyright (C) 2004 Don Stewart - http://www.cse.unsw.edu.au/~dons
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
-- USA
--
module System.Plugins.Utils (
Arg,
hWrite,
mkUnique,
hMkUnique,
mkUniqueIn,
hMkUniqueIn,
findFile,
mkTemp, mkTempIn, {- internal -}
replaceSuffix,
outFilePath,
dropSuffix,
mkModid,
changeFileExt,
joinFileExt,
splitFileExt,
isSublistOf, -- :: Eq a => [a] -> [a] -> Bool
dirname,
basename,
(</>), (<.>), (<+>), (<>),
newer,
encode,
decode,
EncodedString,
panic
) where
#include "../../../config.h"
import System.Plugins.Env ( isLoaded )
import System.Plugins.Consts ( objSuf, hiSuf, tmpDir )
import qualified System.MkTemp ( mkstemps )
import Data.Char
import Data.List
import System.IO
import System.Environment ( getEnv )
import System.Directory
-- ---------------------------------------------------------------------
-- some misc types we use
type Arg = String
-- ---------------------------------------------------------------------
-- | useful
--
panic s = ioError ( userError s )
-- ---------------------------------------------------------------------
-- | writeFile for Handles
--
hWrite :: Handle -> String -> IO ()
hWrite hdl src = hPutStr hdl src >> hClose hdl >> return ()
-- ---------------------------------------------------------------------
-- | mkstemps.
--
-- We use the Haskell version now... it is faster than calling into
-- mkstemps(3).
--
mkstemps :: String -> Int -> IO (String,Handle)
mkstemps path slen = do
m_v <- System.MkTemp.mkstemps path slen
case m_v of Nothing -> error "mkstemps : couldn't create temp file"
Just v' -> return v'
{-
mkstemps path slen = do
withCString path $ \ ptr -> do
let c_slen = fromIntegral $ slen+1
fd <- throwErrnoIfMinus1 "mkstemps" $ c_mkstemps ptr c_slen
name <- peekCString ptr
hdl <- fdToHandle fd
return (name, hdl)
foreign import ccall unsafe "mkstemps" c_mkstemps :: CString -> CInt -> IO Fd
-}
-- ---------------------------------------------------------------------
-- | create a new temp file, returning name and handle.
-- bit like the mktemp shell utility
--
mkTemp :: IO (String,Handle)
mkTemp = do tmpd <- catch (getEnv "TMPDIR") (\_ -> return tmpDir)
mkTempIn tmpd
mkTempIn :: String -> IO (String, Handle)
mkTempIn tmpd = do
(tmpf,hdl) <- mkstemps (tmpd++"/MXXXXXXXXX.hs") 3
let modname = mkModid $ dropSuffix tmpf
if and $ map (\c -> isAlphaNum c && c /= '_') modname
then return (tmpf,hdl)
else panic $ "Illegal characters in temp file: `"++tmpf++"'"
-- ---------------------------------------------------------------------
-- | Get a new temp file, unique from those in /tmp, and from those
-- modules already loaded. Very nice for merge/eval uses.
--
-- Will run for a long time if we can't create a temp file, luckily
-- mkstemps gives us a pretty big search space
--
mkUnique :: IO FilePath
mkUnique = do (t,h) <- hMkUnique
hClose h >> return t
hMkUnique :: IO (FilePath,Handle)
hMkUnique = do (t,h) <- mkTemp
alreadyLoaded <- isLoaded t -- not unique!
if alreadyLoaded
then hClose h >> removeFile t >> hMkUnique
else return (t,h)
mkUniqueIn :: FilePath -> IO FilePath
mkUniqueIn dir = do (t,h) <- hMkUniqueIn dir
hClose h >> return t
hMkUniqueIn :: FilePath -> IO (FilePath,Handle)
hMkUniqueIn dir = do (t,h) <- mkTempIn dir
alreadyLoaded <- isLoaded t -- not unique!
if alreadyLoaded
then hClose h >> removeFile t >> hMkUniqueIn dir
else return (t,h)
findFile :: [String] -> FilePath -> IO (Maybe FilePath)
findFile [] _ = return Nothing
findFile (ext:exts) file
= do let l = changeFileExt file ext
b <- doesFileExist l
if b then return $ Just l
else findFile exts file
-- ---------------------------------------------------------------------
-- some filename manipulation stuff
--
-- | </>, <.> : join two path components
--
infixr 6 </>
infixr 6 <.>
(</>), (<.>), (<+>), (<>) :: FilePath -> FilePath -> FilePath
[] </> b = b
a </> b = a ++ "/" ++ b
[] <.> b = b
a <.> b = a ++ "." ++ b
[] <+> b = b
a <+> b = a ++ " " ++ b
[] <> b = b
a <> b = a ++ b
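--
-- For example:
--
-- > "foo" </> "bar"   == "foo/bar"
-- > ""    </> "bar"   == "bar"
-- > "foo" <.> "hs"    == "foo.hs"
-- > "-O2" <+> "-fasm" == "-O2 -fasm"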
--
-- | dirname : return the directory portion of a file path
-- if null, return "."
--
dirname :: FilePath -> FilePath
dirname p =
let x = findIndices (== '\\') p
y = findIndices (== '/') p
in
if not $ null x
then if not $ null y
then if (maximum x) > (maximum y) then dirname' '\\' p else dirname' '/' p
else dirname' '\\' p
else dirname' '/' p
where
dirname' chara pa =
case reverse $ dropWhile (/= chara) $ reverse pa of
[] -> "."
pa' -> pa'
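--
-- For example (note the trailing separator):
--
-- > dirname "/home/dons/foo.hs" == "/home/dons/"
-- > dirname "foo.hs" == "."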
--
-- | basename : return the filename portion of a path
--
basename :: FilePath -> FilePath
basename p =
let x = findIndices (== '\\') p
y = findIndices (== '/') p
in
if not $ null x
then if not $ null y
then if (maximum x) > (maximum y) then basename' '\\' p else basename' '/' p
else basename' '\\' p
else basename' '/' p
where
basename' chara pa = reverse $ takeWhile (/= chara) $ reverse pa
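--
-- For example:
--
-- > basename "/home/dons/foo.hs" == "foo.hs"
-- > basename "foo.hs" == "foo.hs"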
--
-- drop suffix
--
dropSuffix :: FilePath -> FilePath
dropSuffix f = reverse . tail . dropWhile (/= '.') $ reverse f
--
-- | work out the mod name from a filepath
mkModid :: String -> String
mkModid = (takeWhile (/= '.')) . reverse . (takeWhile (\x -> ('/'/= x) && ('\\' /= x))) . reverse
-----------------------------------------------------------
-- Code from Cabal ----------------------------------------
-- | Changes the extension of a file path.
changeFileExt :: FilePath -- ^ The path information to modify.
-> String -- ^ The new extension (without a leading period).
-- Specify an empty string to remove an existing
-- extension from path.
-> FilePath -- ^ A string containing the modified path information.
changeFileExt fpath ext = joinFileExt name ext
where
(name,_) = splitFileExt fpath
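-- For example:
--
-- > changeFileExt "foo.hs" "o" == "foo.o"
-- > changeFileExt "foo" "o"    == "foo.o"
-- > changeFileExt "foo.hs" ""  == "foo"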
-- | The 'joinFileExt' function is the opposite of 'splitFileExt'.
-- It joins a file name and an extension to form a complete file path.
--
-- The general rule is:
--
-- > filename `joinFileExt` ext == path
-- > where
-- > (filename,ext) = splitFileExt path
joinFileExt :: String -> String -> FilePath
joinFileExt fpath "" = fpath
joinFileExt fpath ext = fpath ++ '.':ext
-- | Split the path into file name and extension. If the file doesn\'t have extension,
-- the function will return empty string. The extension doesn\'t include a leading period.
--
-- Examples:
--
-- > splitFileExt "foo.ext" == ("foo", "ext")
-- > splitFileExt "foo" == ("foo", "")
-- > splitFileExt "." == (".", "")
-- > splitFileExt ".." == ("..", "")
-- > splitFileExt "foo.bar."== ("foo.bar.", "")
splitFileExt :: FilePath -> (String, String)
splitFileExt p =
case break (== '.') fname of
(suf@(_:_),_:pre) -> (reverse (pre++fpath), reverse suf)
_ -> (p, [])
where
(fname,fpath) = break isPathSeparator (reverse p)
-- | Checks whether the character is a valid path separator for the host
-- platform. The valid character is a 'pathSeparator' but since the Windows
-- operating system also accepts a slash (\"\/\") since DOS 2, the function
-- checks for it on this platform, too.
isPathSeparator :: Char -> Bool
isPathSeparator ch =
#if defined(CYGWIN) || defined(__MINGW32__)
ch == '/' || ch == '\\'
#else
ch == '/'
#endif
-- Code from Cabal end ------------------------------------
-----------------------------------------------------------
-- | return the object file, given the .conf file
-- i.e. /home/dons/foo.rc -> /home/dons/foo.o
--
-- we depend on the suffix we are given having a lead '.'
--
replaceSuffix :: FilePath -> String -> FilePath
replaceSuffix [] _ = [] -- ?
replaceSuffix f suf =
case reverse $ dropWhile (/= '.') $ reverse f of
[] -> f ++ suf -- no '.' in file name
f' -> f' ++ tail suf
--
-- Normally we create the .hi and .o files next to the .hs files.
-- For some uses this is annoying (i.e. true EDSL users don't actually
-- want to know that their code is compiled at all), and for hmake-like
-- applications.
--
-- This code checks if "-o foo" or "-odir foodir" are supplied as args
-- to make(), and if so returns a modified file path, otherwise it
-- uses the source file to determine the path to where the object and
-- .hi file will be put.
--
outFilePath :: FilePath -> [Arg] -> (FilePath,FilePath)
outFilePath src args =
let objs = find_o args -- user sets explicit object path
paths = find_p args -- user sets a directory to put stuff in
in case () of { _
| not (null objs)
-> let obj = last objs in (obj, mk_hi obj)
| not (null paths)
-> let obj = last paths </> mk_o (basename src) in (obj, mk_hi obj)
| otherwise
-> (mk_o src, mk_hi src)
}
where
outpath = "-o"
outdir = "-odir"
mk_hi s = replaceSuffix s hiSuf
mk_o s = replaceSuffix s objSuf
find_o [] = []
find_o (f:f':fs) | f == outpath = [f']
| otherwise = find_o $! f':fs
find_o _ = []
find_p [] = []
find_p (f:f':fs) | f == outdir = [f']
| otherwise = find_p $! f':fs
find_p _ = []
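-- For example (assuming the usual @objSuf = ".o"@ and @hiSuf = ".hi"@ from
-- "System.Plugins.Consts"):
--
-- > outFilePath "A.hs" []                         == ("A.o", "A.hi")
-- > outFilePath "src/A.hs" ["-o", "build/A.o"]    == ("build/A.o", "build/A.hi")
-- > outFilePath "src/A.hs" ["-odir", "build"]     == ("build/A.o", "build/A.hi")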
------------------------------------------------------------------------
--
-- | is file1 newer than file2?
--
-- needs some fixing to work with 6.0.x series. (is this true?)
--
-- fileExist still seems to throw exceptions on some platforms: ia64 in
-- particular.
--
-- invariant: we already assume the first file, 'a', exists
--
newer :: FilePath -> FilePath -> IO Bool
newer a b = do
a_t <- getModificationTime a
b_exists <- doesFileExist b
if not b_exists
then return True -- needs compiling
else do b_t <- getModificationTime b
return ( a_t > b_t ) -- maybe need recompiling
------------------------------------------------------------------------
--
-- | return the Z-Encoding of the string.
--
-- Stolen from GHC. Use -package ghc as soon as possible
--
type EncodedString = String
encode :: String -> EncodedString
encode [] = []
encode (c:cs) = encode_ch c ++ encode cs
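-- For example, '>' encodes to "zg" while ordinary alphanumerics pass through:
--
-- > encode "foo>bar" == "foozgbar"
-- > decode (encode "foo>bar") == "foo>bar"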
unencodedChar :: Char -> Bool -- True for chars that don't need encoding
unencodedChar 'Z' = False
unencodedChar 'z' = False
unencodedChar c = c >= 'a' && c <= 'z'
|| c >= 'A' && c <= 'Z'
|| c >= '0' && c <= '9'
--
-- Decode is used for user printing.
--
decode :: EncodedString -> String
decode [] = []
decode ('Z' : d : rest) | isDigit d = decode_tuple d rest
| otherwise = decode_upper d : decode rest
decode ('z' : d : rest) | isDigit d = decode_num_esc d rest
| otherwise = decode_lower d : decode rest
decode (c : rest) = c : decode rest
decode_upper, decode_lower :: Char -> Char
decode_upper 'L' = '('
decode_upper 'R' = ')'
decode_upper 'M' = '['
decode_upper 'N' = ']'
decode_upper 'C' = ':'
decode_upper 'Z' = 'Z'
decode_upper ch = error $ "decode_upper can't handle this char `"++[ch]++"'"
decode_lower 'z' = 'z'
decode_lower 'a' = '&'
decode_lower 'b' = '|'
decode_lower 'c' = '^'
decode_lower 'd' = '$'
decode_lower 'e' = '='
decode_lower 'g' = '>'
decode_lower 'h' = '#'
decode_lower 'i' = '.'
decode_lower 'l' = '<'
decode_lower 'm' = '-'
decode_lower 'n' = '!'
decode_lower 'p' = '+'
decode_lower 'q' = '\''
decode_lower 'r' = '\\'
decode_lower 's' = '/'
decode_lower 't' = '*'
decode_lower 'u' = '_'
decode_lower 'v' = '%'
decode_lower ch = error $ "decode_lower can't handle this char `"++[ch]++"'"
-- Characters not having a specific code are coded as z224U
decode_num_esc :: Char -> [Char] -> String
decode_num_esc d cs
= go (digitToInt d) cs
where
go n (c : rest) | isDigit c = go (10*n + digitToInt c) rest
go n ('U' : rest) = chr n : decode rest
go _ other = error $
"decode_num_esc can't handle this: \""++other++"\""
encode_ch :: Char -> EncodedString
encode_ch c | unencodedChar c = [c] -- Common case first
-- Constructors
encode_ch '(' = "ZL" -- Needed for things like (,), and (->)
encode_ch ')' = "ZR" -- For symmetry with (
encode_ch '[' = "ZM"
encode_ch ']' = "ZN"
encode_ch ':' = "ZC"
encode_ch 'Z' = "ZZ"
-- Variables
encode_ch 'z' = "zz"
encode_ch '&' = "za"
encode_ch '|' = "zb"
encode_ch '^' = "zc"
encode_ch '$' = "zd"
encode_ch '=' = "ze"
encode_ch '>' = "zg"
encode_ch '#' = "zh"
encode_ch '.' = "zi"
encode_ch '<' = "zl"
encode_ch '-' = "zm"
encode_ch '!' = "zn"
encode_ch '+' = "zp"
encode_ch '\'' = "zq"
encode_ch '\\' = "zr"
encode_ch '/' = "zs"
encode_ch '*' = "zt"
encode_ch '_' = "zu"
encode_ch '%' = "zv"
encode_ch c = 'z' : shows (ord c) "U"
decode_tuple :: Char -> EncodedString -> String
decode_tuple d cs
= go (digitToInt d) cs
where
go n (c : rest) | isDigit c = go (10*n + digitToInt c) rest
go 0 ['T'] = "()"
go n ['T'] = '(' : replicate (n-1) ',' ++ ")"
go 1 ['H'] = "(# #)"
go n ['H'] = '(' : '#' : replicate (n-1) ',' ++ "#)"
go _ other = error $ "decode_tuple \'"++other++"'"
-- ---------------------------------------------------------------------
--
-- 'isSublistOf' takes two arguments and returns 'True' iff the first
-- list is a sublist of the second list. This means that the first list
-- is wholly contained within the second list. Both lists must be
-- finite.
isSublistOf :: Eq a => [a] -> [a] -> Bool
isSublistOf [] _ = True
isSublistOf _ [] = False
isSublistOf x y@(_:ys)
| isPrefixOf x y = True
| otherwise = isSublistOf x ys
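--
-- For example:
--
-- > isSublistOf "ell" "hello" == True
-- > isSublistOf "elo" "hello" == False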
|
abuiles/turbinado-blog
|
tmp/dependencies/hs-plugins-1.3.1/src/System/Plugins/Utils.hs
|
bsd-3-clause
| 15,029 | 0 | 18 | 3,959 | 3,584 | 1,902 | 1,682 | 264 | 6 |
{-|
Module : CPE
Description : Data types and parsers for CPEs
Copyright : (c) Stephen O'Brien 2016
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability :
-}
module CPE.Parser.CPE where
|
wayofthepie/cpe
|
src/CPE/Parser/CPE.hs
|
bsd-3-clause
| 263 | 0 | 3 | 77 | 8 | 6 | 2 | 1 | 0 |
module ProcLang.Evaluator
( valueOf
, run
, eval
, evalProgram
) where
import Control.Applicative ((<|>))
import ProcLang.Data
import ProcLang.Parser
type EvaluateResult = Try ExpressedValue
liftMaybe :: a -> Maybe b -> Either a b
liftMaybe _ (Just x) = Right x
liftMaybe y Nothing = Left y
run :: String -> EvaluateResult
run input = parseProgram input >>= evalProgram
eval :: Expression -> EvaluateResult
eval = flip valueOf empty
evalProgram :: Program -> EvaluateResult
evalProgram (Prog expr) = eval expr
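-- A small worked example (illustrative; it assumes 'ExpressedValue' supports
-- the comparison, e.g. via a derived 'Eq' instance):
--
-- > eval (BinOpExpr Add (ConstExpr (ExprNum 1)) (ConstExpr (ExprNum 2)))
-- >   == Right (ExprNum 3)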
valueOf :: Expression -> Environment -> EvaluateResult
valueOf (ConstExpr x) _ = evalConstExpr x
valueOf (VarExpr var) env = evalVarExpr var env
valueOf (BinOpExpr op expr1 expr2) env = evalBinOpExpr op expr1 expr2 env
valueOf (UnaryOpExpr op expr) env = evalUnaryOpExpr op expr env
valueOf (CondExpr pairs) env = evalCondExpr pairs env
valueOf (LetExpr bindings body) env = evalLetExpr bindings body env
valueOf (LetStarExpr bindings body) env = evalLetStarExpr bindings body env
valueOf (ProcExpr params body) env = evalProcExpr params body env
valueOf (CallExpr rator rand) env = evalCallExpr rator rand env
evalConstExpr :: ExpressedValue -> EvaluateResult
evalConstExpr = return
evalVarExpr :: String -> Environment -> EvaluateResult
evalVarExpr var env =
liftMaybe ("Not in scope: " `mappend` var) (apply env var)
binBoolOpMap :: [(BinOp, Bool -> Bool -> Bool)]
binBoolOpMap = []
binNumToNumOpMap :: [(BinOp, Integer -> Integer -> Integer)]
binNumToNumOpMap = [(Add, (+)), (Sub, (-)), (Mul, (*)), (Div, div)]
binNumToBoolOpMap :: [(BinOp, Integer -> Integer -> Bool)]
binNumToBoolOpMap = [(Gt, (>)), (Le, (<)), (Eq, (==))]
unaryBoolOpMap :: [(UnaryOp, Bool -> Bool)]
unaryBoolOpMap = []
unaryNumToNumOpMap :: [(UnaryOp, Integer -> Integer)]
unaryNumToNumOpMap = [(Minus, negate)]
unaryNumToBoolOpMap :: [(UnaryOp, Integer -> Bool)]
unaryNumToBoolOpMap = [(IsZero, (0 ==))]
unpackNum :: String -> ExpressedValue -> Try Integer
unpackNum _ (ExprNum n) = return n
unpackNum caller notNum = Left $ concat [
caller, ": Unpacking a not number value: ", show notNum ]
unpackBool :: String -> ExpressedValue -> Try Bool
unpackBool _ (ExprBool b) = return b
unpackBool caller notBool = Left $ concat [
caller, ": Unpacking a not boolean value: ", show notBool ]
tryFind :: Eq a => String -> a -> [(a, b)] -> Try b
tryFind err x pairs = liftMaybe err (lookup x pairs)
tryFindOp :: (Eq a, Show a) => a -> [(a, b)] -> Try b
tryFindOp op = tryFind ("Unknown operator: " `mappend` show op) op
evalBinOpExpr :: BinOp -> Expression -> Expression -> Environment
-> EvaluateResult
evalBinOpExpr op expr1 expr2 env = do
v1 <- valueOf expr1 env
v2 <- valueOf expr2 env
numToNum v1 v2 <|> numToBool v1 v2 <|> boolToBool v1 v2
where
findOpFrom = tryFindOp op
unpackN = unpackNum $ "binary operation " `mappend` show op
unpackB = unpackBool $ "binary operation " `mappend` show op
numToNum :: ExpressedValue -> ExpressedValue -> EvaluateResult
numToNum val1 val2 = do
func <- findOpFrom binNumToNumOpMap
n1 <- unpackN val1
n2 <- unpackN val2
return . ExprNum $ func n1 n2
numToBool :: ExpressedValue -> ExpressedValue -> EvaluateResult
numToBool val1 val2 = do
func <- findOpFrom binNumToBoolOpMap
n1 <- unpackN val1
n2 <- unpackN val2
return . ExprBool $ func n1 n2
boolToBool :: ExpressedValue -> ExpressedValue -> EvaluateResult
boolToBool val1 val2 = do
func <- findOpFrom binBoolOpMap
b1 <- unpackB val1
b2 <- unpackB val2
return . ExprBool $ func b1 b2
evalUnaryOpExpr :: UnaryOp -> Expression -> Environment
-> EvaluateResult
evalUnaryOpExpr op expr env = do
v <- valueOf expr env
numToNum v <|> numToBool v <|> boolToBool v
where
findOpFrom = tryFindOp op
unpackN = unpackNum $ "unary operation " `mappend` show op
unpackB = unpackBool $ "unary operation " `mappend` show op
numToNum :: ExpressedValue -> EvaluateResult
numToNum val = do
func <- findOpFrom unaryNumToNumOpMap
n <- unpackN val
return . ExprNum $ func n
numToBool :: ExpressedValue -> EvaluateResult
numToBool val = do
func <- findOpFrom unaryNumToBoolOpMap
n <- unpackN val
return . ExprBool $ func n
boolToBool :: ExpressedValue -> EvaluateResult
boolToBool val = do
func <- findOpFrom unaryBoolOpMap
b <- unpackB val
return . ExprBool $ func b
evalCondExpr :: [(Expression, Expression)] -> Environment -> EvaluateResult
evalCondExpr [] _ = Left "No predicate is true"
evalCondExpr ((e1, e2):pairs) env = do
v <- valueOf e1 env
case v of
ExprBool True -> valueOf e2 env
ExprBool False -> evalCondExpr pairs env
_ -> Left $
"Predicate expression should be boolean, but got: "
`mappend` show v
evalLetExpr :: [(String, Expression)] -> Expression -> Environment
-> EvaluateResult
evalLetExpr bindings body env = do
bindVals <- evaledBindings
valueOf body $ extendMany bindVals env
where
func maybeBindVals (name, expr) = do
pairs <- maybeBindVals
val <- valueOf expr env
return $ (name, val):pairs
evaledBindings = do
pairs <- foldl func (return []) bindings
return $ reverse pairs
evalLetStarExpr :: [(String, Expression)] -> Expression -> Environment
-> EvaluateResult
evalLetStarExpr [] body env = valueOf body env
evalLetStarExpr ((var, expr):pairs) body env = do
val <- valueOf expr env
evalLetStarExpr pairs body (extend var val env)
evalProcExpr :: [String] -> Expression -> Environment -> EvaluateResult
evalProcExpr params body env = return . ExprProc $ Procedure params body env
evalCallExpr :: Expression -> [Expression] -> Environment -> EvaluateResult
evalCallExpr rator rand env = do
rator <- valueOf rator env
proc <- unpackProc rator
args <- maybeArgs
applyProcedure proc args
where
unpackProc :: ExpressedValue -> Try Procedure
unpackProc (ExprProc proc) = Right proc
unpackProc notProc = Left $
"Operator of call expression should be procedure, but got: "
`mappend` show notProc
func :: Try [ExpressedValue] -> Try ExpressedValue -> Try [ExpressedValue]
func maybeArgs maybeArg = do
args <- maybeArgs
arg <- maybeArg
return $ arg:args
maybeArgs :: Try [ExpressedValue]
maybeArgs = reverse <$>
foldl func (return []) (fmap (`valueOf` env) rand)
applyProcedure :: Procedure -> [ExpressedValue] -> EvaluateResult
applyProcedure (Procedure params body savedEnv) args =
applyProcedure' params body savedEnv args []
applyProcedure' :: [String] -> Expression -> Environment
-> [ExpressedValue]
-> [String]
-> EvaluateResult
applyProcedure' [] body env [] _ = valueOf body env
applyProcedure' params _ _ [] _ =
Left $ "Too many parameters: " `mappend` show params
applyProcedure' [] _ _ args _ =
Left $ "Too many arguments: " `mappend` show args
applyProcedure' (p:ps) body env (a:as) usedParams =
if p `elem` usedParams
then Left $ "Parameter name conflict: " `mappend` p
else applyProcedure' ps body (extend p a env) as (p:usedParams)
|
li-zhirui/EoplLangs
|
src/ProcLang/Evaluator.hs
|
bsd-3-clause
| 7,387 | 0 | 13 | 1,728 | 2,474 | 1,265 | 1,209 | 172 | 6 |
-- | Operations on the 'Area' type that involve random numbers.
module Game.LambdaHack.Server.DungeonGen.AreaRnd
( -- * Picking points inside areas
mkFixed, pointInArea, findPointInArea, mkVoidRoom, mkRoom
-- * Choosing connections
, connectGrid, randomConnection
-- * Plotting corridors
, HV(..), Corridor, connectPlaces
, SpecialArea(..), grid
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, connectGrid', sortPoint, mkCorridor, borderPlace
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.EnumMap.Strict as EM
import qualified Data.EnumSet as ES
import Data.Functor.Identity (runIdentity)
import qualified Data.IntSet as IS
import Game.LambdaHack.Common.Area
import Game.LambdaHack.Common.Point
import Game.LambdaHack.Common.Vector
import Game.LambdaHack.Content.PlaceKind
import Game.LambdaHack.Core.Random
import Game.LambdaHack.Definition.Defs
-- Doesn't respect minimum sizes, because staircases are specified verbatim,
-- so can't be arbitrarily scaled up.
-- The size may be one more than what maximal size hint requests,
-- but this is safe (limited by area size) and makes up for the rigidity
-- of the fixed room sizes (e.g., that the size is always odd).
mkFixed :: (X, Y) -- ^ maximum size
-> Area -- ^ the containing area, not the room itself
-> Point -- ^ the center point
-> Area
mkFixed (xMax, yMax) area p@Point{..} =
let (x0, y0, x1, y1) = fromArea area
xradius = min ((xMax + 1) `div` 2) $ min (px - x0) (x1 - px)
yradius = min ((yMax + 1) `div` 2) $ min (py - y0) (y1 - py)
a = (px - xradius, py - yradius, px + xradius, py + yradius)
in fromMaybe (error $ "" `showFailure` (a, xMax, yMax, area, p)) $ toArea a
-- | Pick a random point within an area.
pointInArea :: Area -> Rnd Point
pointInArea area = do
let (Point x0 y0, xspan, yspan) = spanArea area
pxy <- randomR0 (xspan * yspan - 1)
let Point{..} = punindex xspan pxy
return $! Point (x0 + px) (y0 + py)
-- | Find a suitable position in the area, based on random points
-- and a preference predicate and fallback acceptability predicate.
findPointInArea :: Area -> (Point -> Maybe Point)
-> Int -> (Point -> Maybe Point)
-> Rnd (Maybe Point)
findPointInArea area g gnumTries f =
let (Point x0 y0, xspan, yspan) = spanArea area
checkPoint :: Applicative m
=> (Point -> Maybe Point) -> m (Maybe Point) -> Int
-> m (Maybe Point)
{-# INLINE checkPoint #-}
checkPoint check fallback pxyRelative =
let Point{..} = punindex xspan pxyRelative
pos = Point (x0 + px) (y0 + py)
in case check pos of
Just p -> pure $ Just p
Nothing -> fallback
gsearch 0 = fsearch (xspan * yspan * 10)
gsearch count = do
pxy <- randomR0 (xspan * yspan - 1)
checkPoint g (gsearch (count - 1)) pxy
fsearch 0 = return $! runIdentity $ searchAll (xspan * yspan - 1)
fsearch count = do
pxy <- randomR0 (xspan * yspan - 1)
checkPoint f (fsearch (count - 1)) pxy
searchAll (-1) = pure Nothing
searchAll pxyRelative =
checkPoint f (searchAll (pxyRelative - 1)) pxyRelative
in gsearch gnumTries
-- | Create a void room, i.e., a single point area within the designated area.
mkVoidRoom :: Area -> Rnd Area
mkVoidRoom area = do
-- Pass corridors closer to the middle of the grid area, if possible.
let core = fromMaybe area $ shrink area
pxy <- pointInArea core
return $! trivialArea pxy
-- | Create a random room according to given parameters.
mkRoom :: (X, Y) -- ^ minimum size
-> (X, Y) -- ^ maximum size
-> Area -- ^ the containing area, not the room itself
-> Rnd Area
mkRoom (xm, ym) (xM, yM) area = do
let (x0, y0, x1, y1) = fromArea area
xspan = x1 - x0 + 1
yspan = y1 - y0 + 1
aW = (min xm xspan, min ym yspan, min xM xspan, min yM yspan)
areaW = fromMaybe (error $ "" `showFailure` aW) $ toArea aW
Point xW yW <- pointInArea areaW -- roll size
let a1 = (x0, y0, max x0 (x1 - xW + 1), max y0 (y1 - yW + 1))
area1 = fromMaybe (error $ "" `showFailure` a1) $ toArea a1
Point rx1 ry1 <- pointInArea area1 -- roll top-left corner
let a3 = (rx1, ry1, rx1 + xW - 1, ry1 + yW - 1)
area3 = fromMaybe (error $ "" `showFailure` a3) $ toArea a3
return $! area3
-- Choosing connections between areas in a grid
-- | Pick a subset of connections between adjacent areas within a grid until
-- there is only one connected component in the graph of all areas.
connectGrid :: ES.EnumSet Point -> (X, Y) -> Rnd [(Point, Point)]
connectGrid voidPlaces (nx, ny) = do
let unconnected = ES.fromDistinctAscList [ Point x y
| y <- [0..ny-1], x <- [0..nx-1] ]
-- Candidates are neighbours that are still unconnected. We start with
-- a random choice.
p <- oneOf $ ES.elems $ unconnected ES.\\ voidPlaces
let candidates = ES.singleton p
connectGrid' voidPlaces (nx, ny) unconnected candidates []
connectGrid' :: ES.EnumSet Point -> (X, Y)
-> ES.EnumSet Point -> ES.EnumSet Point
-> [(Point, Point)]
-> Rnd [(Point, Point)]
connectGrid' voidPlaces (nx, ny) unconnected candidates !acc
| unconnected `ES.isSubsetOf` voidPlaces = return acc
| otherwise = do
let candidatesBest = candidates ES.\\ voidPlaces
c <- oneOf $ ES.elems $ if ES.null candidatesBest
then candidates
else candidatesBest
-- potential new candidates:
let ns = ES.fromList $ vicinityCardinal nx ny c
nu = ES.delete c unconnected -- new unconnected
-- (new candidates, potential connections):
(nc, ds) = ES.partition (`ES.member` nu) ns
new <- if ES.null ds
then return id
else do
d <- oneOf (ES.elems ds)
return (sortPoint (c, d) :)
connectGrid' voidPlaces (nx, ny) nu
(ES.delete c (candidates `ES.union` nc)) (new acc)
-- | Sort the sequence of two points, in the derived lexicographic order.
sortPoint :: (Point, Point) -> (Point, Point)
sortPoint (a, b) | a <= b = (a, b)
| otherwise = (b, a)
-- | Pick a single random connection between adjacent areas within a grid.
randomConnection :: (X, Y) -> Rnd (Point, Point)
randomConnection (nx, ny) =
assert (nx > 1 && ny > 0 || nx > 0 && ny > 1 `blame` (nx, ny)) $ do
rb <- oneOf [False, True]
if rb && nx > 1 || ny <= 1
then do
rx <- randomR0 (nx - 2)
ry <- randomR0 (ny - 1)
return (Point rx ry, Point (rx+1) ry)
else do
rx <- randomR0 (nx - 1)
ry <- randomR0 (ny - 2)
return (Point rx ry, Point rx (ry+1))
-- Plotting individual corridors between two areas
-- | The choice of horizontal and vertical orientation.
data HV = Horiz | Vert
deriving Eq
-- | The coordinates of consecutive fields of a corridor.
type Corridor = (Point, Point, Point, Point)
-- | Create a corridor, either horizontal or vertical, with
-- a possible intermediate part that is in the opposite direction.
-- There might not always exist a good intermediate point
-- if the places are allowed to be close together
-- and then we let the intermediate part degenerate.
mkCorridor :: HV -- ^ orientation of the starting section
-> Point -- ^ starting point
-> Bool -- ^ starting is inside @FGround@ or @FFloor@
-> Point -- ^ ending point
-> Bool -- ^ ending is inside @FGround@ or @FFloor@
-> Area -- ^ the area containing the intermediate point
-> Rnd Corridor -- ^ straight sections of the corridor
mkCorridor hv (Point x0 y0) p0floor (Point x1 y1) p1floor area = do
Point rxRaw ryRaw <- pointInArea area
let (sx0, sy0, sx1, sy1) = fromArea area
-- Avoid corridors that run along @FGround@ or @FFloor@ fence,
-- unless not possible.
rx = if | rxRaw == sx0 + 1 && p0floor -> sx0
| rxRaw == sx1 - 1 && p1floor -> sx1
| otherwise -> rxRaw
ry = if | ryRaw == sy0 + 1 && p0floor -> sy0
| ryRaw == sy1 - 1 && p1floor -> sy1
| otherwise -> ryRaw
return $! case hv of
Horiz -> (Point x0 y0, Point rx y0, Point rx y1, Point x1 y1)
Vert -> (Point x0 y0, Point x0 ry, Point x1 ry, Point x1 y1)
-- | Try to connect two interiors of places with a corridor.
-- Choose entrances some steps away from the edges, if the place
-- is big enough. Note that with @pfence == FNone@, the inner area considered
-- is the strict interior of the place, without the outermost tiles.
--
-- The corridor connects (touches) the inner areas and the turning point
-- of the corridor (if any) is outside of the outer areas
-- and inside the grid areas.
connectPlaces :: (Area, Fence, Area) -> (Area, Fence, Area)
-> Rnd (Maybe Corridor)
connectPlaces (_, _, sg) (_, _, tg) | sg == tg = return Nothing
connectPlaces s3@(sqarea, spfence, sg) t3@(tqarea, tpfence, tg) = do
let (sa, so, stiny) = borderPlace sqarea spfence
(ta, to, ttiny) = borderPlace tqarea tpfence
trim area =
let (x0, y0, x1, y1) = fromArea area
dx = case (x1 - x0) `div` 2 of
0 -> 0
1 -> 1
2 -> 1
3 -> 1
_ -> 3
dy = case (y1 - y0) `div` 2 of
0 -> 0
1 -> 1
2 -> 1
3 -> 1
_ -> 3
in fromMaybe (error $ "" `showFailure` (area, s3, t3))
$ toArea (x0 + dx, y0 + dy, x1 - dx, y1 - dy)
Point sx sy <- pointInArea $ trim sa
Point tx ty <- pointInArea $ trim ta
-- If the place (e.g., void place) is slim (at most 2-tile wide, no fence),
-- overwrite it with corridor. The place may not even be built (e.g., void)
-- and the overwrite ensures connections through it are not broken.
let (_, _, sax1Raw, say1Raw) = fromArea sa -- inner area
sslim = stiny && spfence == FNone
(sax1, say1) = if sslim
then (sax1Raw - 1, say1Raw - 1)
else (sax1Raw, say1Raw)
(tax0Raw, tay0Raw, _, _) = fromArea ta
tslim = ttiny && tpfence == FNone
(tax0, tay0) = if tslim
then (tax0Raw + 1, tay0Raw + 1)
else (tax0Raw, tay0Raw)
(_, _, sox1, soy1) = fromArea so -- outer area
(tox0, toy0, _, _) = fromArea to
(sgx0, sgy0, sgx1, sgy1) = fromArea sg -- grid area
(tgx0, tgy0, tgx1, tgy1) = fromArea tg
(hv, area, p0, p1)
| sgx1 == tgx0 =
let x0 = if sgy0 <= ty && ty <= sgy1 then sox1 + 1 else sgx1
x1 = if tgy0 <= sy && sy <= tgy1 then tox0 - 1 else sgx1
in case toArea (x0, min sy ty, x1, max sy ty) of
Just a -> (Horiz, a, Point (sax1 + 1) sy, Point (tax0 - 1) ty)
Nothing -> error $ "" `showFailure` (sx, sy, tx, ty, s3, t3)
| otherwise = assert (sgy1 == tgy0) $
let y0 = if sgx0 <= tx && tx <= sgx1 then soy1 + 1 else sgy1
y1 = if tgx0 <= sx && sx <= tgx1 then toy0 - 1 else sgy1
in case toArea (min sx tx, y0, max sx tx, y1) of
Just a -> (Vert, a, Point sx (say1 + 1), Point tx (tay0 - 1))
Nothing -> error $ "" `showFailure` (sx, sy, tx, ty, s3, t3)
nin p = not $ inside sa p || inside ta p
!_A = assert (sslim || tslim
|| allB nin [p0, p1] `blame` (sx, sy, tx, ty, s3, t3)) ()
cor@(c1, c2, c3, c4) <- mkCorridor hv p0 (sa == so) p1 (ta == to) area
let !_A2 = assert (sslim || tslim || allB nin [c1, c2, c3, c4]
`blame` (cor, sx, sy, tx, ty, s3, t3)) ()
return $ Just cor
borderPlace :: Area -> Fence -> (Area, Area, Bool)
borderPlace qarea pfence = case pfence of
FWall -> (qarea, expand qarea, False)
FFloor -> (qarea, qarea, False)
FGround -> (qarea, qarea, False)
FNone -> case shrink qarea of
Nothing -> (qarea, qarea, True)
Just sr -> (sr, qarea, False)
data SpecialArea =
SpecialArea Area
| SpecialFixed Point (Freqs PlaceKind) Area
| SpecialMerged SpecialArea Point
deriving Show
-- | Divide uniformly a larger area into the given number of smaller areas
-- overlapping at the edges.
--
-- The list of fixed centers (some important points inside)
-- of (non-overlapping) areas is given. Incorporate those
-- with as little disruption as possible.
-- Assume each of the four boundaries of the cave is covered by a fixed centre.
grid :: EM.EnumMap Point (Freqs PlaceKind) -> [Point] -> Area -> (X, Y)
-> ((X, Y), EM.EnumMap Point SpecialArea)
grid fixedCenters boot area cellSize =
let (x0, y0, x1, y1) = fromArea area
f zsize z1 n prev (c1 : c2 : rest) =
let len = c2 - c1
cn = len * n `div` zsize
in -- traceShow ( zsize, z1, n, prev, len, cn
-- , len `div` max 1 (2 * cn) ) $
if cn < 2
then let mid1 = (c1 + c2) `div` 2
mid2 = (c1 + c2) `divUp` 2
mid = if mid1 - prev > 4 then mid1 else mid2
in (prev, mid, Just c1) : f zsize z1 n mid (c2 : rest)
else (prev, c1 + len `div` (2 * cn), Just c1)
: [ ( c1 + len * (2 * z - 1) `div` (2 * cn)
, c1 + len * (2 * z + 1) `div` (2 * cn)
, Nothing )
| z <- [1 .. cn - 1] ]
++ f zsize z1 n (c1 + len * (2 * cn - 1) `div` (2 * cn))
(c2 : rest)
f _ z1 _ prev [c1] = [(prev, z1, Just c1)]
f _ _ _ _ [] = error $ "empty list of centers" `showFailure` fixedCenters
(xCenters, yCenters) = IS.fromList *** IS.fromList
$ unzip $ map (px &&& py) $ EM.keys fixedCenters
distFromIS is z =
- minimum (maxBound : map (\i -> abs (i - z)) (IS.toList is))
xboot = nub $ sortOn (distFromIS xCenters)
$ filter (`IS.notMember` xCenters) $ map px boot
yboot = nub $ sortOn (distFromIS yCenters)
$ filter (`IS.notMember` yCenters) $ map py boot
-- Don't let boots ignore cell size too much, esp. in small caves.
xcellsInArea = (x1 - x0 + 1) `div` fst cellSize
ycellsInArea = (y1 - y0 + 1) `div` snd cellSize
xbootN = assert (xcellsInArea > 0) $ xcellsInArea - IS.size xCenters
ybootN = assert (ycellsInArea > 0) $ ycellsInArea - IS.size yCenters
xset = xCenters `IS.union` IS.fromList (take xbootN xboot)
yset = yCenters `IS.union` IS.fromList (take ybootN yboot)
xsize = IS.findMax xset - IS.findMin xset
ysize = IS.findMax yset - IS.findMin yset
-- This is precisely how the cave will be divided among places,
-- if there are no fixed centres except at boot coordinates.
-- In any case, places, except for at boot points and fixed centres,
-- are guaranteed at least the rolled minimal size of their
-- enclosing cell (with one shared fence). Fixed centres are guaranteed
-- a size between the cave cell size and the one implied by their
-- placement wrt the cave fence and other fixed centers.
lgrid = ( xsize `div` fst cellSize
, ysize `div` snd cellSize )
xallSegments = zip [0..] $ f xsize x1 (fst lgrid) x0 $ IS.toList xset
yallSegments = zip [0..] $ f ysize y1 (snd lgrid) y0 $ IS.toList yset
in -- traceShow (xallSegments, yallSegments) $
( (length xallSegments, length yallSegments)
, EM.fromDistinctAscList
[ ( Point x y
, case (mcx, mcy) of
(Just cx, Just cy) ->
case EM.lookup (Point cx cy) fixedCenters of
Nothing -> SpecialArea sarea
Just placeFreq -> SpecialFixed (Point cx cy) placeFreq sarea
_ -> SpecialArea sarea )
| (y, (cy0, cy1, mcy)) <- yallSegments
, (x, (cx0, cx1, mcx)) <- xallSegments
, let sarea = fromMaybe (error $ "" `showFailure` (x, y))
$ toArea (cx0, cy0, cx1, cy1) ] )
|
LambdaHack/LambdaHack
|
engine-src/Game/LambdaHack/Server/DungeonGen/AreaRnd.hs
|
bsd-3-clause
| 16,236 | 1 | 20 | 4,919 | 5,240 | 2,829 | 2,411 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
module Path
( Path
, empty
, cons
, uncons
, length
, toList
, lca
, keep
, (~=)
) where
import Data.Function (fix)
import Prelude hiding (head, length)
infix 4 ~=
data Path a
= Nil
| Cons
{-# UNPACK #-} !Int
{-# UNPACK #-} !Int
!(Tree a)
!(Path a)
instance Show a => Show (Path a) where
showsPrec p xs =
showParen (p > 10) $ showString "fromList " . shows (toList xs)
data Tree a
= Tip {-# UNPACK #-} !Int !a
| Bin {-# UNPACK #-} !Int !a !(Tree a) !(Tree a)
empty :: Path a
{-# INLINE empty #-}
empty = Nil
cons :: Int -> a -> Path a -> Path a
{-# INLINE cons #-}
cons k v = \ case
Cons n w t (Cons _ w' t' xs) | w == w' -> Cons (n + 1) (2 * w + 1) (Bin k v t t') xs
xs -> Cons (length xs + 1) 1 (Tip k v) xs
uncons :: Path a -> Maybe (Int, a, Path a)
uncons = \ case
Nil -> Nothing
Cons _ _ (Tip k v) xs -> Just (k, v, xs)
Cons _ w (Bin k v l r) xs -> Just (k, v, consTree w2 l (consTree w2 r xs))
where
w2 = w `div` 2
length :: Path a -> Int
{-# INLINE length #-}
length = \ case
Nil -> 0
Cons n _ _ _ -> n
toList :: Path a -> [(Int, a)]
toList = \ case
Nil -> []
Cons _ _ t xs -> fix (\ rec -> \ case
Tip k v -> ((k, v):)
Bin k v l r -> ((k, v):) . rec l . rec r) t (toList xs)
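-- For example, the most recently consed entry comes first (illustrative):
--
-- > toList (cons 3 'c' (cons 2 'b' (cons 1 'a' empty)))
-- >   == [(3,'c'),(2,'b'),(1,'a')]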
lca :: Path a -> Path a -> Path a
lca xs xs' = case compare n n' of
LT -> dropUntilSame xs (keep n xs')
EQ -> dropUntilSame xs xs'
GT -> dropUntilSame (keep n' xs) xs'
where
n = length xs
n' = length xs'
keep :: Int -> Path a -> Path a
keep = fix $ \ rec i -> \ case
Nil -> Nil
xs@(Cons n w t ys)
| i >= n -> xs
| otherwise -> case compare i (n - w) of
LT -> rec i ys
EQ -> ys
GT -> go (i - n + w) w t ys
where
go n w (Bin _ _ l r) = case compare n w2 of
LT -> go n w2 r
EQ -> consTree w2 r
GT | n == w - 1 -> consTree w2 l . consTree w2 r
| otherwise -> go (n - w2) w2 l . consTree w2 r
where
w2 = w `div` 2
go _ _ _ = id
(~=) :: Path a -> Path b -> Bool
{-# INLINE (~=) #-}
(~=) = sameHead
dropUntilSame :: Path a -> Path b -> Path a
dropUntilSame xs@(Cons _ w t ys) (Cons _ _ t' ys')
| sameRoot t t' = xs
| sameHead ys ys' = go w t t' ys
| otherwise = dropUntilSame ys ys'
where
go n (Bin _ _ l r) (Bin _ _ l' r')
| sameRoot l l' = consTree n2 l . consTree n2 r
| sameRoot r r' = go n2 l l' . consTree n2 r
| otherwise = go n2 r r'
where
n2 = n `div` 2
go _ _ _ = id
dropUntilSame _ _ = Nil
consTree :: Int -> Tree a -> Path a -> Path a
{-# INLINE consTree #-}
consTree n t xs = Cons (n + length xs) n t xs
sameHead :: Path a -> Path b -> Bool
{-# INLINE sameHead #-}
sameHead Nil Nil = True
sameHead (Cons _ _ t _) (Cons _ _ t' _) = sameRoot t t'
sameHead _ _ = False
sameRoot :: Tree a -> Tree b -> Bool
{-# INLINE sameRoot #-}
sameRoot xs ys = root xs == root ys
root :: Tree a -> Int
{-# INLINE root #-}
root = \ case
Tip k _ -> k
Bin k _ _ _ -> k
|
sonyandy/mlf
|
src/Path.hs
|
bsd-3-clause
| 3,069 | 0 | 17 | 1,017 | 1,585 | 793 | 792 | 118 | 7 |
import Data.DIntMap as DIM
import Control.Monad
import Data.Hashable
xs :: [Int]
xs = [0..1000000]
f = hash
main = do
dmap <- DIM.new "hello" :: IO (DIM.DIntMap Int)
root <- DIM.getRoot dmap
root' <- foldM (\t i->DIM.insert dmap (f i) 1 t) root xs
DIM.putRoot dmap root'
--putStrLn $ showTree root'
--mapM_ (\i->DIM.lookup (f i) dmap >>= print) xs
DIM.lookup (f $ head xs) dmap >>= print
|
bgamari/dtree
|
DIntMapTest.hs
|
bsd-3-clause
| 418 | 0 | 13 | 97 | 161 | 82 | 79 | 12 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Main (main) where
import Control.Applicative ((<$>))
import Control.Concurrent (forkIO)
import Data.Char (toLower)
import Data.Time.Clock.POSIX (getPOSIXTime)
import System.Console.CmdArgs.Implicit
import Curved.Carbon
import Curved.Httpd
import Curved.RTS
import Data.Whisper
import Data.Whisper.Store (newStore)
--import Curved.Cache (newStore)
graphite_whisper_root :: FilePath
graphite_whisper_root = "/opt/graphite/storage/whisper"
main :: IO ()
main = (runCmd =<<) . cmdArgs $
modes
[ cmdInfo
, cmdHttpd
, cmdPush
, cmdCreate
]
&= summary versionString
&= program "curved"
-- | String with the program name, version and copyright.
versionString :: String
versionString =
"Curved. Copyright (c) 2012 Vo Minh Thu."
-- TODO add the version.
-- | Data type representing the different command-line subcommands.
data Cmd =
Info { cmdFilename :: FilePath }
-- ^ Display some Whisper file information.
| Httpd
-- ^ Run the curved web server.
| Push { cmdFilename :: FilePath, cmdValue :: Double }
-- ^ Add a new timestamped value to a Whisper file.
| Create { cmdFilename :: FilePath, cmdPrecision :: Int, cmdSize :: Int }
-- ^ Create a new Whisper file.
deriving (Data, Typeable)
-- | Create a 'Info' command.
cmdInfo :: Cmd
cmdInfo = Info
{ cmdFilename = def
&= argPos 0
&= typ "FILE"
} &= help "Display some Whisper file information."
&= explicit
&= name "info"
-- | Create a 'Httpd' command.
cmdHttpd :: Cmd
cmdHttpd = Httpd
&= help "Run the Curved web server."
&= explicit
&= name "httpd"
-- | Create a 'Push' command.
cmdPush :: Cmd
cmdPush = Push
{ cmdFilename = def
&= argPos 0
&= typ "FILE"
, cmdValue = def
&= argPos 1
&= typ "DOUBLE"
} &= help "Add a new timestamped value to a Whisper file."
&= explicit
&= name "push"
-- | Create a 'Create' command.
cmdCreate :: Cmd
cmdCreate = Create
{ cmdFilename = def
&= argPos 0
&= typ "FILE"
, cmdPrecision = def
&= explicit
&= name "precision"
&= help "Timeframe (in seconds) covered by each point."
, cmdSize = def
&= explicit
&= name "size"
&= help "Number of points contained in the archive."
} &= help "Create a new Whisper file with a single archive."
&= explicit
&= name "create"
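-- Hypothetical example invocations for the modes above (illustrative only;
-- flag spellings follow the cmdArgs definitions):
--
--   curved info /opt/graphite/storage/whisper/foo.wsp
--   curved push /opt/graphite/storage/whisper/foo.wsp 42.0
--   curved create foo.wsp --precision=60 --size=1440
--   curved httpd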
-- | Run a sub-command.
runCmd :: Cmd -> IO ()
runCmd Info{..} = do
w <- openWhisper cmdFilename
now <- (floor . toRational) <$> getPOSIXTime
archives <- readArchives w now
let Header{..} = whisperHeader w
MetaData{..} = hMetaData
closeWhisper w
putStrLn $ cmdFilename ++ ": period=" ++ show mdMaxRetention
++ ", aggregation=" ++ map toLower (show mdAggregationType)
++ ", propagation=" ++ show mdXFilesFactor
++ ", archives=" ++ show mdArchiveCount
let f ai@ArchiveInfo{..} = " period=" ++ show (aiRetention ai)
++ ", points=" ++ show aiPoints
++ ", seconds=" ++ show aiSecondsPerPoint
++ ", size=" ++ show (aiSize ai)
++ ", offset=" ++ show aiOffset
mapM_ (putStrLn . f) hArchiveInfo
mapM_ (\(Archive points) -> mapM_ print points) archives
runCmd Httpd{..} = do
store <- newStore graphite_whisper_root
_ <- forkIO $ receivePoints store 2006
_ <- forkIO $ receiveQueries store 7002
_ <- forkIO $ pushGHCStatsToStore store
httpd store 8081 "localhost"
runCmd Push{..} = do
now <- (floor . toRational) <$> getPOSIXTime
updateWhisperFile cmdFilename now cmdValue
runCmd Create{..} =
createWhisper cmdFilename 0.5 Average [(cmdPrecision, cmdSize)]
|
noteed/curved
|
bin/curved.hs
|
bsd-3-clause
| 3,663 | 0 | 21 | 807 | 942 | 488 | 454 | 104 | 1 |
module Cardano.Wallet.API.V1.Internal.Update where
import Servant
import Cardano.Wallet.API.Response (ValidJSON)
type API =
"update"
:> ( "apply"
:> Post '[ValidJSON] NoContent
:<|> "postpone"
:> Post '[ValidJSON] NoContent
)
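-- Reading of the type above (explanatory sketch, not part of the original
-- module): it declares two endpoints,
--
--   POST /update/apply     -> NoContent
--   POST /update/postpone  -> NoContent
--
-- both using the ValidJSON content type.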
|
input-output-hk/pos-haskell-prototype
|
wallet/src/Cardano/Wallet/API/V1/Internal/Update.hs
|
mit
| 281 | 0 | 12 | 81 | 70 | 42 | 28 | -1 | -1 |
module Collatz.Inverse where
-- $Id$
import qualified Collatz.Parameter as P
import Collatz.Config
import Collatz.Roll
import Inter.Types
import Inter.Quiz
import Autolib.ToDoc
import Autolib.Hash
import Autolib.Util.Seed
import qualified Challenger as C
import Data.Typeable
import Autolib.Reporter
data Collatz_Inverse = Collatz_Inverse deriving ( Eq, Ord, Show, Read, Typeable )
instance OrderScore Collatz_Inverse where
scoringOrder _ = None -- ?
instance C.Partial Collatz_Inverse P.Parameter Integer where
report Collatz_Inverse p = do
inform $ vcat
[ text "Gesucht ist eine Startzahl, deren Collatz-Folge"
, text "diese Parameter hat:" <+> toDoc p
]
initial Collatz_Inverse p = 27
total Collatz_Inverse p x = do
       -- careful: max [] = undefined could occur here,
       -- but because Parameter derives Eq, the lengths are compared first,
       -- and those simply won't match, because only instances
       -- with lengths > 0 are rolled (hopefully)
assert ( p == P.compute x )
$ text "angegebene Zahl ist korrekt?"
instance C.Measure Collatz_Inverse P.Parameter Integer where
measure Collatz_Inverse p x = 1
make :: Make
make = direct Collatz_Inverse P.one
instance Generator Collatz_Inverse Config ( Integer, P.Parameter ) where
generator p conf key = do
seed $ fromIntegral $ hash key
roll conf
instance Project Collatz_Inverse ( Integer, P.Parameter ) P.Parameter where
project p ( _, q ) = q
qmake :: Make
qmake = quiz Collatz_Inverse rc
|
florianpilz/autotool
|
src/Collatz/Inverse.hs
|
gpl-2.0
| 1,577 | 3 | 14 | 347 | 374 | 199 | 175 | -1 | -1 |
{-# OPTIONS_JHC -fno-prelude #-}
module Jhc.List where
import Jhc.Basics
import Jhc.IO(error)
import Jhc.Int
import Jhc.Order
import Jhc.String
-- | our fusion routines
build :: (forall b . (a -> b -> b) -> b -> b) -> [a]
build g = g (:) []
augment :: forall a. (forall b. (a->b->b) -> b -> b) -> [a] -> [a]
augment g xs = g (:) xs
{-# RULES "foldr/nil" forall k z. foldr k z [] = z #-}
{-# RULES "foldr/single" forall k z x . foldr k z [x] = k x z #-}
{-# RULES "foldr/double" forall k z x y . foldr k z [x,y] = k x (k y z) #-}
{-# RULES "foldr/triple" forall k z a b c . foldr k z [a,b,c] = k a (k b (k c z)) #-}
{-# RULES "foldr/id" foldr (:) [] = \x -> x #-}
{- "foldr/app" [1] forall ys. foldr (:) ys = \xs -> xs ++ ys -}
{-# RULES "foldr/build" forall k z (g :: forall b . (a -> b -> b) -> b -> b) . foldr k z (build g) = g k z #-}
{-# RULES "foldr/augment" forall k z xs (g::forall b. (a->b->b) -> b -> b) . foldr k z (augment g xs) = g k (foldr k z xs) #-}
{-# RULES "foldr/single" forall k z x. foldr k z [x] = k x z #-}
{-# RULES "augment/build" forall (g::forall b. (a->b->b) -> b -> b)
(h::forall b. (a->b->b) -> b -> b) .
augment g (build h) = build (\c n -> g c (h c n)) #-}
{-# RULES "augment/nil" forall (g::forall b. (a->b->b) -> b -> b) . augment g [] = build g #-}
{-# RULES "foldr/unpackString" forall k z (addr::BitsPtr_) . foldr k z (unpackString addr) = unpackStringFoldr addr k z #-}
-- a few pre-fusioned routines
filterIterate :: (a -> Bool) -> (a -> a) -> a -> [a]
filterIterate p f x = fi x where
fi x | p x = x : fi (f x)
fi x = fi (f x)
mapIterate :: (a -> b) -> (a -> a) -> a -> [b]
mapIterate f g x = fi x where
fi x = f x : fi (g x)
filterMap :: (b -> Bool) -> (a -> b) -> [a] -> [b]
filterMap p f xs = fm xs where
fm (x:xs) = let nx = f x in if p nx then nx:fm xs else fm xs
fm [] = []
mapFilter :: (a -> b) -> (a -> Bool) -> [a] -> [b]
mapFilter f p xs = fm xs where
fm (x:xs) = if p x then f x:fm xs else fm xs
fm [] = []
{-# RULES "tail/map" forall f xs . tail (map f xs) = map f (tail xs) #-}
{-# RULES "head/map" forall f xs . head (map f xs) = f (head xs) #-}
{-# RULES "head/:" forall x xs . head (x:xs) = x #-}
{-# RULES "tail/:" forall x xs . tail (x:xs) = xs #-}
{-# RULES "filter/iterate" forall p f x . filter p (iterate f x) = filterIterate p f x #-}
{-# RULES "map/iterate" forall f g x . map f (iterate g x) = mapIterate f g x #-}
{-# RULES "map/filter" forall f p xs . map f (filter p xs) = mapFilter f p xs #-}
{-# RULES "filter/map" forall f p xs . filter p (map f xs) = filterMap p f xs #-}
-- efficient implementations of prelude routines
{-# CATALYST "and/foldr" forall . and = foldr (&&) True #-}
{-# CATALYST "or/foldr" forall . or = foldr (||) False #-}
and, or :: [Bool] -> Bool
and [] = True
and (False:_) = False
and (True:xs) = and xs
or [] = False
or (True:_) = True
or (False:xs) = or xs
{-# RULES "any/build" forall p (g::forall b.(a->b->b)->b->b) . any p (build g) = g ((||) . p) False #-}
{-# RULES "all/build" forall p (g::forall b.(a->b->b)->b->b) . all p (build g) = g ((&&) . p) True #-}
any, all :: (a -> Bool) -> [a] -> Bool
any p xs = f xs where
f [] = False
f (x:xs) | p x = True
| otherwise = f xs
all p xs = f xs where
f [] = True
f (x:xs) | not (p x) = False
| otherwise = f xs
filter :: (a -> Bool) -> [a] -> [a]
filter p [] = []
filter p (x:xs) | p x = x : filter p xs
| otherwise = filter p xs
-- elem is the list membership predicate, usually written in infix form,
-- e.g., x `elem` xs. notElem is the negation.
infix 4 `elem`, `notElem`
-- the implementation looks a little funny, but the reason for the
-- inner loop is so that both the == function and the unboxing of the
-- argument may occur right away outside the inner loop when the list isn't
-- empty.
elem, notElem :: (Eq a) => a -> [a] -> Bool
elem _ [] = False
elem x (y:ys)
| x == y = True
| otherwise = f y ys where
f y _ | x == y = True
f _ (y:ys) = f y ys
f _ [] = False
{-# SPECIALIZE elem :: Char -> String -> Bool #-}
{-# SPECIALIZE elem :: Int -> [Int] -> Bool #-}
{-# RULES "elem/[]" forall c . elem c [] = False #-}
{-# RULES "elem/[_]" forall c v . elem c [v] = c == v #-}
notElem _ [] = True
notElem x (y:ys)
| x == y = False
| otherwise = f y ys where
f y ys | x == y = False
f _ (y:ys) = f y ys
f _ [] = True
{-# SPECIALIZE notElem :: Char -> String -> Bool #-}
{-# SPECIALIZE notElem :: Int -> [Int] -> Bool #-}
{-# RULES "notElem/[]" forall c . notElem c [] = True #-}
{-# RULES "notElem/[_]" forall c v . notElem c [v] = c /= v #-}
infixl 9 !!
(!!) :: [a] -> Int -> a
xs !! n | n < zero = error "Prelude.(!!): negative index"
| otherwise = sub xs n where
sub :: [a] -> Int -> a
sub _ n | n `seq` False = undefined
sub [] _ = error "Prelude.(!!): index too large"
sub (y:ys) n = if n == zero
then y
else sub ys $! (n `minus` one)
null :: [a] -> Bool
null [] = True
null (_:_) = False
-- length returns the length of a finite list as an Int.
length :: [a] -> Int
length xs = f xs zero where
f [] n = n
f (_:xs) n = f xs $! n `plus` one
head :: [a] -> a
head (x:_) = x
head [] = error "Prelude.head: empty list"
tail :: [a] -> [a]
tail (_:xs) = xs
tail [] = error "Prelude.tail: empty list"
last :: [a] -> a
last [] = error "Prelude.last: empty list"
last (x:xs) = last' x xs where
last' x [] = x
    last' _ (y:ys) = last' y ys
init :: [a] -> [a]
init [] = error "Prelude.init: empty list"
init (x:xs) = init' x xs where
init' _ [] = []
init' y (z:zs) = y:init' z zs
{-# RULES "head/iterate" forall f x . head (iterate f x) = x #-}
{-# RULES "head/repeat" forall x . head (repeat x) = x #-}
{-# RULES "tail/repeat" forall x . tail (repeat x) = repeat x #-}
{-# RULES "tail/iterate" forall f x . tail (iterate f x) = iterate f (f x) #-}
{-# RULES "iterate/id" forall . iterate id = repeat #-}
foldl1 :: (a -> a -> a) -> [a] -> a
foldl1 f (x:xs) = foldl f x xs
foldl1 _ [] = error "Prelude.foldl1: empty list"
scanl1 :: (a -> a -> a) -> [a] -> [a]
scanl1 f (x:xs) = scanl f x xs
scanl1 _ [] = []
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 f [x] = x
foldr1 f (x:xs) = f x (foldr1 f xs)
foldr1 _ [] = error "Prelude.foldr1: empty list"
scanr :: (a -> b -> b) -> b -> [a] -> [b]
scanr f q0 [] = [q0]
scanr f q0 (x:xs) = f x q : qs where qs@(q:_) = scanr f q0 xs
scanr1 :: (a -> a -> a) -> [a] -> [a]
scanr1 f [] = []
scanr1 f [x] = [x]
scanr1 f (x:xs) = f x q : qs where qs@(q:_) = scanr1 f xs
{-
concatMap f = foldr ((++) . f) []
--concat xss = foldr (++) [] xss
concat xss = foldr (++) [] xss
concatMap f = foldr ((++) . f) []
and xs = foldr (&&) True xs
sum xs = foldr (+) (0::Int) xs
(++) xs ys = augment (\c n -> foldr c n xs) ys
concat xs = foldr (++) [] xs
foldl f z xs = foldr (\b g a -> g (f a b)) id xs z
filter p xs = build (\c n -> foldr (filterFB c p) n xs)
{- RULES "filterFB" forall c p q. filterFB (filterFB c p) q = filterFB c (\x -> q x && p x) #-}
{- NOINLINE filterFB #-}
filterFB c p x r | p x = x `c` r
| otherwise = r
{- NOINLINE iterateFB #-}
iterate f x = build (\c _n -> iterateFB c f x)
iterateFB c f x = x `c` iterateFB c f (f x)
head (x:xs) = x
head [] = badHead
map f xs = build (\c n -> foldr (mapFB c f) n xs)
{- NOINLINE mapFB #-}
mapFB :: (elt -> lst -> lst) -> (a -> elt) -> a -> lst -> lst
mapFB c f x ys = c (f x) ys
badHead = error "Prelude.head: empty list"
{-# RULES "head/build" forall (g::forall b.(a->b->b)->b->b) . head (build g) = g (\x _ -> x) badHead #-}
{-# RULES "head/augment" forall xs (g::forall b. (a->b->b) -> b -> b) . head (augment g xs) = g (\x _ -> x) (head xs) #-}
--repeat x = build (\c _n -> repeatFB c x)
--repeatFB c x = xs where xs = x `c` xs
{-
{-# RULES forall xs n (g :: forall b . (a -> b -> b) -> b -> b) . build g !! n = bangBang g n #-}
bangBang :: (forall b . (a -> b -> b) -> b -> b) -> Int -> a
g `bangBang` n
| n < 0 = error "Prelude.(!!): negative index\n"
| otherwise = g c k where
sub _ n | n `seq` False = undefined
sub [] _ = error "Prelude.(!!): index too large\n"
sub (y:ys) n = if n == 0
then y
else sub ys $! (n - 1)
-}
(!!) :: [a] -> Int -> a
xs !! n = foldr bangFB bangCon xs n
bangCon _ = error "!! out of range"
bangFB :: a -> (Int -> a) -> Int -> a
bangFB x _xs m | m == 0 = x
bangFB _x xs m = xs $! (m - 1)
{-# INLINE bangFB #-}
{-# INLINE iterateFB #-}
{-# INLINE (!!) #-}
{-# RULES
"take" [~1] forall n xs . take n xs = case n of I# n# -> build (\c nil -> foldr (takeFB c nil) (takeConst nil) xs n#)
"takeList" [1] forall n xs . foldr (takeFB (:) []) (takeConst []) xs n = takeUInt n xs
#-}
{-# NOINLINE [0] takeConst #-}
-- just a version of const that doesn't get inlined too early, so we
-- can spot it in rules. Also we need a type sig due to the unboxed Int#.
takeConst :: a -> Int# -> a
takeConst x _ = x
{-# NOINLINE [0] takeFB #-}
takeFB :: (a -> b -> c) -> c -> a -> (Int# -> b) -> Int# -> c
takeFB c n x xs m | m <=# 0# = n
| otherwise = x `c` xs (m -# 1#)
-}
-- takeWhile, applied to a predicate p and a list xs, returns the longest
-- prefix (possibly empty) of xs of elements that satisfy p. dropWhile p xs
-- returns the remaining suffix. span p xs is equivalent to
-- (takeWhile p xs, dropWhile p xs), while break p uses the negation of p.
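-- A couple of concrete cases (added for illustration):
--   span  (< 3) [1,2,3,4,1]  ==  ([1,2], [3,4,1])
--   break (> 3) [1,2,3,4,1]  ==  ([1,2,3], [4,1])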
takeWhile :: (a -> Bool) -> [a] -> [a]
takeWhile p [] = []
takeWhile p (x:xs)
| p x = x : takeWhile p xs
| otherwise = []
dropWhile :: (a -> Bool) -> [a] -> [a]
dropWhile p [] = []
dropWhile p xs@(x:xs')
| p x = dropWhile p xs'
| otherwise = xs
span, break :: (a -> Bool) -> [a] -> ([a],[a])
span p [] = ([],[])
span p xs@(x:xs')
| p x = (x:ys,zs)
| otherwise = ([],xs)
where (ys,zs) = span p xs'
{-# INLINE break #-}
break p = span (not . p)
-- take n, applied to a list xs, returns the prefix of xs of length n,
-- or xs itself if n > length xs. drop n xs returns the suffix of xs
-- after the first n elements, or [] if n > length xs. splitAt n xs
-- is equivalent to (take n xs, drop n xs).
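-- Concrete cases (added for illustration):
--   take 5 [1,2,3]    ==  [1,2,3]
--   splitAt 2 [1,2,3] ==  ([1,2], [3])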
take :: Int -> [a] -> [a]
take n xs = f n xs where
f n _ | n <= zero = []
f _ [] = []
f n (x:xs) = x : f (n `minus` one) xs
-- replicate n x is a list of length n with x the value of every element
replicate :: Int -> a -> [a]
replicate n x = f n where
f n | n <= zero = []
f n = let n' = n `minus` one in n' `seq` (x:f n')
splitAt :: Int -> [a] -> ([a],[a])
--splitAt n xs = (take n xs, drop n xs)
splitAt n ls | n < zero = ([], ls)
splitAt n ls = splitAt' n ls where
splitAt' :: Int -> [a] -> ([a], [a])
splitAt' z xs | z == zero = ([], xs)
splitAt' _ [] = ([], [])
splitAt' m (x:xs) = case splitAt' (m `minus` one) xs of
(xs', xs'') -> (x:xs', xs'')
{-# RULES "take/repeat" forall n x . take n (repeat x) = replicate n x #-}
|
dec9ue/jhc_copygc
|
lib/jhc/Jhc/List.hs
|
gpl-2.0
| 11,773 | 8 | 12 | 3,739 | 2,903 | 1,536 | 1,367 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>Reglas de exploración activa | ZAP extensión</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contenidos</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Buscar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoritos</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/ascanrules/src/main/javahelp/org/zaproxy/zap/extension/ascanrules/resources/help_es_ES/helpset_es_ES.hs
|
apache-2.0
| 996 | 80 | 66 | 162 | 423 | 214 | 209 | -1 | -1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnSource]{Main pass of renamer}
-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module RnSource (
rnSrcDecls, addTcgDUs, findSplice
) where
#include "HsVersions.h"
import {-# SOURCE #-} RnExpr( rnLExpr )
import {-# SOURCE #-} RnSplice ( rnSpliceDecl, rnTopSpliceDecls )
import HsSyn
import FieldLabel
import RdrName
import RnTypes
import RnBinds
import RnEnv
import RnNames
import RnHsDoc ( rnHsDoc, rnMbLHsDoc )
import TcAnnotations ( annCtxt )
import TcRnMonad
import ForeignCall ( CCallTarget(..) )
import Module
import HscTypes ( Warnings(..), plusWarns )
import Class ( FunDep )
import PrelNames ( applicativeClassName, pureAName, thenAName
, monadClassName, returnMName, thenMName
, monadFailClassName, failMName, failMName_preMFP
, semigroupClassName, sappendName
, monoidClassName, mappendName
)
import Name
import NameSet
import NameEnv
import Avail
import Outputable
import Bag
import BasicTypes ( DerivStrategy, RuleName, pprRuleName )
import FastString
import SrcLoc
import DynFlags
import Util ( debugIsOn, lengthExceeds, partitionWith )
import HscTypes ( HscEnv, hsc_dflags )
import ListSetOps ( findDupsEq, removeDups, equivClasses )
import Digraph ( SCC, flattenSCC, flattenSCCs
, stronglyConnCompFromEdgedVerticesUniq )
import UniqFM
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Control.Arrow ( first )
import Data.List ( sortBy, mapAccumL )
import Data.Maybe ( isJust )
import qualified Data.Set as Set ( difference, fromList, toList, null )
{-
@rnSourceDecl@ `renames' declarations.
It simultaneously performs dependency analysis and precedence parsing.
It also does the following error checks:
\begin{enumerate}
\item
Checks that tyvars are used properly. This includes checking
for undefined tyvars, and tyvars in contexts that are ambiguous.
(Some of this checking has now been moved to module @TcMonoType@,
since we don't have functional dependency information at this point.)
\item
Checks that all variable occurrences are defined.
\item
Checks the @(..)@ etc constraints in the export list.
\end{enumerate}
-}
-- Brings the binders of the group into scope in the appropriate places;
-- does NOT assume that anything is in scope already
rnSrcDecls :: HsGroup RdrName -> RnM (TcGblEnv, HsGroup Name)
-- Rename a top-level HsGroup; used for normal source files *and* hs-boot files
rnSrcDecls group@(HsGroup { hs_valds = val_decls,
hs_splcds = splice_decls,
hs_tyclds = tycl_decls,
hs_derivds = deriv_decls,
hs_fixds = fix_decls,
hs_warnds = warn_decls,
hs_annds = ann_decls,
hs_fords = foreign_decls,
hs_defds = default_decls,
hs_ruleds = rule_decls,
hs_vects = vect_decls,
hs_docs = docs })
= do {
-- (A) Process the fixity declarations, creating a mapping from
-- FastStrings to FixItems.
-- Also checks for duplicates.
local_fix_env <- makeMiniFixityEnv fix_decls ;
-- (B) Bring top level binders (and their fixities) into scope,
-- *except* for the value bindings, which get done in step (D)
-- with collectHsIdBinders. However *do* include
--
-- * Class ops, data constructors, and record fields,
-- because they do not have value declarations.
        --     Also step (C) depends on datacons and record fields
--
-- * For hs-boot files, include the value signatures
-- Again, they have no value declarations
--
(tc_envs, tc_bndrs) <- getLocalNonValBinders local_fix_env group ;
setEnvs tc_envs $ do {
failIfErrsM ; -- No point in continuing if (say) we have duplicate declarations
-- (D1) Bring pattern synonyms into scope.
-- Need to do this before (D2) because rnTopBindsLHS
-- looks up those pattern synonyms (Trac #9889)
extendPatSynEnv val_decls local_fix_env $ \pat_syn_bndrs -> do {
-- (D2) Rename the left-hand sides of the value bindings.
-- This depends on everything from (B) being in scope,
-- and on (C) for resolving record wild cards.
-- It uses the fixity env from (A) to bind fixities for view patterns.
new_lhs <- rnTopBindsLHS local_fix_env val_decls ;
-- Bind the LHSes (and their fixities) in the global rdr environment
let { id_bndrs = collectHsIdBinders new_lhs } ; -- Excludes pattern-synonym binders
-- They are already in scope
traceRn "rnSrcDecls" (ppr id_bndrs) ;
tc_envs <- extendGlobalRdrEnvRn (map avail id_bndrs) local_fix_env ;
traceRn "D2" (ppr (tcg_rdr_env (fst tc_envs)));
setEnvs tc_envs $ do {
-- Now everything is in scope, as the remaining renaming assumes.
-- (E) Rename type and class decls
-- (note that value LHSes need to be in scope for default methods)
--
-- You might think that we could build proper def/use information
-- for type and class declarations, but they can be involved
-- in mutual recursion across modules, and we only do the SCC
-- analysis for them in the type checker.
-- So we content ourselves with gathering uses only; that
-- means we'll only report a declaration as unused if it isn't
-- mentioned at all. Ah well.
traceRn "Start rnTyClDecls" (ppr tycl_decls) ;
(rn_tycl_decls, src_fvs1) <- rnTyClDecls tycl_decls ;
-- (F) Rename Value declarations right-hand sides
traceRn "Start rnmono" empty ;
let { val_bndr_set = mkNameSet id_bndrs `unionNameSet` mkNameSet pat_syn_bndrs } ;
is_boot <- tcIsHsBootOrSig ;
(rn_val_decls, bind_dus) <- if is_boot
                  -- For an hs-boot, use tc_bndrs (which collects the renamed
-- signatures), since val_bndr_set is empty (there are no x = ...
-- bindings in an hs-boot.)
then rnTopBindsBoot tc_bndrs new_lhs
else rnValBindsRHS (TopSigCtxt val_bndr_set) new_lhs ;
traceRn "finish rnmono" (ppr rn_val_decls) ;
-- (G) Rename Fixity and deprecations
-- Rename fixity declarations and error if we try to
-- fix something from another module (duplicates were checked in (A))
let { all_bndrs = tc_bndrs `unionNameSet` val_bndr_set } ;
rn_fix_decls <- rnSrcFixityDecls all_bndrs fix_decls ;
-- Rename deprec decls;
-- check for duplicates and ensure that deprecated things are defined locally
-- at the moment, we don't keep these around past renaming
rn_warns <- rnSrcWarnDecls all_bndrs warn_decls ;
-- (H) Rename Everything else
(rn_rule_decls, src_fvs2) <- setXOptM LangExt.ScopedTypeVariables $
rnList rnHsRuleDecls rule_decls ;
-- Inside RULES, scoped type variables are on
(rn_vect_decls, src_fvs3) <- rnList rnHsVectDecl vect_decls ;
(rn_foreign_decls, src_fvs4) <- rnList rnHsForeignDecl foreign_decls ;
(rn_ann_decls, src_fvs5) <- rnList rnAnnDecl ann_decls ;
(rn_default_decls, src_fvs6) <- rnList rnDefaultDecl default_decls ;
(rn_deriv_decls, src_fvs7) <- rnList rnSrcDerivDecl deriv_decls ;
(rn_splice_decls, src_fvs8) <- rnList rnSpliceDecl splice_decls ;
-- Haddock docs; no free vars
rn_docs <- mapM (wrapLocM rnDocDecl) docs ;
last_tcg_env <- getGblEnv ;
-- (I) Compute the results and return
let {rn_group = HsGroup { hs_valds = rn_val_decls,
hs_splcds = rn_splice_decls,
hs_tyclds = rn_tycl_decls,
hs_derivds = rn_deriv_decls,
hs_fixds = rn_fix_decls,
hs_warnds = [], -- warns are returned in the tcg_env
-- (see below) not in the HsGroup
hs_fords = rn_foreign_decls,
hs_annds = rn_ann_decls,
hs_defds = rn_default_decls,
hs_ruleds = rn_rule_decls,
hs_vects = rn_vect_decls,
hs_docs = rn_docs } ;
tcf_bndrs = hsTyClForeignBinders rn_tycl_decls rn_foreign_decls ;
other_def = (Just (mkNameSet tcf_bndrs), emptyNameSet) ;
other_fvs = plusFVs [src_fvs1, src_fvs2, src_fvs3, src_fvs4, src_fvs5,
src_fvs6, src_fvs7, src_fvs8] ;
-- It is tiresome to gather the binders from type and class decls
src_dus = [other_def] `plusDU` bind_dus `plusDU` usesOnly other_fvs ;
-- Instance decls may have occurrences of things bound in bind_dus
-- so we must put other_fvs last
final_tcg_env = let tcg_env' = (last_tcg_env `addTcgDUs` src_dus)
in -- we return the deprecs in the env, not in the HsGroup above
tcg_env' { tcg_warns = tcg_warns tcg_env' `plusWarns` rn_warns };
} ;
traceRn "last" (ppr (tcg_rdr_env final_tcg_env)) ;
traceRn "finish rnSrc" (ppr rn_group) ;
traceRn "finish Dus" (ppr src_dus ) ;
return (final_tcg_env, rn_group)
}}}}
addTcgDUs :: TcGblEnv -> DefUses -> TcGblEnv
-- This function could be defined lower down in the module hierarchy,
-- but there doesn't seem anywhere very logical to put it.
addTcgDUs tcg_env dus = tcg_env { tcg_dus = tcg_dus tcg_env `plusDU` dus }
rnList :: (a -> RnM (b, FreeVars)) -> [Located a] -> RnM ([Located b], FreeVars)
rnList f xs = mapFvRn (wrapLocFstM f) xs
{-
*********************************************************
* *
HsDoc stuff
* *
*********************************************************
-}
rnDocDecl :: DocDecl -> RnM DocDecl
rnDocDecl (DocCommentNext doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentNext rn_doc)
rnDocDecl (DocCommentPrev doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentPrev rn_doc)
rnDocDecl (DocCommentNamed str doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentNamed str rn_doc)
rnDocDecl (DocGroup lev doc) = do
rn_doc <- rnHsDoc doc
return (DocGroup lev rn_doc)
{-
*********************************************************
* *
Source-code fixity declarations
* *
*********************************************************
-}
rnSrcFixityDecls :: NameSet -> [LFixitySig RdrName] -> RnM [LFixitySig Name]
-- Rename the fixity decls, so we can put
-- the renamed decls in the renamed syntax tree
-- Errors if the thing being fixed is not defined locally.
--
-- The returned FixitySigs are not actually used for anything,
-- except perhaps the GHCi API
rnSrcFixityDecls bndr_set fix_decls
= do fix_decls <- mapM rn_decl fix_decls
return (concat fix_decls)
where
sig_ctxt = TopSigCtxt bndr_set
rn_decl :: LFixitySig RdrName -> RnM [LFixitySig Name]
-- GHC extension: look up both the tycon and data con
-- for con-like things; hence returning a list
-- If neither are in scope, report an error; otherwise
-- return a fixity sig for each (slightly odd)
rn_decl (L loc (FixitySig fnames fixity))
= do names <- mapM lookup_one fnames
return [ L loc (FixitySig name fixity)
| name <- names ]
lookup_one :: Located RdrName -> RnM [Located Name]
lookup_one (L name_loc rdr_name)
= setSrcSpan name_loc $
-- this lookup will fail if the definition isn't local
do names <- lookupLocalTcNames sig_ctxt what rdr_name
return [ L name_loc name | (_, name) <- names ]
what = text "fixity signature"
{-
*********************************************************
* *
Source-code deprecations declarations
* *
*********************************************************
Check that the deprecated names are defined, are defined locally, and
that there are no duplicate deprecations.
It's only imported deprecations, dealt with in RnIfaces, that we
gather together.
-}
-- checks that the deprecations are defined locally, and that there are no duplicates
rnSrcWarnDecls :: NameSet -> [LWarnDecls RdrName] -> RnM Warnings
rnSrcWarnDecls _ []
= return NoWarnings
rnSrcWarnDecls bndr_set decls'
= do { -- check for duplicates
; mapM_ (\ dups -> let (L loc rdr:lrdr':_) = dups
in addErrAt loc (dupWarnDecl lrdr' rdr))
warn_rdr_dups
; pairs_s <- mapM (addLocM rn_deprec) decls
; return (WarnSome ((concat pairs_s))) }
where
decls = concatMap (\(L _ d) -> wd_warnings d) decls'
sig_ctxt = TopSigCtxt bndr_set
rn_deprec (Warning rdr_names txt)
-- ensures that the names are defined locally
= do { names <- concatMapM (lookupLocalTcNames sig_ctxt what . unLoc)
rdr_names
; return [(rdrNameOcc rdr, txt) | (rdr, _) <- names] }
what = text "deprecation"
warn_rdr_dups = findDupRdrNames $ concatMap (\(L _ (Warning ns _)) -> ns)
decls
findDupRdrNames :: [Located RdrName] -> [[Located RdrName]]
findDupRdrNames = findDupsEq (\ x -> \ y -> rdrNameOcc (unLoc x) == rdrNameOcc (unLoc y))
-- look for duplicates among the OccNames;
-- we check that the names are defined above
-- invt: the lists returned by findDupsEq always have at least two elements
dupWarnDecl :: Located RdrName -> RdrName -> SDoc
-- Located RdrName -> DeprecDecl RdrName -> SDoc
dupWarnDecl (L loc _) rdr_name
= vcat [text "Multiple warning declarations for" <+> quotes (ppr rdr_name),
text "also at " <+> ppr loc]
{-
*********************************************************
* *
\subsection{Annotation declarations}
* *
*********************************************************
-}
rnAnnDecl :: AnnDecl RdrName -> RnM (AnnDecl Name, FreeVars)
rnAnnDecl ann@(HsAnnotation s provenance expr)
= addErrCtxt (annCtxt ann) $
do { (provenance', provenance_fvs) <- rnAnnProvenance provenance
; (expr', expr_fvs) <- setStage (Splice Untyped) $
rnLExpr expr
; return (HsAnnotation s provenance' expr',
provenance_fvs `plusFV` expr_fvs) }
rnAnnProvenance :: AnnProvenance RdrName -> RnM (AnnProvenance Name, FreeVars)
rnAnnProvenance provenance = do
provenance' <- traverse lookupTopBndrRn provenance
return (provenance', maybe emptyFVs unitFV (annProvenanceName_maybe provenance'))
{-
*********************************************************
* *
\subsection{Default declarations}
* *
*********************************************************
-}
rnDefaultDecl :: DefaultDecl RdrName -> RnM (DefaultDecl Name, FreeVars)
rnDefaultDecl (DefaultDecl tys)
= do { (tys', fvs) <- rnLHsTypes doc_str tys
; return (DefaultDecl tys', fvs) }
where
doc_str = DefaultDeclCtx
{-
*********************************************************
* *
\subsection{Foreign declarations}
* *
*********************************************************
-}
rnHsForeignDecl :: ForeignDecl RdrName -> RnM (ForeignDecl Name, FreeVars)
rnHsForeignDecl (ForeignImport { fd_name = name, fd_sig_ty = ty, fd_fi = spec })
= do { topEnv :: HscEnv <- getTopEnv
; name' <- lookupLocatedTopBndrRn name
; (ty', fvs) <- rnHsSigType (ForeignDeclCtx name) ty
-- Mark any PackageTarget style imports as coming from the current package
; let unitId = thisPackage $ hsc_dflags topEnv
spec' = patchForeignImport unitId spec
; return (ForeignImport { fd_name = name', fd_sig_ty = ty'
, fd_co = noForeignImportCoercionYet
, fd_fi = spec' }, fvs) }
rnHsForeignDecl (ForeignExport { fd_name = name, fd_sig_ty = ty, fd_fe = spec })
= do { name' <- lookupLocatedOccRn name
; (ty', fvs) <- rnHsSigType (ForeignDeclCtx name) ty
; return (ForeignExport { fd_name = name', fd_sig_ty = ty'
, fd_co = noForeignExportCoercionYet
, fd_fe = spec }
, fvs `addOneFV` unLoc name') }
-- NB: a foreign export is an *occurrence site* for name, so
-- we add it to the free-variable list. It might, for example,
-- be imported from another module
-- | For Windows DLLs we need to know what packages imported symbols are from
-- to generate correct calls. Imported symbols are tagged with the current
-- package, so if they get inlined across a package boundary we'll still
-- know where they're from.
--
patchForeignImport :: UnitId -> ForeignImport -> ForeignImport
patchForeignImport unitId (CImport cconv safety fs spec src)
= CImport cconv safety fs (patchCImportSpec unitId spec) src
patchCImportSpec :: UnitId -> CImportSpec -> CImportSpec
patchCImportSpec unitId spec
= case spec of
CFunction callTarget -> CFunction $ patchCCallTarget unitId callTarget
_ -> spec
patchCCallTarget :: UnitId -> CCallTarget -> CCallTarget
patchCCallTarget unitId callTarget =
case callTarget of
StaticTarget src label Nothing isFun
-> StaticTarget src label (Just unitId) isFun
_ -> callTarget
{-
*********************************************************
* *
\subsection{Instance declarations}
* *
*********************************************************
-}
rnSrcInstDecl :: InstDecl RdrName -> RnM (InstDecl Name, FreeVars)
rnSrcInstDecl (TyFamInstD { tfid_inst = tfi })
= do { (tfi', fvs) <- rnTyFamInstDecl Nothing tfi
; return (TyFamInstD { tfid_inst = tfi' }, fvs) }
rnSrcInstDecl (DataFamInstD { dfid_inst = dfi })
= do { (dfi', fvs) <- rnDataFamInstDecl Nothing dfi
; return (DataFamInstD { dfid_inst = dfi' }, fvs) }
rnSrcInstDecl (ClsInstD { cid_inst = cid })
= do { (cid', fvs) <- rnClsInstDecl cid
; return (ClsInstD { cid_inst = cid' }, fvs) }
-- | Warn about non-canonical typeclass instance declarations
--
-- A "non-canonical" instance definition can occur for instances of a
-- class which redundantly defines an operation its superclass
-- provides as well (c.f. `return`/`pure`). In such cases, a canonical
-- instance is one where the subclass inherits its method
-- implementation from its superclass instance (usually the subclass
-- has a default method implementation to that effect). Consequently,
-- a non-canonical instance occurs when this is not the case.
--
-- See also descriptions of 'checkCanonicalMonadInstances' and
-- 'checkCanonicalMonoidInstances'
checkCanonicalInstances :: Name -> LHsSigType Name -> LHsBinds Name -> RnM ()
checkCanonicalInstances cls poly_ty mbinds = do
whenWOptM Opt_WarnNonCanonicalMonadInstances
checkCanonicalMonadInstances
whenWOptM Opt_WarnNonCanonicalMonadFailInstances
checkCanonicalMonadFailInstances
whenWOptM Opt_WarnNonCanonicalMonoidInstances
checkCanonicalMonoidInstances
where
-- | Warn about unsound/non-canonical 'Applicative'/'Monad' instance
-- declarations. Specifically, the following conditions are verified:
--
-- In 'Monad' instances declarations:
--
-- * If 'return' is overridden it must be canonical (i.e. @return = pure@)
-- * If '(>>)' is overridden it must be canonical (i.e. @(>>) = (*>)@)
--
-- In 'Applicative' instance declarations:
--
-- * Warn if 'pure' is defined backwards (i.e. @pure = return@).
-- * Warn if '(*>)' is defined backwards (i.e. @(*>) = (>>)@).
--
checkCanonicalMonadInstances
| cls == applicativeClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == pureAName, isAliasMG mg == Just returnMName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadInstances "pure" "return"
| name == thenAName, isAliasMG mg == Just thenMName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadInstances "(*>)" "(>>)"
_ -> return ()
| cls == monadClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == returnMName, isAliasMG mg /= Just pureAName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadInstances "return" "pure"
| name == thenMName, isAliasMG mg /= Just thenAName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadInstances "(>>)" "(*>)"
_ -> return ()
| otherwise = return ()
-- | Warn about unsound/non-canonical 'Monad'/'MonadFail' instance
-- declarations. Specifically, the following conditions are verified:
--
-- In 'Monad' instances declarations:
--
-- * If 'fail' is overridden it must be canonical
-- (i.e. @fail = Control.Monad.Fail.fail@)
--
-- In 'MonadFail' instance declarations:
--
-- * Warn if 'fail' is defined backwards
-- (i.e. @fail = Control.Monad.fail@).
--
checkCanonicalMonadFailInstances
| cls == monadFailClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == failMName, isAliasMG mg == Just failMName_preMFP
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadFailInstances "fail"
"Control.Monad.fail"
_ -> return ()
| cls == monadClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == failMName_preMFP, isAliasMG mg /= Just failMName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadFailInstances "fail"
"Control.Monad.Fail.fail"
_ -> return ()
| otherwise = return ()
-- | Check whether Monoid(mappend) is defined in terms of
-- Semigroup((<>)) (and not the other way round). Specifically,
-- the following conditions are verified:
--
-- In 'Monoid' instances declarations:
--
-- * If 'mappend' is overridden it must be canonical
-- (i.e. @mappend = (<>)@)
--
-- In 'Semigroup' instance declarations:
--
-- * Warn if '(<>)' is defined backwards (i.e. @(<>) = mappend@).
--
checkCanonicalMonoidInstances
| cls == semigroupClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == sappendName, isAliasMG mg == Just mappendName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonoidInstances "(<>)" "mappend"
_ -> return ()
| cls == monoidClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == mappendName, isAliasMG mg /= Just sappendName
-> addWarnNonCanonicalMethod2NoDefault
Opt_WarnNonCanonicalMonoidInstances "mappend" "(<>)"
_ -> return ()
| otherwise = return ()
-- | test whether MatchGroup represents a trivial \"lhsName = rhsName\"
-- binding, and return @Just rhsName@ if this is the case
isAliasMG :: MatchGroup Name (LHsExpr Name) -> Maybe Name
isAliasMG MG {mg_alts = L _ [L _ (Match { m_pats = [], m_grhss = grhss })]}
| GRHSs [L _ (GRHS [] body)] lbinds <- grhss
, L _ EmptyLocalBinds <- lbinds
, L _ (HsVar (L _ rhsName)) <- body = Just rhsName
isAliasMG _ = Nothing
-- got "lhs = rhs" but expected something different
addWarnNonCanonicalMethod1 flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text (lhs ++ " = " ++ rhs)) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Move definition from" <+>
quotes (text rhs) <+>
text "to" <+> quotes (text lhs)
]
-- expected "lhs = rhs" but got something else
addWarnNonCanonicalMethod2 flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text lhs) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Either remove definition for" <+>
quotes (text lhs) <+> text "or define as" <+>
quotes (text (lhs ++ " = " ++ rhs))
]
-- like above, but method has no default impl
addWarnNonCanonicalMethod2NoDefault flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text lhs) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Define as" <+>
quotes (text (lhs ++ " = " ++ rhs))
]
-- stolen from TcInstDcls
instDeclCtxt1 :: LHsSigType Name -> SDoc
instDeclCtxt1 hs_inst_ty
= inst_decl_ctxt (ppr (getLHsInstDeclHead hs_inst_ty))
inst_decl_ctxt :: SDoc -> SDoc
inst_decl_ctxt doc = hang (text "in the instance declaration for")
2 (quotes doc <> text ".")
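-- Illustrative (hypothetical) instances showing what the checks above accept
-- and flag:
--
--   instance Monad M where
--     return = pure          -- canonical: no warning
--     (>>)   = (*>)          -- canonical: no warning
--
--   instance Applicative M where
--     pure = return          -- defined backwards: triggers
--                            -- Opt_WarnNonCanonicalMonadInstances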
rnClsInstDecl :: ClsInstDecl RdrName -> RnM (ClsInstDecl Name, FreeVars)
rnClsInstDecl (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = mbinds
, cid_sigs = uprags, cid_tyfam_insts = ats
, cid_overlap_mode = oflag
, cid_datafam_insts = adts })
= do { (inst_ty', inst_fvs) <- rnLHsInstType (text "an instance declaration") inst_ty
; let (ktv_names, _, head_ty') = splitLHsInstDeclTy inst_ty'
; let cls = case hsTyGetAppHead_maybe head_ty' of
Nothing -> mkUnboundName (mkTcOccFS (fsLit "<class>"))
Just (L _ cls, _) -> cls
-- rnLHsInstType has added an error message
-- if hsTyGetAppHead_maybe fails
-- Rename the bindings
-- The typechecker (not the renamer) checks that all
-- the bindings are for the right class
-- (Slightly strangely) when scoped type variables are on, the
-- forall-d tyvars scope over the method bindings too
; (mbinds', uprags', meth_fvs) <- rnMethodBinds False cls ktv_names mbinds uprags
; checkCanonicalInstances cls inst_ty' mbinds'
-- Rename the associated types, and type signatures
-- Both need to have the instance type variables in scope
; traceRn "rnSrcInstDecl" (ppr inst_ty' $$ ppr ktv_names)
; ((ats', adts'), more_fvs)
<- extendTyVarEnvFVRn ktv_names $
do { (ats', at_fvs) <- rnATInstDecls rnTyFamInstDecl cls ktv_names ats
; (adts', adt_fvs) <- rnATInstDecls rnDataFamInstDecl cls ktv_names adts
; return ( (ats', adts'), at_fvs `plusFV` adt_fvs) }
; let all_fvs = meth_fvs `plusFV` more_fvs
`plusFV` inst_fvs
; return (ClsInstDecl { cid_poly_ty = inst_ty', cid_binds = mbinds'
, cid_sigs = uprags', cid_tyfam_insts = ats'
, cid_overlap_mode = oflag
, cid_datafam_insts = adts' },
all_fvs) }
-- We return the renamed associated data type declarations so
-- that they can be entered into the list of type declarations
-- for the binding group, but we also keep a copy in the instance.
-- The latter is needed for well-formedness checks in the type
-- checker (eg, to ensure that all ATs of the instance actually
-- receive a declaration).
-- NB: Even the copies in the instance declaration carry copies of
-- the instance context after renaming. This is a bit
-- strange, but should not matter (and it would be more work
-- to remove the context).
rnFamInstDecl :: HsDocContext
-> Maybe (Name, [Name]) -- Nothing => not associated
-- Just (cls,tvs) => associated,
-- and gives class and tyvars of the
-- parent instance delc
-> Located RdrName
-> HsTyPats RdrName
-> rhs
-> (HsDocContext -> rhs -> RnM (rhs', FreeVars))
-> RnM (Located Name, HsTyPats Name, rhs', FreeVars)
rnFamInstDecl doc mb_cls tycon (HsIB { hsib_body = pats }) payload rnPayload
= do { tycon' <- lookupFamInstName (fmap fst mb_cls) tycon
; let loc = case pats of
[] -> pprPanic "rnFamInstDecl" (ppr tycon)
(L loc _ : []) -> loc
(L loc _ : ps) -> combineSrcSpans loc (getLoc (last ps))
; pat_kity_vars_with_dups <- extractHsTysRdrTyVarsDups pats
-- Use the "...Dups" form because it's needed
             -- below to report unused binders on the LHS
; var_names <- mapM (newTyVarNameRn mb_cls . L loc . unLoc) $
freeKiTyVarsAllVars $
rmDupsInRdrTyVars pat_kity_vars_with_dups
-- All the free vars of the family patterns
-- with a sensible binding location
; ((pats', payload'), fvs)
<- bindLocalNamesFV var_names $
do { (pats', pat_fvs) <- rnLHsTypes (FamPatCtx tycon) pats
; (payload', rhs_fvs) <- rnPayload doc payload
-- Report unused binders on the LHS
-- See Note [Unused type variables in family instances]
; let groups :: [[Located RdrName]]
groups = equivClasses cmpLocated $
freeKiTyVarsAllVars pat_kity_vars_with_dups
; tv_nms_dups <- mapM (lookupOccRn . unLoc) $
[ tv | (tv:_:_) <- groups ]
-- Add to the used variables
-- a) any variables that appear *more than once* on the LHS
-- e.g. F a Int a = Bool
-- b) for associated instances, the variables
-- of the instance decl. See
-- Note [Unused type variables in family instances]
; let tv_nms_used = extendNameSetList rhs_fvs $
inst_tvs ++ tv_nms_dups
inst_tvs = case mb_cls of
Nothing -> []
Just (_, inst_tvs) -> inst_tvs
; warnUnusedTypePatterns var_names tv_nms_used
-- See Note [Renaming associated types]
; let bad_tvs = case mb_cls of
Nothing -> []
Just (_,cls_tkvs) -> filter is_bad cls_tkvs
var_name_set = mkNameSet var_names
is_bad cls_tkv = cls_tkv `elemNameSet` rhs_fvs
&& not (cls_tkv `elemNameSet` var_name_set)
; unless (null bad_tvs) (badAssocRhs bad_tvs)
; return ((pats', payload'), rhs_fvs `plusFV` pat_fvs) }
; let anon_wcs = concatMap collectAnonWildCards pats'
all_ibs = anon_wcs ++ var_names
-- all_ibs: include anonymous wildcards in the implicit
-- binders In a type pattern they behave just like any
              -- other type variable except for being anonymous. See
-- Note [Wildcards in family instances]
all_fvs = fvs `addOneFV` unLoc tycon'
; return (tycon',
HsIB { hsib_body = pats'
, hsib_vars = all_ibs },
payload',
all_fvs) }
-- type instance => use, hence addOneFV
rnTyFamInstDecl :: Maybe (Name, [Name])
-> TyFamInstDecl RdrName
-> RnM (TyFamInstDecl Name, FreeVars)
rnTyFamInstDecl mb_cls (TyFamInstDecl { tfid_eqn = L loc eqn })
= do { (eqn', fvs) <- rnTyFamInstEqn mb_cls eqn
; return (TyFamInstDecl { tfid_eqn = L loc eqn'
, tfid_fvs = fvs }, fvs) }
rnTyFamInstEqn :: Maybe (Name, [Name])
-> TyFamInstEqn RdrName
-> RnM (TyFamInstEqn Name, FreeVars)
rnTyFamInstEqn mb_cls (TyFamEqn { tfe_tycon = tycon
, tfe_pats = pats
, tfe_fixity = fixity
, tfe_rhs = rhs })
= do { (tycon', pats', rhs', fvs) <-
rnFamInstDecl (TySynCtx tycon) mb_cls tycon pats rhs rnTySyn
; return (TyFamEqn { tfe_tycon = tycon'
, tfe_pats = pats'
, tfe_fixity = fixity
, tfe_rhs = rhs' }, fvs) }
rnTyFamDefltEqn :: Name
-> TyFamDefltEqn RdrName
-> RnM (TyFamDefltEqn Name, FreeVars)
rnTyFamDefltEqn cls (TyFamEqn { tfe_tycon = tycon
, tfe_pats = tyvars
, tfe_fixity = fixity
, tfe_rhs = rhs })
= bindHsQTyVars ctx Nothing (Just cls) [] tyvars $ \ tyvars' _ ->
do { tycon' <- lookupFamInstName (Just cls) tycon
; (rhs', fvs) <- rnLHsType ctx rhs
; return (TyFamEqn { tfe_tycon = tycon'
, tfe_pats = tyvars'
, tfe_fixity = fixity
, tfe_rhs = rhs' }, fvs) }
where
ctx = TyFamilyCtx tycon
rnDataFamInstDecl :: Maybe (Name, [Name])
-> DataFamInstDecl RdrName
-> RnM (DataFamInstDecl Name, FreeVars)
rnDataFamInstDecl mb_cls (DataFamInstDecl { dfid_tycon = tycon
, dfid_pats = pats
, dfid_fixity = fixity
, dfid_defn = defn })
= do { (tycon', pats', (defn', _), fvs) <-
rnFamInstDecl (TyDataCtx tycon) mb_cls tycon pats defn rnDataDefn
; return (DataFamInstDecl { dfid_tycon = tycon'
, dfid_pats = pats'
, dfid_fixity = fixity
, dfid_defn = defn'
, dfid_fvs = fvs }, fvs) }
-- Renaming of the associated types in instances.
-- Rename associated type family decl in class
rnATDecls :: Name -- Class
-> [LFamilyDecl RdrName]
-> RnM ([LFamilyDecl Name], FreeVars)
rnATDecls cls at_decls
= rnList (rnFamDecl (Just cls)) at_decls
rnATInstDecls :: (Maybe (Name, [Name]) -> -- The function that renames
decl RdrName -> -- an instance. rnTyFamInstDecl
RnM (decl Name, FreeVars)) -- or rnDataFamInstDecl
-> Name -- Class
-> [Name]
-> [Located (decl RdrName)]
-> RnM ([Located (decl Name)], FreeVars)
-- Used for data and type family defaults in a class decl
-- and the family instance declarations in an instance
--
-- NB: We allow duplicate associated-type decls;
-- See Note [Associated type instances] in TcInstDcls
rnATInstDecls rnFun cls tv_ns at_insts
= rnList (rnFun (Just (cls, tv_ns))) at_insts
-- See Note [Renaming associated types]
{- Note [Wildcards in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Wild cards can be used in type/data family instance declarations to indicate
that the name of a type variable doesn't matter. Each wild card will be
replaced with a new unique type variable. For instance:
type family F a b :: *
type instance F Int _ = Int
is the same as
type family F a b :: *
type instance F Int b = Int
This is implemented as follows: during renaming anonymous wild cards
'_' are given freshly generated names. These names are collected after
renaming (rnFamInstDecl) and used to make new type variables during
type checking (tc_fam_ty_pats). One should not confuse these wild
cards with the ones from partial type signatures. The latter generate
fresh meta-variables whereas the former generate fresh skolems.
Note [Unused type variables in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the flag -fwarn-unused-type-patterns is on, the compiler reports
warnings about unused type variables in type-family instances. A
type variable is considered used (i.e. cannot be turned into a wildcard)
when
* it occurs on the RHS of the family instance
e.g. type instance F a b = a -- a is used on the RHS
* it occurs multiple times in the patterns on the LHS
e.g. type instance F a a = Int -- a appears more than once on LHS
* it is one of the instance-decl variables, for associated types
e.g. instance C (a,b) where
type T (a,b) = a
Here the type pattern in the type instance must be the same as that
for the class instance, so
type T (a,_) = a
would be rejected. So we should not complain about an unused variable b
As usual, the warnings are not reported for type variables with names
beginning with an underscore.
Extra-constraints wild cards are not supported in type/data family
instance declarations.
Relevant tickets: #3699, #10586, #10982 and #11451.
Note [Renaming associated types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check that the RHS of the decl mentions only type variables
bound on the LHS. For example, this is not ok
class C a b where
type F a x :: *
instance C (p,q) r where
type F (p,q) x = (x, r) -- BAD: mentions 'r'
c.f. Trac #5515
The same thing applies to kind variables, of course (Trac #7938, #9574):
class Funct f where
type Codomain f :: *
instance Funct ('KProxy :: KProxy o) where
type Codomain 'KProxy = NatTr (Proxy :: o -> *)
Here 'o' is mentioned on the RHS of the Codomain function, but
not on the LHS.
All this applies only for *instance* declarations. In *class*
declarations there is no RHS to worry about, and the class variables
can all be in scope (Trac #5862):
class Category (x :: k -> k -> *) where
type Ob x :: k -> Constraint
id :: Ob x a => x a a
(.) :: (Ob x a, Ob x b, Ob x c) => x b c -> x a b -> x a c
Here 'k' is in scope in the kind signature, just like 'x'.
-}
{-
*********************************************************
* *
\subsection{Stand-alone deriving declarations}
* *
*********************************************************
-}
rnSrcDerivDecl :: DerivDecl RdrName -> RnM (DerivDecl Name, FreeVars)
rnSrcDerivDecl (DerivDecl ty deriv_strat overlap)
= do { standalone_deriv_ok <- xoptM LangExt.StandaloneDeriving
; deriv_strats_ok <- xoptM LangExt.DerivingStrategies
; unless standalone_deriv_ok (addErr standaloneDerivErr)
; failIfTc (isJust deriv_strat && not deriv_strats_ok) $
illegalDerivStrategyErr $ fmap unLoc deriv_strat
; (ty', fvs) <- rnLHsInstType (text "In a deriving declaration") ty
; return (DerivDecl ty' deriv_strat overlap, fvs) }
standaloneDerivErr :: SDoc
standaloneDerivErr
= hang (text "Illegal standalone deriving declaration")
2 (text "Use StandaloneDeriving to enable this extension")
{-
*********************************************************
* *
\subsection{Rules}
* *
*********************************************************
-}
rnHsRuleDecls :: RuleDecls RdrName -> RnM (RuleDecls Name, FreeVars)
rnHsRuleDecls (HsRules src rules)
= do { (rn_rules,fvs) <- rnList rnHsRuleDecl rules
; return (HsRules src rn_rules,fvs) }
rnHsRuleDecl :: RuleDecl RdrName -> RnM (RuleDecl Name, FreeVars)
rnHsRuleDecl (HsRule rule_name act vars lhs _fv_lhs rhs _fv_rhs)
= do { let rdr_names_w_loc = map get_var vars
; checkDupRdrNames rdr_names_w_loc
; checkShadowedRdrNames rdr_names_w_loc
; names <- newLocalBndrsRn rdr_names_w_loc
; bindHsRuleVars (snd $ unLoc rule_name) vars names $ \ vars' ->
do { (lhs', fv_lhs') <- rnLExpr lhs
; (rhs', fv_rhs') <- rnLExpr rhs
; checkValidRule (snd $ unLoc rule_name) names lhs' fv_lhs'
; return (HsRule rule_name act vars' lhs' fv_lhs' rhs' fv_rhs',
fv_lhs' `plusFV` fv_rhs') } }
where
get_var (L _ (RuleBndrSig v _)) = v
get_var (L _ (RuleBndr v)) = v
bindHsRuleVars :: RuleName -> [LRuleBndr RdrName] -> [Name]
-> ([LRuleBndr Name] -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
bindHsRuleVars rule_name vars names thing_inside
= go vars names $ \ vars' ->
bindLocalNamesFV names (thing_inside vars')
where
doc = RuleCtx rule_name
go (L l (RuleBndr (L loc _)) : vars) (n : ns) thing_inside
= go vars ns $ \ vars' ->
thing_inside (L l (RuleBndr (L loc n)) : vars')
go (L l (RuleBndrSig (L loc _) bsig) : vars) (n : ns) thing_inside
= rnHsSigWcTypeScoped doc bsig $ \ bsig' ->
go vars ns $ \ vars' ->
thing_inside (L l (RuleBndrSig (L loc n) bsig') : vars')
go [] [] thing_inside = thing_inside []
go vars names _ = pprPanic "bindRuleVars" (ppr vars $$ ppr names)
{-
Note [Rule LHS validity checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check the shape of a transformation rule LHS. Currently we only allow
LHSs of the form @(f e1 .. en)@, where @f@ is not one of the
@forall@'d variables.
We used to restrict the form of the 'ei' to prevent you writing rules
with LHSs with a complicated desugaring (and hence unlikely to match);
(e.g. a case expression is not allowed: too elaborate.)
But there are legitimate non-trivial args ei, like sections and
lambdas. So it seems simpler not to check at all, and that is why
check_e is commented out.
-}
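-- Illustrative example (not from the original source): a rule LHS such as
--   forall f g xs.  map f (map g xs)
-- has the accepted shape (f e1 .. en) with the head 'map' not forall'd,
-- whereas an LHS whose head is itself a forall'd variable, e.g.
--   forall f x.  f x
-- is rejected by 'validRuleLhs' / 'checkValidRule' below.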
checkValidRule :: FastString -> [Name] -> LHsExpr Name -> NameSet -> RnM ()
checkValidRule rule_name ids lhs' fv_lhs'
= do { -- Check for the form of the LHS
case (validRuleLhs ids lhs') of
Nothing -> return ()
Just bad -> failWithTc (badRuleLhsErr rule_name lhs' bad)
-- Check that LHS vars are all bound
; let bad_vars = [var | var <- ids, not (var `elemNameSet` fv_lhs')]
; mapM_ (addErr . badRuleVar rule_name) bad_vars }
validRuleLhs :: [Name] -> LHsExpr Name -> Maybe (HsExpr Name)
-- Nothing => OK
-- Just e => Not ok, and e is the offending sub-expression
validRuleLhs foralls lhs
= checkl lhs
where
checkl (L _ e) = check e
check (OpApp e1 op _ e2) = checkl op `mplus` checkl_e e1 `mplus` checkl_e e2
check (HsApp e1 e2) = checkl e1 `mplus` checkl_e e2
check (HsAppType e _) = checkl e
check (HsVar (L _ v)) | v `notElem` foralls = Nothing
check other = Just other -- Failure
-- Check an argument
checkl_e (L _ _e) = Nothing -- Was (check_e e); see Note [Rule LHS validity checking]
{- Commented out; see Note [Rule LHS validity checking] above
check_e (HsVar v) = Nothing
check_e (HsPar e) = checkl_e e
check_e (HsLit e) = Nothing
check_e (HsOverLit e) = Nothing
check_e (OpApp e1 op _ e2) = checkl_e e1 `mplus` checkl_e op `mplus` checkl_e e2
check_e (HsApp e1 e2) = checkl_e e1 `mplus` checkl_e e2
check_e (NegApp e _) = checkl_e e
check_e (ExplicitList _ es) = checkl_es es
check_e other = Just other -- Fails
checkl_es es = foldr (mplus . checkl_e) Nothing es
-}
badRuleVar :: FastString -> Name -> SDoc
badRuleVar name var
= sep [text "Rule" <+> doubleQuotes (ftext name) <> colon,
text "Forall'd variable" <+> quotes (ppr var) <+>
text "does not appear on left hand side"]
badRuleLhsErr :: FastString -> LHsExpr Name -> HsExpr Name -> SDoc
badRuleLhsErr name lhs bad_e
= sep [text "Rule" <+> pprRuleName name <> colon,
nest 4 (vcat [err,
text "in left-hand side:" <+> ppr lhs])]
$$
text "LHS must be of form (f e1 .. en) where f is not forall'd"
where
err = case bad_e of
HsUnboundVar uv -> text "Not in scope:" <+> ppr uv
_ -> text "Illegal expression:" <+> ppr bad_e
{-
*********************************************************
* *
\subsection{Vectorisation declarations}
* *
*********************************************************
-}
rnHsVectDecl :: VectDecl RdrName -> RnM (VectDecl Name, FreeVars)
-- FIXME: For the moment, the right-hand side is restricted to be a variable as we cannot properly
-- typecheck a complex right-hand side without invoking 'vectType' from the vectoriser.
rnHsVectDecl (HsVect s var rhs@(L _ (HsVar _)))
= do { var' <- lookupLocatedOccRn var
; (rhs', fv_rhs) <- rnLExpr rhs
; return (HsVect s var' rhs', fv_rhs `addOneFV` unLoc var')
}
rnHsVectDecl (HsVect _ _var _rhs)
= failWith $ vcat
[ text "IMPLEMENTATION RESTRICTION: right-hand side of a VECTORISE pragma"
, text "must be an identifier"
]
rnHsVectDecl (HsNoVect s var)
= do { var' <- lookupLocatedTopBndrRn var -- only applies to local (not imported) names
; return (HsNoVect s var', unitFV (unLoc var'))
}
rnHsVectDecl (HsVectTypeIn s isScalar tycon Nothing)
= do { tycon' <- lookupLocatedOccRn tycon
; return (HsVectTypeIn s isScalar tycon' Nothing, unitFV (unLoc tycon'))
}
rnHsVectDecl (HsVectTypeIn s isScalar tycon (Just rhs_tycon))
= do { tycon' <- lookupLocatedOccRn tycon
; rhs_tycon' <- lookupLocatedOccRn rhs_tycon
; return ( HsVectTypeIn s isScalar tycon' (Just rhs_tycon')
, mkFVs [unLoc tycon', unLoc rhs_tycon'])
}
rnHsVectDecl (HsVectTypeOut _ _ _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectTypeOut'"
rnHsVectDecl (HsVectClassIn s cls)
= do { cls' <- lookupLocatedOccRn cls
; return (HsVectClassIn s cls', unitFV (unLoc cls'))
}
rnHsVectDecl (HsVectClassOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectClassOut'"
rnHsVectDecl (HsVectInstIn instTy)
= do { (instTy', fvs) <- rnLHsInstType (text "a VECTORISE pragma") instTy
; return (HsVectInstIn instTy', fvs)
}
rnHsVectDecl (HsVectInstOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectInstOut'"
{- **************************************************************
* *
Renaming type, class, instance and role declarations
* *
*****************************************************************
@rnTyDecl@ uses the `global name function' to create a new type
declaration in which local names have been replaced by their original
names, reporting any unknown names.
Renaming type variables is a pain. Because they now contain uniques,
it is necessary to pass in an association list which maps a parsed
tyvar to its @Name@ representation.
In some cases (type signatures of values),
it is even necessary to go over the type first
in order to get the set of tyvars used by it, make an assoc list,
and then go over it again to rename the tyvars!
However, we can also do some scoping checks at the same time.
Note [Dependency analysis of type, class, and instance decls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A TyClGroup represents a strongly connected components of
type/class/instance decls, together with the role annotations for the
type/class declarations. The renamer uses strongly connected
component analysis to build these groups. We do this for a number of
reasons:
* Improve kind error messages. Consider
data T f a = MkT f a
data S f a = MkS f (T f a)
This has a kind error, but the error message is better if you
check T first, (fixing its kind) and *then* S. If you do kind
inference together, you might get an error reported in S, which
is jolly confusing. See Trac #4875
* Increase kind polymorphism. See TcTyClsDecls
Note [Grouping of type and class declarations]
Why do the instance declarations participate? At least two reasons
* Consider (Trac #11348)
type family F a
type instance F Int = Bool
data R = MkR (F Int)
type Foo = 'MkR 'True
For Foo to kind-check we need to know that (F Int) ~ Bool. But we won't
know that unless we've looked at the type instance declaration for F
before kind-checking Foo.
* Another example is this (Trac #3990).
data family Complex a
data instance Complex Double = CD {-# UNPACK #-} !Double
{-# UNPACK #-} !Double
data T = T {-# UNPACK #-} !(Complex Double)
Here, to generate the right kind of unpacked implementation for T,
we must have access to the 'data instance' declaration.
* Things become more complicated when we introduce transitive
dependencies through imported definitions, like in this scenario:
A.hs
type family Closed (t :: Type) :: Type where
Closed t = Open t
type family Open (t :: Type) :: Type
B.hs
data Q where
Q :: Closed Bool -> Q
type instance Open Int = Bool
type S = 'Q 'True
Somehow, we must ensure that the instance Open Int = Bool is checked before
the type synonym S. While we know that S depends upon 'Q, which depends upon Closed,
we have no idea that Closed depends upon Open!
To accommodate these situations, we ensure that an instance is checked
before every @TyClDecl@ on which it does not depend. That's to say, instances
are checked as early as possible in @tcTyAndClassDecls@.
------------------------------------
So much for WHY. What about HOW? It's pretty easy:
(1) Rename the type/class, instance, and role declarations
individually
(2) Do strongly-connected component analysis of the type/class decls,
We'll make a TyClGroup for each SCC
In this step we treat a reference to a (promoted) data constructor
K as a dependency on its parent type. Thus
data T = K1 | K2
data S = MkS (Proxy 'K1)
Here S depends on 'K1 and hence on its parent T.
In this step we ignore instances; see
Note [No dependencies on data instances]
(3) Attach roles to the appropriate SCC
(4) Attach instances to the appropriate SCC.
We add an instance decl to SCC when:
all its free types/classes are bound in this SCC or earlier ones
(5) We make an initial TyClGroup, with empty group_tyclds, for any
(orphan) instances that affect only imported types/classes
Steps (3) and (4) are done by the (mapAccumL mk_group) call.
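A small illustration of the resulting grouping (purely illustrative, not
compiler output):
data T a = MkT a -- forms an SCC of its own in step (2)
type role T nominal -- attached to that SCC in step (3)
instance Eq (T Int) -- attached in step (4), once T is bound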
Note [No dependencies on data instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
data family D a
data instance D Int = D1
data S = MkS (Proxy 'D1)
Here the declaration of S depends on the /data instance/ declaration
for 'D Int'. That makes things a lot more complicated, especially
if the data instance is an associated type of an enclosing class instance.
(And the class instance might have several associated type instances
with different dependency structure!)
Ugh. For now we simply don't allow promotion of data constructors for
data instances. See Note [AFamDataCon: not promoting data family
constructors] in TcEnv
-}
rnTyClDecls :: [TyClGroup RdrName]
-> RnM ([TyClGroup Name], FreeVars)
-- Rename the declarations and do dependency analysis on them
rnTyClDecls tycl_ds
= do { -- Rename the type/class, instance, and role declarations
tycls_w_fvs <- mapM (wrapLocFstM rnTyClDecl)
(tyClGroupTyClDecls tycl_ds)
; let tc_names = mkNameSet (map (tcdName . unLoc . fst) tycls_w_fvs)
; instds_w_fvs <- mapM (wrapLocFstM rnSrcInstDecl) (tyClGroupInstDecls tycl_ds)
; role_annots <- rnRoleAnnots tc_names (tyClGroupRoleDecls tycl_ds)
; tycls_w_fvs <- addBootDeps tycls_w_fvs
-- TBD must add_boot_deps to instds_w_fvs?
-- Do SCC analysis on the type/class decls
; rdr_env <- getGlobalRdrEnv
; let tycl_sccs = depAnalTyClDecls rdr_env tycls_w_fvs
role_annot_env = mkRoleAnnotEnv role_annots
inst_ds_map = mkInstDeclFreeVarsMap rdr_env tc_names instds_w_fvs
(init_inst_ds, rest_inst_ds) = getInsts [] inst_ds_map
first_group
| null init_inst_ds = []
| otherwise = [TyClGroup { group_tyclds = []
, group_roles = []
, group_instds = init_inst_ds }]
((final_inst_ds, orphan_roles), groups)
= mapAccumL mk_group (rest_inst_ds, role_annot_env) tycl_sccs
all_fvs = plusFV (foldr (plusFV . snd) emptyFVs tycls_w_fvs)
(foldr (plusFV . snd) emptyFVs instds_w_fvs)
all_groups = first_group ++ groups
; ASSERT2( null final_inst_ds, ppr instds_w_fvs $$ ppr inst_ds_map
$$ ppr (flattenSCCs tycl_sccs) $$ ppr final_inst_ds )
mapM_ orphanRoleAnnotErr (nameEnvElts orphan_roles)
; traceRn "rnTycl dependency analysis made groups" (ppr all_groups)
; return (all_groups, all_fvs) }
where
mk_group :: (InstDeclFreeVarsMap, RoleAnnotEnv)
-> SCC (LTyClDecl Name)
-> ( (InstDeclFreeVarsMap, RoleAnnotEnv)
, TyClGroup Name )
mk_group (inst_map, role_env) scc
= ((inst_map', role_env'), group)
where
tycl_ds = flattenSCC scc
bndrs = map (tcdName . unLoc) tycl_ds
(inst_ds, inst_map') = getInsts bndrs inst_map
(roles, role_env') = getRoleAnnots bndrs role_env
group = TyClGroup { group_tyclds = tycl_ds
, group_roles = roles
, group_instds = inst_ds }
depAnalTyClDecls :: GlobalRdrEnv
-> [(LTyClDecl Name, FreeVars)]
-> [SCC (LTyClDecl Name)]
-- See Note [Dependency analysis of type, class, and instance decls]
depAnalTyClDecls rdr_env ds_w_fvs
= stronglyConnCompFromEdgedVerticesUniq edges
where
edges = [ (d, tcdName (unLoc d), map (getParent rdr_env) (nonDetEltsUFM fvs))
| (d, fvs) <- ds_w_fvs ]
-- It's OK to use nonDetEltsUFM here as
-- stronglyConnCompFromEdgedVertices is still deterministic
-- even if the edges are in nondeterministic order as explained
-- in Note [Deterministic SCC] in Digraph.
toParents :: GlobalRdrEnv -> NameSet -> NameSet
toParents rdr_env ns
= nonDetFoldUFM add emptyNameSet ns
-- It's OK to use nonDetFoldUFM because we immediately forget the
-- ordering by creating a set
where
add n s = extendNameSet s (getParent rdr_env n)
getParent :: GlobalRdrEnv -> Name -> Name
getParent rdr_env n
= case lookupGRE_Name rdr_env n of
Just gre -> case gre_par gre of
ParentIs { par_is = p } -> p
FldParent { par_is = p } -> p
_ -> n
Nothing -> n
{- Note [Extra dependencies from .hs-boot files]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a long story, so buckle in.
**Dependencies via hs-boot files are not obvious.** Consider the following case:
A.hs-boot
module A where
data A1
B.hs
module B where
import {-# SOURCE #-} A
type B1 = A1
A.hs
module A where
import B
data A2 = MkA2 B1
data A1 = MkA1 A2
Here A2 is really recursive (via B1), but we won't see that easily when
doing dependency analysis when compiling A.hs. When we look at A2,
we see that its free variables are simply B1, but only by (recursively) digging
into the definition of B1 do we see that it actually refers to A1 via an
hs-boot file.
**Recursive declarations, even those broken by an hs-boot file, need to
be type-checked together.** Whenever we refer to a declaration via
an hs-boot file, we must be careful not to force the TyThing too early:
ala Note [Tying the knot] if we force the TyThing before we have
defined it ourselves in the local type environment, GHC will error.
Conservatively, then, it would make sense to typecheck A1
and A2 from the previous example together, because the two types are
truly mutually recursive through B1.
If we are being clever, we might observe that while kind-checking
A2, we don't actually need to force the TyThing for A1: B1
independently records its kind, so there is no need to go "deeper".
But then we are in an uncomfortable situation where we have
constructed a TyThing for A2 before we have checked A1, and we
have to be absolutely certain we don't force it too deeply until
we get around to kind checking A1, which could be for a very long
time.
Indeed, with datatype promotion, we may very well need to look
at the type of MkA2 before we have kind-checked A1: consider,
data T = MkT (Proxy 'MkA2)
To promote MkA2, we need to lift its type to the kind level.
We never tested this, but it seems likely A1 would get poked
at this point.
**Here's what we do instead.** So it is expedient for us to
make sure A1 and A2 are kind checked together in a loop.
To ensure that our dependency analysis can catch this,
we add a dependency:
- from every local declaration
- to everything that comes from this module's .hs-boot file
(this is gotten from sb_tcs in the SelfBootInfo).
In this case, we'll add edges
- from A1 to A2 (but that edge is there already)
- from A2 to A1 (which is new)
Well, not quite *every* declaration. Imagine module A
above had another datatype declaration:
data A3 = A3 Int
Even though A3 has a dependency (on Int), all its dependencies are from things
that live on other packages. Since we don't have mutual dependencies across
packages, it is safe not to add the dependencies on the .hs-boot stuff to A3.
Hence function nameIsHomePackageImport.
Note that this is fairly conservative: it essentially implies that
EVERY type declaration in this module's hs-boot file will be kind-checked
together in one giant loop (and furthermore makes every other type
in the module depend on this loop). This is perhaps less than ideal, because
the larger a recursive group, the less polymorphism available (we
cannot infer a type to be polymorphically instantiated while we
are inferring its kind), but no one has hollered about this (yet!)
-}
addBootDeps :: [(LTyClDecl Name, FreeVars)] -> RnM [(LTyClDecl Name, FreeVars)]
-- See Note [Extra dependencies from .hs-boot files]
addBootDeps ds_w_fvs
= do { tcg_env <- getGblEnv
; let this_mod = tcg_mod tcg_env
boot_info = tcg_self_boot tcg_env
add_boot_deps :: [(LTyClDecl Name, FreeVars)] -> [(LTyClDecl Name, FreeVars)]
add_boot_deps ds_w_fvs
= case boot_info of
SelfBoot { sb_tcs = tcs } | not (isEmptyNameSet tcs)
-> map (add_one tcs) ds_w_fvs
_ -> ds_w_fvs
add_one :: NameSet -> (LTyClDecl Name, FreeVars) -> (LTyClDecl Name, FreeVars)
add_one tcs pr@(decl,fvs)
| has_local_imports fvs = (decl, fvs `plusFV` tcs)
| otherwise = pr
has_local_imports fvs
= nameSetAny (nameIsHomePackageImport this_mod) fvs
; return (add_boot_deps ds_w_fvs) }
{- ******************************************************
* *
Role annotations
* *
****************************************************** -}
-- | Renames role annotations, returning them as the values in a NameEnv
-- and checks for duplicate role annotations.
-- It is quite convenient to do both of these in the same place.
-- See also Note [Role annotations in the renamer]
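-- The concrete syntax being renamed here looks like this (illustrative example,
-- not taken from this module):
--
-- type role Ptr representational
--
-- The occurrence of 'Ptr' is looked up only against the declarations of this
-- group; see rn_role_annot1 below.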
rnRoleAnnots :: NameSet
-> [LRoleAnnotDecl RdrName]
-> RnM [LRoleAnnotDecl Name]
rnRoleAnnots tc_names role_annots
= do { -- Check for duplicates *before* renaming, to avoid
-- lumping together all the unboundNames
let (no_dups, dup_annots) = removeDups role_annots_cmp role_annots
role_annots_cmp (L _ annot1) (L _ annot2)
= roleAnnotDeclName annot1 `compare` roleAnnotDeclName annot2
; mapM_ dupRoleAnnotErr dup_annots
; mapM (wrapLocM rn_role_annot1) no_dups }
where
rn_role_annot1 (RoleAnnotDecl tycon roles)
= do { -- the name is an *occurrence*, but look it up only in the
-- decls defined in this group (see #10263)
tycon' <- lookupSigCtxtOccRn (RoleAnnotCtxt tc_names)
(text "role annotation")
tycon
; return $ RoleAnnotDecl tycon' roles }
dupRoleAnnotErr :: [LRoleAnnotDecl RdrName] -> RnM ()
dupRoleAnnotErr [] = panic "dupRoleAnnotErr"
dupRoleAnnotErr list
= addErrAt loc $
hang (text "Duplicate role annotations for" <+>
quotes (ppr $ roleAnnotDeclName first_decl) <> colon)
2 (vcat $ map pp_role_annot sorted_list)
where
sorted_list = sortBy cmp_annot list
(L loc first_decl : _) = sorted_list
pp_role_annot (L loc decl) = hang (ppr decl)
4 (text "-- written at" <+> ppr loc)
cmp_annot (L loc1 _) (L loc2 _) = loc1 `compare` loc2
orphanRoleAnnotErr :: LRoleAnnotDecl Name -> RnM ()
orphanRoleAnnotErr (L loc decl)
= addErrAt loc $
hang (text "Role annotation for a type previously declared:")
2 (ppr decl) $$
parens (text "The role annotation must be given where" <+>
quotes (ppr $ roleAnnotDeclName decl) <+>
text "is declared.")
{- Note [Role annotations in the renamer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must ensure that a type's role annotation is put in the same group as the
proper type declaration. This is because role annotations are needed during
type-checking when creating the type's TyCon. So, rnRoleAnnots builds a
NameEnv (LRoleAnnotDecl Name) that maps a name to a role annotation for that
type, if any. Then, this map can be used to add the role annotations to the
groups after dependency analysis.
This process checks for duplicate role annotations, where we must be careful
to do the check *before* renaming to avoid calling all unbound names duplicates
of one another.
The renaming process, as usual, might identify and report errors for unbound
names. We exclude the annotations for unbound names in the annotation
environment to avoid spurious errors for orphaned annotations.
We then (in rnTyClDecls) do a check for orphan role annotations (role
annotations without an accompanying type decl). The check works by folding
over components (of type [[Either (TyClDecl Name) (InstDecl Name)]]), selecting
out the relevant role declarations for each group, as well as diminishing the
annotation environment. After the fold is complete, anything left over in the
name environment must be an orphan, and errors are generated.
An earlier version of this algorithm short-cut the orphan check by renaming
only with names declared in this module. But, this check is insufficient in
the case of staged module compilation (Template Haskell, GHCi).
See #8485. With the new lookup process (which includes types declared in other
modules), we get better error messages, too.
-}
{- ******************************************************
* *
Dependency info for instances
* *
****************************************************** -}
----------------------------------------------------------
-- | 'InstDeclFreeVarsMap is an association of an
-- @InstDecl@ with @FreeVars@. The @FreeVars@ are
-- the tycon names that are both
-- a) free in the instance declaration
-- b) bound by this group of type/class/instance decls
type InstDeclFreeVarsMap = [(LInstDecl Name, FreeVars)]
-- | Construct an @InstDeclFreeVarsMap@ by eliminating any @Name@s from the
-- @FreeVars@ which are *not* the binders of a @TyClDecl@.
mkInstDeclFreeVarsMap :: GlobalRdrEnv
-> NameSet
-> [(LInstDecl Name, FreeVars)]
-> InstDeclFreeVarsMap
mkInstDeclFreeVarsMap rdr_env tycl_bndrs inst_ds_fvs
= [ (inst_decl, toParents rdr_env fvs `intersectFVs` tycl_bndrs)
| (inst_decl, fvs) <- inst_ds_fvs ]
-- | Get the @LInstDecl@s which have empty @FreeVars@ sets, and the
-- @InstDeclFreeVarsMap@ with these entries removed.
-- We call (getInsts tcs instd_map) when we've completed the declarations
-- for 'tcs'. The call returns (inst_decls, instd_map'), where
-- inst_decls are the instance declarations all of
-- whose free vars are now defined
-- instd_map' is the inst-decl map with 'tcs' removed from
-- the free-var set
getInsts :: [Name] -> InstDeclFreeVarsMap -> ([LInstDecl Name], InstDeclFreeVarsMap)
getInsts bndrs inst_decl_map
= partitionWith pick_me inst_decl_map
where
pick_me :: (LInstDecl Name, FreeVars)
-> Either (LInstDecl Name) (LInstDecl Name, FreeVars)
pick_me (decl, fvs)
| isEmptyNameSet depleted_fvs = Left decl
| otherwise = Right (decl, depleted_fvs)
where
depleted_fvs = delFVs bndrs fvs
{- ******************************************************
* *
Renaming a type or class declaration
* *
****************************************************** -}
rnTyClDecl :: TyClDecl RdrName
-> RnM (TyClDecl Name, FreeVars)
-- All flavours of type family declarations ("type family", "newtype family",
-- and "data family"), both top level and (for an associated type)
-- in a class decl
rnTyClDecl (FamDecl { tcdFam = decl })
= do { (decl', fvs) <- rnFamDecl Nothing decl
; return (FamDecl decl', fvs) }
rnTyClDecl (SynDecl { tcdLName = tycon, tcdTyVars = tyvars,
tcdFixity = fixity, tcdRhs = rhs })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- freeKiTyVarsKindVars <$> extractHsTyRdrTyVars rhs
; let doc = TySynCtx tycon
; traceRn "rntycl-ty" (ppr tycon <+> ppr kvs)
; ((tyvars', rhs'), fvs) <- bindHsQTyVars doc Nothing Nothing kvs tyvars $
\ tyvars' _ ->
do { (rhs', fvs) <- rnTySyn doc rhs
; return ((tyvars', rhs'), fvs) }
; return (SynDecl { tcdLName = tycon', tcdTyVars = tyvars'
, tcdFixity = fixity
, tcdRhs = rhs', tcdFVs = fvs }, fvs) }
-- "data", "newtype" declarations
-- both top level and (for an associated type) in an instance decl
rnTyClDecl (DataDecl { tcdLName = tycon, tcdTyVars = tyvars,
tcdFixity = fixity, tcdDataDefn = defn })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- extractDataDefnKindVars defn
; let doc = TyDataCtx tycon
; traceRn "rntycl-data" (ppr tycon <+> ppr kvs)
; ((tyvars', defn', no_kvs), fvs)
<- bindHsQTyVars doc Nothing Nothing kvs tyvars $ \ tyvars' dep_vars ->
do { ((defn', kind_sig_fvs), fvs) <- rnDataDefn doc defn
; let sig_tvs = filterNameSet isTyVarName kind_sig_fvs
unbound_sig_tvs = sig_tvs `minusNameSet` dep_vars
; return ((tyvars', defn', isEmptyNameSet unbound_sig_tvs), fvs) }
-- See Note [Complete user-supplied kind signatures] in HsDecls
; typeintype <- xoptM LangExt.TypeInType
; let cusk = hsTvbAllKinded tyvars' &&
(not typeintype || no_kvs)
; return (DataDecl { tcdLName = tycon', tcdTyVars = tyvars'
, tcdFixity = fixity
, tcdDataDefn = defn', tcdDataCusk = cusk
, tcdFVs = fvs }, fvs) }
rnTyClDecl (ClassDecl { tcdCtxt = context, tcdLName = lcls,
tcdTyVars = tyvars, tcdFixity = fixity,
tcdFDs = fds, tcdSigs = sigs,
tcdMeths = mbinds, tcdATs = ats, tcdATDefs = at_defs,
tcdDocs = docs})
= do { lcls' <- lookupLocatedTopBndrRn lcls
; let cls' = unLoc lcls'
kvs = [] -- No scoped kind vars except those in
-- kind signatures on the tyvars
-- Tyvars scope over superclass context and method signatures
; ((tyvars', context', fds', ats'), stuff_fvs)
<- bindHsQTyVars cls_doc Nothing Nothing kvs tyvars $ \ tyvars' _ -> do
-- Checks for distinct tyvars
{ (context', cxt_fvs) <- rnContext cls_doc context
; fds' <- rnFds fds
-- The fundeps have no free variables
; (ats', fv_ats) <- rnATDecls cls' ats
; let fvs = cxt_fvs `plusFV`
fv_ats
; return ((tyvars', context', fds', ats'), fvs) }
; (at_defs', fv_at_defs) <- rnList (rnTyFamDefltEqn cls') at_defs
-- No need to check for duplicate associated type decls
-- since that is done by RnNames.extendGlobalRdrEnvRn
-- Check the signatures
-- First process the class op sigs (op_sigs), then the fixity sigs (non_op_sigs).
; let sig_rdr_names_w_locs = [op | L _ (ClassOpSig False ops _) <- sigs
, op <- ops]
; checkDupRdrNames sig_rdr_names_w_locs
-- Typechecker is responsible for checking that we only
-- give default-method bindings for things in this class.
-- The renamer *could* check this for class decls, but can't
-- for instance decls.
-- The newLocals call is tiresome: given a generic class decl
-- class C a where
-- op :: a -> a
-- op {| x+y |} (Inl a) = ...
-- op {| x+y |} (Inr b) = ...
-- op {| a*b |} (a*b) = ...
-- we want to name both "x" tyvars with the same unique, so that they are
-- easy to group together in the typechecker.
; (mbinds', sigs', meth_fvs)
<- rnMethodBinds True cls' (hsAllLTyVarNames tyvars') mbinds sigs
-- No need to check for duplicate method signatures
-- since that is done by RnNames.extendGlobalRdrEnvRn
-- and the methods are already in scope
-- Haddock docs
; docs' <- mapM (wrapLocM rnDocDecl) docs
; let all_fvs = meth_fvs `plusFV` stuff_fvs `plusFV` fv_at_defs
; return (ClassDecl { tcdCtxt = context', tcdLName = lcls',
tcdTyVars = tyvars', tcdFixity = fixity,
tcdFDs = fds', tcdSigs = sigs',
tcdMeths = mbinds', tcdATs = ats', tcdATDefs = at_defs',
tcdDocs = docs', tcdFVs = all_fvs },
all_fvs ) }
where
cls_doc = ClassDeclCtx lcls
-- "type" and "type instance" declarations
rnTySyn :: HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnTySyn doc rhs = rnLHsType doc rhs
rnDataDefn :: HsDocContext -> HsDataDefn RdrName
-> RnM ((HsDataDefn Name, NameSet), FreeVars)
-- the NameSet includes all Names free in the kind signature
-- See Note [Complete user-supplied kind signatures]
rnDataDefn doc (HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = context, dd_cons = condecls
, dd_kindSig = m_sig, dd_derivs = derivs })
= do { checkTc (h98_style || null (unLoc context))
(badGadtStupidTheta doc)
; (m_sig', sig_fvs) <- case m_sig of
Just sig -> first Just <$> rnLHsKind doc sig
Nothing -> return (Nothing, emptyFVs)
; (context', fvs1) <- rnContext doc context
; (derivs', fvs3) <- rn_derivs derivs
-- For the constructor declarations, drop the LocalRdrEnv
-- in the GADT case, where the type variables in the declaration
-- do not scope over the constructor signatures
-- data T a where { T1 :: forall b. b-> b }
; let { zap_lcl_env | h98_style = \ thing -> thing
| otherwise = setLocalRdrEnv emptyLocalRdrEnv }
; (condecls', con_fvs) <- zap_lcl_env $ rnConDecls condecls
-- No need to check for duplicate constructor decls
-- since that is done by RnNames.extendGlobalRdrEnvRn
; let all_fvs = fvs1 `plusFV` fvs3 `plusFV`
con_fvs `plusFV` sig_fvs
; return (( HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = context', dd_kindSig = m_sig'
, dd_cons = condecls'
, dd_derivs = derivs' }
, sig_fvs )
, all_fvs )
}
where
h98_style = case condecls of -- Note [Stupid theta]
L _ (ConDeclGADT {}) : _ -> False
_ -> True
rn_derivs (L loc ds)
= do { deriv_strats_ok <- xoptM LangExt.DerivingStrategies
; failIfTc (lengthExceeds ds 1 && not deriv_strats_ok)
multipleDerivClausesErr
; (ds', fvs) <- mapFvRn (rnLHsDerivingClause deriv_strats_ok doc) ds
; return (L loc ds', fvs) }
rnLHsDerivingClause :: Bool -> HsDocContext -> LHsDerivingClause RdrName
-> RnM (LHsDerivingClause Name, FreeVars)
rnLHsDerivingClause deriv_strats_ok doc
(L loc (HsDerivingClause { deriv_clause_strategy = dcs
, deriv_clause_tys = L loc' dct }))
= do { failIfTc (isJust dcs && not deriv_strats_ok) $
illegalDerivStrategyErr $ fmap unLoc dcs
; (dct', fvs) <- mapFvRn (rnHsSigType doc) dct
; return ( L loc (HsDerivingClause { deriv_clause_strategy = dcs
, deriv_clause_tys = L loc' dct' })
, fvs ) }
badGadtStupidTheta :: HsDocContext -> SDoc
badGadtStupidTheta _
= vcat [text "No context is allowed on a GADT-style data declaration",
text "(You can put a context on each constructor, though.)"]
illegalDerivStrategyErr :: Maybe DerivStrategy -> SDoc
illegalDerivStrategyErr ds
= vcat [ text "Illegal deriving strategy" <> colon <+> maybe empty ppr ds
, text "Use DerivingStrategies to enable this extension" ]
multipleDerivClausesErr :: SDoc
multipleDerivClausesErr
= vcat [ text "Illegal use of multiple, consecutive deriving clauses"
, text "Use DerivingStrategies to allow this" ]
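-- For orientation, an illustrative sketch of source syntax that runs into the
-- two errors above (accepted only with DerivingStrategies, plus
-- GeneralizedNewtypeDeriving for the last clause):
--
-- newtype Age = Age Int
-- deriving (Eq, Show)
-- deriving newtype Num
--
-- The second clause makes the deriving clauses "multiple, consecutive", and
-- the explicit 'newtype' strategy is likewise rejected without the extension.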
rnFamDecl :: Maybe Name -- Just cls => this FamilyDecl is nested
-- inside an *class decl* for cls
-- used for associated types
-> FamilyDecl RdrName
-> RnM (FamilyDecl Name, FreeVars)
rnFamDecl mb_cls (FamilyDecl { fdLName = tycon, fdTyVars = tyvars
, fdFixity = fixity
, fdInfo = info, fdResultSig = res_sig
, fdInjectivityAnn = injectivity })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- extractRdrKindSigVars res_sig
; ((tyvars', res_sig', injectivity'), fv1) <-
bindHsQTyVars doc Nothing mb_cls kvs tyvars $
\ tyvars'@(HsQTvs { hsq_implicit = rn_kvs }) _ ->
do { let rn_sig = rnFamResultSig doc rn_kvs
; (res_sig', fv_kind) <- wrapLocFstM rn_sig res_sig
; injectivity' <- traverse (rnInjectivityAnn tyvars' res_sig')
injectivity
; return ( (tyvars', res_sig', injectivity') , fv_kind ) }
; (info', fv2) <- rn_info info
; return (FamilyDecl { fdLName = tycon', fdTyVars = tyvars'
, fdFixity = fixity
, fdInfo = info', fdResultSig = res_sig'
, fdInjectivityAnn = injectivity' }
, fv1 `plusFV` fv2) }
where
doc = TyFamilyCtx tycon
----------------------
rn_info (ClosedTypeFamily (Just eqns))
= do { (eqns', fvs) <- rnList (rnTyFamInstEqn Nothing) eqns
-- no class context,
; return (ClosedTypeFamily (Just eqns'), fvs) }
rn_info (ClosedTypeFamily Nothing)
= return (ClosedTypeFamily Nothing, emptyFVs)
rn_info OpenTypeFamily = return (OpenTypeFamily, emptyFVs)
rn_info DataFamily = return (DataFamily, emptyFVs)
rnFamResultSig :: HsDocContext
-> [Name] -- kind variables already in scope
-> FamilyResultSig RdrName
-> RnM (FamilyResultSig Name, FreeVars)
rnFamResultSig _ _ NoSig
= return (NoSig, emptyFVs)
rnFamResultSig doc _ (KindSig kind)
= do { (rndKind, ftvs) <- rnLHsKind doc kind
; return (KindSig rndKind, ftvs) }
rnFamResultSig doc kv_names (TyVarSig tvbndr)
= do { -- `TyVarSig` tells us that the user named the result of a type family by
-- writing `= tyvar` or `= (tyvar :: kind)`. In such a case we want to
-- be sure that the supplied result name is not identical to an
-- already in-scope type variable from an enclosing class.
--
-- Example of disallowed declaration:
-- class C a b where
-- type F b = a | a -> b
rdr_env <- getLocalRdrEnv
; let resName = hsLTyVarName tvbndr
; when (resName `elemLocalRdrEnv` rdr_env) $
addErrAt (getLoc tvbndr) $
(hsep [ text "Type variable", quotes (ppr resName) <> comma
, text "naming a type family result,"
] $$
text "shadows an already bound type variable")
; bindLHsTyVarBndr doc Nothing -- this might be a lie, but it's used for
-- scoping checks that are irrelevant here
(mkNameSet kv_names) emptyNameSet
-- use of emptyNameSet here avoids
-- redundant duplicate errors
tvbndr $ \ _ _ tvbndr' ->
return (TyVarSig tvbndr', unitFV (hsLTyVarName tvbndr')) }
-- Note [Renaming injectivity annotation]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- During renaming of an injectivity annotation we have to make several checks to
-- make sure that it is well-formed. At the moment an injectivity annotation
-- consists of a single injectivity condition, so the terms "injectivity
-- annotation" and "injectivity condition" might be used interchangeably. See
-- Note [Injectivity annotation] for a detailed discussion of currently allowed
-- injectivity annotations.
--
-- Checking the LHS is simple because the only type variable allowed on the LHS of
-- an injectivity condition is the variable naming the result in the type family head.
-- Example of disallowed annotation:
--
-- type family Foo a b = r | b -> a
--
-- Verifying the RHS of the injectivity condition consists of checking that:
--
-- 1. only variables defined in the type family head appear on the RHS (kind
-- variables are also allowed). Example of disallowed annotation:
--
-- type family Foo a = r | r -> b
--
-- 2. for associated types the result variable does not shadow any of the type
-- class variables. Example of disallowed annotation:
--
-- class Foo a b where
-- type F a = b | b -> a
--
-- Breaking any of these assumptions results in an error.
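-- For contrast, a well-formed annotation looks like this (illustrative):
--
-- type family F a b = r | r -> a b
--
-- Here the LHS of the condition is the result variable r and the RHS mentions
-- only variables bound in the family head.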
-- | Rename an injectivity annotation. Note that the injectivity annotation is just
-- the part after the "|". Everything that appears before it is renamed in
-- rnFamDecl.
rnInjectivityAnn :: LHsQTyVars Name -- ^ Type variables declared in
-- type family head
-> LFamilyResultSig Name -- ^ Result signature
-> LInjectivityAnn RdrName -- ^ Injectivity annotation
-> RnM (LInjectivityAnn Name)
rnInjectivityAnn tvBndrs (L _ (TyVarSig resTv))
(L srcSpan (InjectivityAnn injFrom injTo))
= do
{ (injDecl'@(L _ (InjectivityAnn injFrom' injTo')), noRnErrors)
<- askNoErrs $
bindLocalNames [hsLTyVarName resTv] $
-- The return type variable scopes over the injectivity annotation
-- e.g. type family F a = (r::*) | r -> a
do { injFrom' <- rnLTyVar injFrom
; injTo' <- mapM rnLTyVar injTo
; return $ L srcSpan (InjectivityAnn injFrom' injTo') }
; let tvNames = Set.fromList $ hsAllLTyVarNames tvBndrs
resName = hsLTyVarName resTv
-- See Note [Renaming injectivity annotation]
lhsValid = EQ == (stableNameCmp resName (unLoc injFrom'))
rhsValid = Set.fromList (map unLoc injTo') `Set.difference` tvNames
-- if renaming of type variables ended with errors (e.g. there were
-- not-in-scope variables) don't check the validity of the injectivity
-- annotation. This gives better error messages.
; when (noRnErrors && not lhsValid) $
addErrAt (getLoc injFrom)
( vcat [ text $ "Incorrect type variable on the LHS of "
++ "injectivity condition"
, nest 5
( vcat [ text "Expected :" <+> ppr resName
, text "Actual :" <+> ppr injFrom ])])
; when (noRnErrors && not (Set.null rhsValid)) $
do { let errorVars = Set.toList rhsValid
; addErrAt srcSpan $ ( hsep
[ text "Unknown type variable" <> plural errorVars
, text "on the RHS of injectivity condition:"
, interpp'SP errorVars ] ) }
; return injDecl' }
-- We can only hit this case when the user writes an injectivity annotation without
-- naming the result:
--
-- type family F a | result -> a
-- type family F a :: * | result -> a
--
-- So we rename injectivity annotation like we normally would except that
-- this time we expect "result" to be reported not in scope by rnLTyVar.
rnInjectivityAnn _ _ (L srcSpan (InjectivityAnn injFrom injTo)) =
setSrcSpan srcSpan $ do
(injDecl', _) <- askNoErrs $ do
injFrom' <- rnLTyVar injFrom
injTo' <- mapM rnLTyVar injTo
return $ L srcSpan (InjectivityAnn injFrom' injTo')
return $ injDecl'
{-
Note [Stupid theta]
~~~~~~~~~~~~~~~~~~~
Trac #3850 complains about a regression wrt 6.10 for
data Show a => T a
There is no reason not to allow the stupid theta if there are no data
constructors. It's still stupid, but does no harm, and I don't want
to cause programs to break unnecessarily (notably HList). So if there
are no data constructors we allow h98_style = True
-}
{- *****************************************************
* *
Support code for type/data declarations
* *
***************************************************** -}
---------------
badAssocRhs :: [Name] -> RnM ()
badAssocRhs ns
= addErr (hang (text "The RHS of an associated type declaration mentions"
<+> pprWithCommas (quotes . ppr) ns)
2 (text "All such variables must be bound on the LHS"))
-----------------
rnConDecls :: [LConDecl RdrName] -> RnM ([LConDecl Name], FreeVars)
rnConDecls = mapFvRn (wrapLocFstM rnConDecl)
rnConDecl :: ConDecl RdrName -> RnM (ConDecl Name, FreeVars)
rnConDecl decl@(ConDeclH98 { con_name = name, con_qvars = qtvs
, con_cxt = mcxt, con_details = details
, con_doc = mb_doc })
= do { _ <- addLocM checkConName name
; new_name <- lookupLocatedTopBndrRn name
; let doc = ConDeclCtx [new_name]
; mb_doc' <- rnMbLHsDoc mb_doc
; (kvs, qtvs') <- get_con_qtvs (hsConDeclArgTys details)
; bindHsQTyVars doc (Just $ inHsDocContext doc) Nothing kvs qtvs' $
\new_tyvars _ -> do
{ (new_context, fvs1) <- case mcxt of
Nothing -> return (Nothing,emptyFVs)
Just lcxt -> do { (lctx',fvs) <- rnContext doc lcxt
; return (Just lctx',fvs) }
; (new_details, fvs2) <- rnConDeclDetails (unLoc new_name) doc details
; let (new_details',fvs3) = (new_details,emptyFVs)
; traceRn "rnConDecl" (ppr name <+> vcat
[ text "free_kvs:" <+> ppr kvs
, text "qtvs:" <+> ppr qtvs
, text "qtvs':" <+> ppr qtvs' ])
; let all_fvs = fvs1 `plusFV` fvs2 `plusFV` fvs3
new_tyvars' = case qtvs of
Nothing -> Nothing
Just _ -> Just new_tyvars
; return (decl { con_name = new_name, con_qvars = new_tyvars'
, con_cxt = new_context, con_details = new_details'
, con_doc = mb_doc' },
all_fvs) }}
where
cxt = maybe [] unLoc mcxt
get_rdr_tvs tys = extractHsTysRdrTyVars (cxt ++ tys)
get_con_qtvs :: [LHsType RdrName]
-> RnM ([Located RdrName], LHsQTyVars RdrName)
get_con_qtvs arg_tys
| Just tvs <- qtvs -- data T = forall a. MkT (a -> a)
= do { free_vars <- get_rdr_tvs arg_tys
; return (freeKiTyVarsKindVars free_vars, tvs) }
| otherwise -- data T = MkT (a -> a)
= return ([], mkHsQTvs [])
rnConDecl decl@(ConDeclGADT { con_names = names, con_type = ty
, con_doc = mb_doc })
= do { mapM_ (addLocM checkConName) names
; new_names <- mapM lookupLocatedTopBndrRn names
; let doc = ConDeclCtx new_names
; mb_doc' <- rnMbLHsDoc mb_doc
; (ty', fvs) <- rnHsSigType doc ty
; traceRn "rnConDecl" (ppr names <+> vcat
[ text "fvs:" <+> ppr fvs ])
; return (decl { con_names = new_names, con_type = ty'
, con_doc = mb_doc' },
fvs) }
rnConDeclDetails
:: Name
-> HsDocContext
-> HsConDetails (LHsType RdrName) (Located [LConDeclField RdrName])
-> RnM (HsConDetails (LHsType Name) (Located [LConDeclField Name]), FreeVars)
rnConDeclDetails _ doc (PrefixCon tys)
= do { (new_tys, fvs) <- rnLHsTypes doc tys
; return (PrefixCon new_tys, fvs) }
rnConDeclDetails _ doc (InfixCon ty1 ty2)
= do { (new_ty1, fvs1) <- rnLHsType doc ty1
; (new_ty2, fvs2) <- rnLHsType doc ty2
; return (InfixCon new_ty1 new_ty2, fvs1 `plusFV` fvs2) }
rnConDeclDetails con doc (RecCon (L l fields))
= do { fls <- lookupConstructorFields con
; (new_fields, fvs) <- rnConDeclFields doc fls fields
-- No need to check for duplicate fields
-- since that is done by RnNames.extendGlobalRdrEnvRn
; return (RecCon (L l new_fields), fvs) }
-------------------------------------------------
-- | Brings pattern synonym names and also pattern synonym selectors
-- from record pattern synonyms into scope.
extendPatSynEnv :: HsValBinds RdrName -> MiniFixityEnv
-> ([Name] -> TcRnIf TcGblEnv TcLclEnv a) -> TcM a
extendPatSynEnv val_decls local_fix_env thing = do {
names_with_fls <- new_ps val_decls
; let pat_syn_bndrs = concat [ name: map flSelector fields
| (name, fields) <- names_with_fls ]
; let avails = map avail pat_syn_bndrs
; (gbl_env, lcl_env) <- extendGlobalRdrEnvRn avails local_fix_env
; let field_env' = extendNameEnvList (tcg_field_env gbl_env) names_with_fls
final_gbl_env = gbl_env { tcg_field_env = field_env' }
; setEnvs (final_gbl_env, lcl_env) (thing pat_syn_bndrs) }
where
new_ps :: HsValBinds RdrName -> TcM [(Name, [FieldLabel])]
new_ps (ValBindsIn binds _) = foldrBagM new_ps' [] binds
new_ps _ = panic "new_ps"
new_ps' :: LHsBindLR RdrName RdrName
-> [(Name, [FieldLabel])]
-> TcM [(Name, [FieldLabel])]
new_ps' bind names
| L bind_loc (PatSynBind (PSB { psb_id = L _ n
, psb_args = RecordPatSyn as })) <- bind
= do
bnd_name <- newTopSrcBinder (L bind_loc n)
let rnames = map recordPatSynSelectorId as
mkFieldOcc :: Located RdrName -> LFieldOcc RdrName
mkFieldOcc (L l name) = L l (FieldOcc (L l name) PlaceHolder)
field_occs = map mkFieldOcc rnames
flds <- mapM (newRecordSelector False [bnd_name]) field_occs
return ((bnd_name, flds): names)
| L bind_loc (PatSynBind (PSB { psb_id = L _ n})) <- bind
= do
bnd_name <- newTopSrcBinder (L bind_loc n)
return ((bnd_name, []): names)
| otherwise
= return names
{-
*********************************************************
* *
\subsection{Support code to rename types}
* *
*********************************************************
-}
rnFds :: [Located (FunDep (Located RdrName))]
-> RnM [Located (FunDep (Located Name))]
rnFds fds
= mapM (wrapLocM rn_fds) fds
where
rn_fds (tys1, tys2)
= do { tys1' <- rnHsTyVars tys1
; tys2' <- rnHsTyVars tys2
; return (tys1', tys2') }
rnHsTyVars :: [Located RdrName] -> RnM [Located Name]
rnHsTyVars tvs = mapM rnHsTyVar tvs
rnHsTyVar :: Located RdrName -> RnM (Located Name)
rnHsTyVar (L l tyvar) = do
tyvar' <- lookupOccRn tyvar
return (L l tyvar')
{-
*********************************************************
* *
findSplice
* *
*********************************************************
This code marches down the declarations, looking for the first
Template Haskell splice. As it does so it
a) groups the declarations into a HsGroup
b) runs any top-level quasi-quotes
-}
findSplice :: [LHsDecl RdrName] -> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
findSplice ds = addl emptyRdrGroup ds
addl :: HsGroup RdrName -> [LHsDecl RdrName]
-> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
-- This stuff reverses the declarations (again) but it doesn't matter
addl gp [] = return (gp, Nothing)
addl gp (L l d : ds) = add gp l d ds
add :: HsGroup RdrName -> SrcSpan -> HsDecl RdrName -> [LHsDecl RdrName]
-> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
-- #10047: Declaration QuasiQuoters are expanded immediately, without
-- causing a group split
add gp _ (SpliceD (SpliceDecl (L _ qq@HsQuasiQuote{}) _)) ds
= do { (ds', _) <- rnTopSpliceDecls qq
; addl gp (ds' ++ ds)
}
add gp loc (SpliceD splice@(SpliceDecl _ flag)) ds
= do { -- We've found a top-level splice. If it is an *implicit* one
-- (i.e. a naked top level expression)
case flag of
ExplicitSplice -> return ()
ImplicitSplice -> do { th_on <- xoptM LangExt.TemplateHaskell
; unless th_on $ setSrcSpan loc $
failWith badImplicitSplice }
; return (gp, Just (splice, ds)) }
where
badImplicitSplice = text "Parse error: module header, import declaration"
$$ text "or top-level declaration expected."
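-- For reference, the two flavours look like this in source syntax (an
-- illustrative sketch; 'mkDecls' stands in for any declaration-producing
-- expression):
--
-- $(mkDecls) -- explicit splice
-- mkDecls -- implicit splice: a naked top-level expression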
-- Class declarations: pull out the fixity signatures to the top
add gp@(HsGroup {hs_tyclds = ts, hs_fixds = fs}) l (TyClD d) ds
| isClassDecl d
= let fsigs = [ L l f | L l (FixSig f) <- tcdSigs d ] in
addl (gp { hs_tyclds = add_tycld (L l d) ts, hs_fixds = fsigs ++ fs}) ds
| otherwise
= addl (gp { hs_tyclds = add_tycld (L l d) ts }) ds
-- Signatures: fixity sigs go a different place than all others
add gp@(HsGroup {hs_fixds = ts}) l (SigD (FixSig f)) ds
= addl (gp {hs_fixds = L l f : ts}) ds
add gp@(HsGroup {hs_valds = ts}) l (SigD d) ds
= addl (gp {hs_valds = add_sig (L l d) ts}) ds
-- Value declarations: use add_bind
add gp@(HsGroup {hs_valds = ts}) l (ValD d) ds
= addl (gp { hs_valds = add_bind (L l d) ts }) ds
-- Role annotations: added to the TyClGroup
add gp@(HsGroup {hs_tyclds = ts}) l (RoleAnnotD d) ds
= addl (gp { hs_tyclds = add_role_annot (L l d) ts }) ds
-- NB instance declarations go into TyClGroups. We throw them into the first
-- group, just as we do for the TyClD case. The renamer will go on to group
-- and order them later.
add gp@(HsGroup {hs_tyclds = ts}) l (InstD d) ds
= addl (gp { hs_tyclds = add_instd (L l d) ts }) ds
-- The rest are routine
add gp@(HsGroup {hs_derivds = ts}) l (DerivD d) ds
= addl (gp { hs_derivds = L l d : ts }) ds
add gp@(HsGroup {hs_defds = ts}) l (DefD d) ds
= addl (gp { hs_defds = L l d : ts }) ds
add gp@(HsGroup {hs_fords = ts}) l (ForD d) ds
= addl (gp { hs_fords = L l d : ts }) ds
add gp@(HsGroup {hs_warnds = ts}) l (WarningD d) ds
= addl (gp { hs_warnds = L l d : ts }) ds
add gp@(HsGroup {hs_annds = ts}) l (AnnD d) ds
= addl (gp { hs_annds = L l d : ts }) ds
add gp@(HsGroup {hs_ruleds = ts}) l (RuleD d) ds
= addl (gp { hs_ruleds = L l d : ts }) ds
add gp@(HsGroup {hs_vects = ts}) l (VectD d) ds
= addl (gp { hs_vects = L l d : ts }) ds
add gp l (DocD d) ds
= addl (gp { hs_docs = (L l d) : (hs_docs gp) }) ds
add_tycld :: LTyClDecl a -> [TyClGroup a] -> [TyClGroup a]
add_tycld d [] = [TyClGroup { group_tyclds = [d]
, group_roles = []
, group_instds = []
}
]
add_tycld d (ds@(TyClGroup { group_tyclds = tyclds }):dss)
= ds { group_tyclds = d : tyclds } : dss
add_instd :: LInstDecl a -> [TyClGroup a] -> [TyClGroup a]
add_instd d [] = [TyClGroup { group_tyclds = []
, group_roles = []
, group_instds = [d]
}
]
add_instd d (ds@(TyClGroup { group_instds = instds }):dss)
= ds { group_instds = d : instds } : dss
add_role_annot :: LRoleAnnotDecl a -> [TyClGroup a] -> [TyClGroup a]
add_role_annot d [] = [TyClGroup { group_tyclds = []
, group_roles = [d]
, group_instds = []
}
]
add_role_annot d (tycls@(TyClGroup { group_roles = roles }) : rest)
= tycls { group_roles = d : roles } : rest
add_bind :: LHsBind a -> HsValBinds a -> HsValBinds a
add_bind b (ValBindsIn bs sigs) = ValBindsIn (bs `snocBag` b) sigs
add_bind _ (ValBindsOut {}) = panic "RdrHsSyn:add_bind"
add_sig :: LSig a -> HsValBinds a -> HsValBinds a
add_sig s (ValBindsIn bs sigs) = ValBindsIn bs (s:sigs)
add_sig _ (ValBindsOut {}) = panic "RdrHsSyn:add_sig"
|
olsner/ghc
|
compiler/rename/RnSource.hs
|
bsd-3-clause
| 98,600 | 1 | 25 | 30,858 | 17,858 | 9,447 | 8,411 | 1,137 | 8 |
--------------------------------------------------------------------------------
{-|
Module : OpenGL
Copyright : (c) Daan Leijen 2003
License : wxWindows
Maintainer : [email protected]
Stability : provisional
Portability : portable
Convenience wrappers for the openGL canvas window ('GLCanvas').
-}
--------------------------------------------------------------------------------
module Graphics.UI.WXCore.OpenGL
(
-- * Types
GLAttribute(..)
-- * Creation
, glCanvasCreateDefault
, glCanvasCreateEx
) where
import Graphics.UI.WXCore.WxcTypes
import Graphics.UI.WXCore.WxcClasses
import Graphics.UI.WXCore.Types
import Foreign
{----------------------------------------------------------
Attributes
----------------------------------------------------------}
-- | OpenGL window ('GLCanvas') attributes.
data GLAttribute
= GL_RGBA -- ^ Use true colour
| GL_BUFFER_SIZE Int -- ^ Bits for the buffer, used if 'GL_RGBA' is not also defined
| GL_LEVEL Ordering -- ^ 'EQ' for main buffer, 'GT' for overlay, 'LT' for underlay
| GL_DOUBLEBUFFER -- ^ Use doublebuffer
| GL_STEREO -- ^ Use stereoscopic display
| GL_AUX_BUFFERS Int -- ^ Number of auxiliary buffers (not all implementations support this option)
| GL_MIN_RED Int -- ^ Use red buffer with at least /argument/ bits
| GL_MIN_GREEN Int -- ^ Use green buffer with at least /argument/ bits
| GL_MIN_BLUE Int -- ^ Use blue buffer with at least /argument/ bits
| GL_MIN_ALPHA Int -- ^ Use alpha buffer with at least /argument/ bits
| GL_DEPTH_SIZE Int -- ^ Bits for Z-buffer (0,16,32)
| GL_STENCIL_SIZE Int -- ^ Bits for stencil buffer
| GL_MIN_ACCUM_RED Int -- ^ Use red accumulation buffer with at least /argument/ bits
| GL_MIN_ACCUM_GREEN Int -- ^ Use green accumulation buffer with at least /argument/ bits
| GL_MIN_ACCUM_BLUE Int -- ^ Use blue accumulation buffer with at least /argument/ bits
| GL_MIN_ACCUM_ALPHA Int -- ^ Use alpha accumulation buffer with at least /argument/ bits
| GL_SAMPLE_BUFFERS Int -- ^ 1 for multisampling support (antialiasing)
| GL_SAMPLES Int -- ^ 4 for 2x2 antialiasing supersampling on most graphics cards
| GL_CORE_PROFILE -- ^ request an OpenGL core profile. This also requests at least OpenGL version 3.0, since wx 3.1
| GL_MAJOR_VERSION Int -- ^ request a specific OpenGL major version number (>= 3), since wx 3.1
| GL_MINOR_VERSION Int -- ^ request a specific OpenGL minor version number (e.g. 2 for 3.2), since wx 3.1
encodeAttributes :: [GLAttribute] -> [Int]
encodeAttributes attributes
= concatMap encodeAttribute attributes
encodeAttribute :: GLAttribute -> [Int]
encodeAttribute attr
= case attr of
GL_RGBA -> [1]
GL_BUFFER_SIZE n -> [2,n]
GL_LEVEL n -> [3, case n of { GT -> 1; LT -> (-1); _other -> 0 }]
GL_DOUBLEBUFFER -> [4]
GL_STEREO -> [5]
GL_AUX_BUFFERS n -> [6,n]
GL_MIN_RED n -> [7,n]
GL_MIN_GREEN n -> [8,n]
GL_MIN_BLUE n -> [9,n]
GL_MIN_ALPHA n -> [10,n]
GL_DEPTH_SIZE n -> [11,n]
GL_STENCIL_SIZE n -> [12,n]
GL_MIN_ACCUM_RED n -> [13,n]
GL_MIN_ACCUM_GREEN n -> [14,n]
GL_MIN_ACCUM_BLUE n -> [15,n]
GL_MIN_ACCUM_ALPHA n -> [16,n]
GL_SAMPLE_BUFFERS n -> [17,n]
GL_SAMPLES n -> [18,n]
GL_CORE_PROFILE -> [19]
GL_MAJOR_VERSION n -> [20,n]
GL_MINOR_VERSION n -> [21,n]
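-- As a concrete check against the encoding above (illustrative):
--
-- encodeAttributes [GL_RGBA, GL_DOUBLEBUFFER, GL_DEPTH_SIZE 16] == [1,4,11,16]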
-- | Create a standard openGL canvas window with a certain title and attributes.
glCanvasCreateDefault :: Window a -> Style -> String -> [GLAttribute] -> IO (GLCanvas ())
glCanvasCreateDefault parent style title attrs
= glCanvasCreateEx parent idAny rectNull style title attrs nullPalette
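-- A minimal usage sketch (the parent window, style, and attribute list are
-- assumptions, not part of this module):
--
-- canvas <- glCanvasCreateDefault parent 0 "gl canvas" [GL_RGBA, GL_DOUBLEBUFFER, GL_DEPTH_SIZE 16]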
-- | Create an openGL window. Use 'nullPalette' to use the default palette.
glCanvasCreateEx :: Window a -> Id -> Rect -> Style -> String -> [GLAttribute] -> Palette b -> IO (GLCanvas ())
glCanvasCreateEx parent id' rect' style title attributes palette
= withArray0 (toCInt 0) (map toCInt (encodeAttributes attributes)) $ \pattrs ->
glCanvasCreate parent id' pattrs rect' style title palette
|
sherwoodwang/wxHaskell
|
wxcore/src/haskell/Graphics/UI/WXCore/OpenGL.hs
|
lgpl-2.1
| 4,483 | 0 | 15 | 1,168 | 741 | 419 | 322 | 65 | 23 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module Module2_Types where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq, succ,
pred, enumFrom, enumFromThen, enumFromThenTo,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import Data.List
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
import qualified Module0_Types as Module0_Types
import qualified Module1_Types as Module1_Types
data Struct = Struct
{ struct_first :: Module0_Types.Struct
, struct_second :: Module1_Types.Struct
} deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Struct where
encode = encode_Struct
decode = decode_Struct
instance Hashable.Hashable Struct where
hashWithSalt salt record = salt `Hashable.hashWithSalt` struct_first record `Hashable.hashWithSalt` struct_second record
instance DeepSeq.NFData Struct where
rnf _record0 =
DeepSeq.rnf (struct_first _record0) `seq`
DeepSeq.rnf (struct_second _record0) `seq`
()
instance Arbitrary.Arbitrary Struct where
arbitrary = Monad.liftM Struct (Arbitrary.arbitrary)
`Monad.ap`(Arbitrary.arbitrary)
shrink obj | obj == default_Struct = []
| otherwise = Maybe.catMaybes
[ if obj == default_Struct{struct_first = struct_first obj} then Nothing else Just $ default_Struct{struct_first = struct_first obj}
, if obj == default_Struct{struct_second = struct_second obj} then Nothing else Just $ default_Struct{struct_second = struct_second obj}
]
from_Struct :: Struct -> Types.ThriftVal
from_Struct record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
[ (\_v3 -> Just (1, ("first",Module0_Types.from_Struct _v3))) $ struct_first record
, (\_v3 -> Just (2, ("second",Module1_Types.from_Struct _v3))) $ struct_second record
]
write_Struct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Struct -> IO ()
write_Struct oprot record = Thrift.writeVal oprot $ from_Struct record
encode_Struct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Struct -> BS.ByteString
encode_Struct oprot record = Thrift.serializeVal oprot $ from_Struct record
to_Struct :: Types.ThriftVal -> Struct
to_Struct (Types.TStruct fields) = Struct{
struct_first = maybe (struct_first default_Struct) (\(_,_val5) -> (case _val5 of {Types.TStruct _val6 -> (Module0_Types.to_Struct (Types.TStruct _val6)); _ -> error "wrong type"})) (Map.lookup (1) fields),
struct_second = maybe (struct_second default_Struct) (\(_,_val5) -> (case _val5 of {Types.TStruct _val7 -> (Module1_Types.to_Struct (Types.TStruct _val7)); _ -> error "wrong type"})) (Map.lookup (2) fields)
}
to_Struct _ = error "not a struct"
read_Struct :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Struct
read_Struct iprot = to_Struct <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Struct)
decode_Struct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Struct
decode_Struct iprot bs = to_Struct $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Struct) bs
typemap_Struct :: Types.TypeMap
typemap_Struct = Map.fromList [("first",(1,(Types.T_STRUCT Module0_Types.typemap_Struct))),("second",(2,(Types.T_STRUCT Module1_Types.typemap_Struct)))]
default_Struct :: Struct
default_Struct = Struct{
struct_first = Module0_Types.default_Struct,
struct_second = Module1_Types.default_Struct}
data BigStruct = BigStruct
{ bigStruct_s :: Module2_Types.Struct
, bigStruct_id :: Int.Int32
} deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable BigStruct where
encode = encode_BigStruct
decode = decode_BigStruct
instance Hashable.Hashable BigStruct where
hashWithSalt salt record = salt `Hashable.hashWithSalt` bigStruct_s record `Hashable.hashWithSalt` bigStruct_id record
instance DeepSeq.NFData BigStruct where
rnf _record8 =
DeepSeq.rnf (bigStruct_s _record8) `seq`
DeepSeq.rnf (bigStruct_id _record8) `seq`
()
instance Arbitrary.Arbitrary BigStruct where
arbitrary = Monad.liftM BigStruct (Arbitrary.arbitrary)
`Monad.ap`(Arbitrary.arbitrary)
shrink obj | obj == default_BigStruct = []
| otherwise = Maybe.catMaybes
[ if obj == default_BigStruct{bigStruct_s = bigStruct_s obj} then Nothing else Just $ default_BigStruct{bigStruct_s = bigStruct_s obj}
, if obj == default_BigStruct{bigStruct_id = bigStruct_id obj} then Nothing else Just $ default_BigStruct{bigStruct_id = bigStruct_id obj}
]
from_BigStruct :: BigStruct -> Types.ThriftVal
from_BigStruct record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
[ (\_v11 -> Just (1, ("s",Module2_Types.from_Struct _v11))) $ bigStruct_s record
, (\_v11 -> Just (2, ("id",Types.TI32 _v11))) $ bigStruct_id record
]
write_BigStruct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BigStruct -> IO ()
write_BigStruct oprot record = Thrift.writeVal oprot $ from_BigStruct record
encode_BigStruct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BigStruct -> BS.ByteString
encode_BigStruct oprot record = Thrift.serializeVal oprot $ from_BigStruct record
to_BigStruct :: Types.ThriftVal -> BigStruct
to_BigStruct (Types.TStruct fields) = BigStruct{
bigStruct_s = maybe (bigStruct_s default_BigStruct) (\(_,_val13) -> (case _val13 of {Types.TStruct _val14 -> (Module2_Types.to_Struct (Types.TStruct _val14)); _ -> error "wrong type"})) (Map.lookup (1) fields),
bigStruct_id = maybe (bigStruct_id default_BigStruct) (\(_,_val13) -> (case _val13 of {Types.TI32 _val15 -> _val15; _ -> error "wrong type"})) (Map.lookup (2) fields)
}
to_BigStruct _ = error "not a struct"
read_BigStruct :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO BigStruct
read_BigStruct iprot = to_BigStruct <$> Thrift.readVal iprot (Types.T_STRUCT typemap_BigStruct)
decode_BigStruct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> BigStruct
decode_BigStruct iprot bs = to_BigStruct $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_BigStruct) bs
typemap_BigStruct :: Types.TypeMap
typemap_BigStruct = Map.fromList [("s",(1,(Types.T_STRUCT Module2_Types.typemap_Struct))),("id",(2,Types.T_I32))]
default_BigStruct :: BigStruct
default_BigStruct = BigStruct{
bigStruct_s = Module2_Types.default_Struct,
bigStruct_id = 0}
|
getyourguide/fbthrift
|
thrift/compiler/test/fixtures/qualified/gen-hs/Module2_Types.hs
|
apache-2.0
| 8,049 | 0 | 18 | 1,232 | 2,393 | 1,353 | 1,040 | 128 | 3 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[CoreMonad]{The core pipeline monad}
-}
{-# LANGUAGE CPP #-}
module CoreMonad (
-- * Configuration of the core-to-core passes
CoreToDo(..), runWhen, runMaybe,
SimplifierMode(..),
FloatOutSwitches(..),
pprPassDetails,
-- * Plugins
PluginPass, bindsOnlyPass,
-- * Counting
SimplCount, doSimplTick, doFreeSimplTick, simplCountN,
pprSimplCount, plusSimplCount, zeroSimplCount,
isZeroSimplCount, hasDetailedCounts, Tick(..),
-- * The monad
CoreM, runCoreM,
-- ** Reading from the monad
getHscEnv, getRuleBase, getModule,
getDynFlags, getOrigNameCache, getPackageFamInstEnv,
getVisibleOrphanMods,
getPrintUnqualified, getSrcSpanM,
-- ** Writing to the monad
addSimplCount,
-- ** Lifting into the monad
liftIO, liftIOWithCount,
liftIO1, liftIO2, liftIO3, liftIO4,
-- ** Global initialization
reinitializeGlobals,
-- ** Dealing with annotations
getAnnotations, getFirstAnnotations,
-- ** Screen output
putMsg, putMsgS, errorMsg, errorMsgS, warnMsg,
fatalErrorMsg, fatalErrorMsgS,
debugTraceMsg, debugTraceMsgS,
dumpIfSet_dyn,
#ifdef GHCI
-- * Getting 'Name's
thNameToGhcName
#endif
) where
#ifdef GHCI
import Name( Name )
import TcRnMonad ( initTcForLookup )
#endif
import CoreSyn
import HscTypes
import Module
import DynFlags
import StaticFlags
import BasicTypes ( CompilerPhase(..) )
import Annotations
import IOEnv hiding ( liftIO, failM, failWithM )
import qualified IOEnv ( liftIO )
import TcEnv ( lookupGlobal )
import Var
import Outputable
import FastString
import qualified ErrUtils as Err
import ErrUtils( Severity(..) )
import Maybes
import UniqSupply
import UniqFM ( UniqFM, mapUFM, filterUFM )
import MonadUtils
import SrcLoc
import ListSetOps ( runs )
import Data.List
import Data.Ord
import Data.Dynamic
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Word
import Control.Monad
import Control.Applicative ( Alternative(..) )
import Prelude hiding ( read )
#ifdef GHCI
import Control.Concurrent.MVar (MVar)
import Linker ( PersistentLinkerState, saveLinkerGlobals, restoreLinkerGlobals )
import {-# SOURCE #-} TcSplice ( lookupThName_maybe )
import qualified Language.Haskell.TH as TH
#else
saveLinkerGlobals :: IO ()
saveLinkerGlobals = return ()
restoreLinkerGlobals :: () -> IO ()
restoreLinkerGlobals () = return ()
#endif
{-
************************************************************************
* *
The CoreToDo type and related types
Abstraction of core-to-core passes to run.
* *
************************************************************************
-}
data CoreToDo -- These are the different core-to-core passes,
-- which may be invoked in any order,
-- as many times as you like.
= CoreDoSimplify -- The core-to-core simplifier.
Int -- Max iterations
SimplifierMode
| CoreDoPluginPass String PluginPass
| CoreDoFloatInwards
| CoreDoFloatOutwards FloatOutSwitches
| CoreLiberateCase
| CoreDoPrintCore
| CoreDoStaticArgs
| CoreDoCallArity
| CoreDoStrictness
| CoreDoWorkerWrapper
| CoreDoSpecialising
| CoreDoSpecConstr
| CoreCSE
| CoreDoRuleCheck CompilerPhase String -- Check for non-application of rules
-- matching this string
| CoreDoVectorisation
| CoreDoNothing -- Useful when building up
| CoreDoPasses [CoreToDo] -- lists of these things
| CoreDesugar -- Right after desugaring, no simple optimisation yet!
| CoreDesugarOpt -- CoreDesugarXXX: Not strictly a core-to-core pass, but produces
-- Core output, and hence useful to pass to endPass
| CoreTidy
| CorePrep
instance Outputable CoreToDo where
ppr (CoreDoSimplify _ _) = text "Simplifier"
ppr (CoreDoPluginPass s _) = text "Core plugin: " <+> text s
ppr CoreDoFloatInwards = text "Float inwards"
ppr (CoreDoFloatOutwards f) = text "Float out" <> parens (ppr f)
ppr CoreLiberateCase = text "Liberate case"
ppr CoreDoStaticArgs = text "Static argument"
ppr CoreDoCallArity = text "Called arity analysis"
ppr CoreDoStrictness = text "Demand analysis"
ppr CoreDoWorkerWrapper = text "Worker Wrapper binds"
ppr CoreDoSpecialising = text "Specialise"
ppr CoreDoSpecConstr = text "SpecConstr"
ppr CoreCSE = text "Common sub-expression"
ppr CoreDoVectorisation = text "Vectorisation"
ppr CoreDesugar = text "Desugar (before optimization)"
ppr CoreDesugarOpt = text "Desugar (after optimization)"
ppr CoreTidy = text "Tidy Core"
ppr CorePrep = text "CorePrep"
ppr CoreDoPrintCore = text "Print core"
ppr (CoreDoRuleCheck {}) = text "Rule check"
ppr CoreDoNothing = text "CoreDoNothing"
ppr (CoreDoPasses passes) = text "CoreDoPasses" <+> ppr passes
pprPassDetails :: CoreToDo -> SDoc
pprPassDetails (CoreDoSimplify n md) = vcat [ text "Max iterations =" <+> int n
, ppr md ]
pprPassDetails _ = Outputable.empty
data SimplifierMode -- See comments in SimplMonad
= SimplMode
{ sm_names :: [String] -- Name(s) of the phase
, sm_phase :: CompilerPhase
, sm_rules :: Bool -- Whether RULES are enabled
, sm_inline :: Bool -- Whether inlining is enabled
, sm_case_case :: Bool -- Whether case-of-case is enabled
, sm_eta_expand :: Bool -- Whether eta-expansion is enabled
}
instance Outputable SimplifierMode where
ppr (SimplMode { sm_phase = p, sm_names = ss
, sm_rules = r, sm_inline = i
, sm_eta_expand = eta, sm_case_case = cc })
= text "SimplMode" <+> braces (
sep [ text "Phase =" <+> ppr p <+>
brackets (text (concat $ intersperse "," ss)) <> comma
, pp_flag i (sLit "inline") <> comma
, pp_flag r (sLit "rules") <> comma
, pp_flag eta (sLit "eta-expand") <> comma
, pp_flag cc (sLit "case-of-case") ])
where
pp_flag f s = ppUnless f (text "no") <+> ptext s
data FloatOutSwitches = FloatOutSwitches {
floatOutLambdas :: Maybe Int, -- ^ Just n <=> float lambdas to top level, if
-- doing so will abstract over n or fewer
-- value variables
-- Nothing <=> float all lambdas to top level,
-- regardless of how many free variables
-- Just 0 is the vanilla case: float a lambda
-- iff it has no free vars
floatOutConstants :: Bool, -- ^ True <=> float constants to top level,
-- even if they do not escape a lambda
floatOutOverSatApps :: Bool,
-- ^ True <=> float out over-saturated applications
-- based on arity information.
-- See Note [Floating over-saturated applications]
-- in SetLevels
floatToTopLevelOnly :: Bool -- ^ Allow floating to the top level only.
}
instance Outputable FloatOutSwitches where
ppr = pprFloatOutSwitches
pprFloatOutSwitches :: FloatOutSwitches -> SDoc
pprFloatOutSwitches sw
= text "FOS" <+> (braces $
sep $ punctuate comma $
[ text "Lam =" <+> ppr (floatOutLambdas sw)
, text "Consts =" <+> ppr (floatOutConstants sw)
, text "OverSatApps =" <+> ppr (floatOutOverSatApps sw) ])
-- The core-to-core pass ordering is derived from the DynFlags:
runWhen :: Bool -> CoreToDo -> CoreToDo
runWhen True do_this = do_this
runWhen False _ = CoreDoNothing
runMaybe :: Maybe a -> (a -> CoreToDo) -> CoreToDo
runMaybe (Just x) f = f x
runMaybe Nothing _ = CoreDoNothing
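-- Purely as an illustration of how these combinators are used when the pass
-- list is assembled (the real ordering lives in SimplCore; 'mb_rule_check'
-- below is a hypothetical Maybe String taken from the flags):
--
--   runWhen (dopt Opt_D_dump_simpl_stats dflags) CoreDoPrintCore
--   runMaybe mb_rule_check (CoreDoRuleCheck InitialPhase)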
{-
Note [RULEs enabled in SimplGently]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
RULES are enabled when doing "gentle" simplification. Two reasons:
* We really want the class-op cancellation to happen:
op (df d1 d2) --> $cop3 d1 d2
because this breaks the mutual recursion between 'op' and 'df'
* I wanted the RULE
lift String ===> ...
to work in Template Haskell when simplifying
splices, so we get simpler code for literal strings
But watch out: list fusion can prevent floating. So use phase control
to switch off those rules until after floating.
************************************************************************
* *
Types for Plugins
* *
************************************************************************
-}
-- | A description of the plugin pass itself
type PluginPass = ModGuts -> CoreM ModGuts
bindsOnlyPass :: (CoreProgram -> CoreM CoreProgram) -> ModGuts -> CoreM ModGuts
bindsOnlyPass pass guts
= do { binds' <- pass (mg_binds guts)
; return (guts { mg_binds = binds' }) }
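-- For example (the names below are illustrative, not part of this module),
-- a plugin that only needs to transform the top-level bindings can be
-- registered as:
--
--   CoreDoPluginPass "MyPass" (bindsOnlyPass myBinds)
--     where myBinds :: CoreProgram -> CoreM CoreProgram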
{-
************************************************************************
* *
Counting and logging
* *
************************************************************************
-}
verboseSimplStats :: Bool
verboseSimplStats = opt_PprStyle_Debug -- For now, anyway
zeroSimplCount :: DynFlags -> SimplCount
isZeroSimplCount :: SimplCount -> Bool
hasDetailedCounts :: SimplCount -> Bool
pprSimplCount :: SimplCount -> SDoc
doSimplTick :: DynFlags -> Tick -> SimplCount -> SimplCount
doFreeSimplTick :: Tick -> SimplCount -> SimplCount
plusSimplCount :: SimplCount -> SimplCount -> SimplCount
data SimplCount
  = VerySimplCount !Int                   -- Used when we don't want detailed stats
| SimplCount {
ticks :: !Int, -- Total ticks
details :: !TickCounts, -- How many of each type
n_log :: !Int, -- N
log1 :: [Tick], -- Last N events; <= opt_HistorySize,
-- most recent first
log2 :: [Tick] -- Last opt_HistorySize events before that
-- Having log1, log2 lets us accumulate the
-- recent history reasonably efficiently
}
type TickCounts = Map Tick Int
simplCountN :: SimplCount -> Int
simplCountN (VerySimplCount n) = n
simplCountN (SimplCount { ticks = n }) = n
zeroSimplCount dflags
-- This is where we decide whether to do
-- the VerySimpl version or the full-stats version
| dopt Opt_D_dump_simpl_stats dflags
= SimplCount {ticks = 0, details = Map.empty,
n_log = 0, log1 = [], log2 = []}
| otherwise
= VerySimplCount 0
isZeroSimplCount (VerySimplCount n) = n==0
isZeroSimplCount (SimplCount { ticks = n }) = n==0
hasDetailedCounts (VerySimplCount {}) = False
hasDetailedCounts (SimplCount {}) = True
doFreeSimplTick tick sc@SimplCount { details = dts }
= sc { details = dts `addTick` tick }
doFreeSimplTick _ sc = sc
doSimplTick dflags tick
sc@(SimplCount { ticks = tks, details = dts, n_log = nl, log1 = l1 })
| nl >= historySize dflags = sc1 { n_log = 1, log1 = [tick], log2 = l1 }
| otherwise = sc1 { n_log = nl+1, log1 = tick : l1 }
where
sc1 = sc { ticks = tks+1, details = dts `addTick` tick }
doSimplTick _ _ (VerySimplCount n) = VerySimplCount (n+1)
-- Don't use Map.unionWith because that's lazy, and we want to
-- be pretty strict here!
addTick :: TickCounts -> Tick -> TickCounts
addTick fm tick = case Map.lookup tick fm of
Nothing -> Map.insert tick 1 fm
Just n -> n1 `seq` Map.insert tick n1 fm
where
n1 = n+1
plusSimplCount sc1@(SimplCount { ticks = tks1, details = dts1 })
sc2@(SimplCount { ticks = tks2, details = dts2 })
= log_base { ticks = tks1 + tks2, details = Map.unionWith (+) dts1 dts2 }
where
-- A hackish way of getting recent log info
log_base | null (log1 sc2) = sc1 -- Nothing at all in sc2
| null (log2 sc2) = sc2 { log2 = log1 sc1 }
| otherwise = sc2
plusSimplCount (VerySimplCount n) (VerySimplCount m) = VerySimplCount (n+m)
plusSimplCount _ _ = panic "plusSimplCount"
-- We use one or the other consistently
pprSimplCount (VerySimplCount n) = text "Total ticks:" <+> int n
pprSimplCount (SimplCount { ticks = tks, details = dts, log1 = l1, log2 = l2 })
= vcat [text "Total ticks: " <+> int tks,
blankLine,
pprTickCounts dts,
if verboseSimplStats then
vcat [blankLine,
text "Log (most recent first)",
nest 4 (vcat (map ppr l1) $$ vcat (map ppr l2))]
else Outputable.empty
]
pprTickCounts :: Map Tick Int -> SDoc
pprTickCounts counts
= vcat (map pprTickGroup groups)
where
    groups :: [[(Tick,Int)]] -- Each group shares a common tag
-- toList returns common tags adjacent
groups = runs same_tag (Map.toList counts)
same_tag (tick1,_) (tick2,_) = tickToTag tick1 == tickToTag tick2
pprTickGroup :: [(Tick, Int)] -> SDoc
pprTickGroup group@((tick1,_):_)
= hang (int (sum [n | (_,n) <- group]) <+> text (tickString tick1))
2 (vcat [ int n <+> pprTickCts tick
-- flip as we want largest first
| (tick,n) <- sortBy (flip (comparing snd)) group])
pprTickGroup [] = panic "pprTickGroup"
data Tick
= PreInlineUnconditionally Id
| PostInlineUnconditionally Id
| UnfoldingDone Id
| RuleFired FastString -- Rule name
| LetFloatFromLet
| EtaExpansion Id -- LHS binder
| EtaReduction Id -- Binder on outer lambda
| BetaReduction Id -- Lambda binder
| CaseOfCase Id -- Bndr on *inner* case
| KnownBranch Id -- Case binder
| CaseMerge Id -- Binder on outer case
| AltMerge Id -- Case binder
| CaseElim Id -- Case binder
| CaseIdentity Id -- Case binder
| FillInCaseDefault Id -- Case binder
| BottomFound
| SimplifierDone -- Ticked at each iteration of the simplifier
instance Outputable Tick where
ppr tick = text (tickString tick) <+> pprTickCts tick
instance Eq Tick where
a == b = case a `cmpTick` b of
EQ -> True
_ -> False
instance Ord Tick where
compare = cmpTick
tickToTag :: Tick -> Int
tickToTag (PreInlineUnconditionally _) = 0
tickToTag (PostInlineUnconditionally _) = 1
tickToTag (UnfoldingDone _) = 2
tickToTag (RuleFired _) = 3
tickToTag LetFloatFromLet = 4
tickToTag (EtaExpansion _) = 5
tickToTag (EtaReduction _) = 6
tickToTag (BetaReduction _) = 7
tickToTag (CaseOfCase _) = 8
tickToTag (KnownBranch _) = 9
tickToTag (CaseMerge _) = 10
tickToTag (CaseElim _) = 11
tickToTag (CaseIdentity _) = 12
tickToTag (FillInCaseDefault _) = 13
tickToTag BottomFound = 14
tickToTag SimplifierDone = 16
tickToTag (AltMerge _) = 17
tickString :: Tick -> String
tickString (PreInlineUnconditionally _) = "PreInlineUnconditionally"
tickString (PostInlineUnconditionally _)= "PostInlineUnconditionally"
tickString (UnfoldingDone _) = "UnfoldingDone"
tickString (RuleFired _) = "RuleFired"
tickString LetFloatFromLet = "LetFloatFromLet"
tickString (EtaExpansion _) = "EtaExpansion"
tickString (EtaReduction _) = "EtaReduction"
tickString (BetaReduction _) = "BetaReduction"
tickString (CaseOfCase _) = "CaseOfCase"
tickString (KnownBranch _) = "KnownBranch"
tickString (CaseMerge _) = "CaseMerge"
tickString (AltMerge _) = "AltMerge"
tickString (CaseElim _) = "CaseElim"
tickString (CaseIdentity _) = "CaseIdentity"
tickString (FillInCaseDefault _) = "FillInCaseDefault"
tickString BottomFound = "BottomFound"
tickString SimplifierDone = "SimplifierDone"
pprTickCts :: Tick -> SDoc
pprTickCts (PreInlineUnconditionally v) = ppr v
pprTickCts (PostInlineUnconditionally v)= ppr v
pprTickCts (UnfoldingDone v) = ppr v
pprTickCts (RuleFired v) = ppr v
pprTickCts LetFloatFromLet = Outputable.empty
pprTickCts (EtaExpansion v) = ppr v
pprTickCts (EtaReduction v) = ppr v
pprTickCts (BetaReduction v) = ppr v
pprTickCts (CaseOfCase v) = ppr v
pprTickCts (KnownBranch v) = ppr v
pprTickCts (CaseMerge v) = ppr v
pprTickCts (AltMerge v) = ppr v
pprTickCts (CaseElim v) = ppr v
pprTickCts (CaseIdentity v) = ppr v
pprTickCts (FillInCaseDefault v) = ppr v
pprTickCts _ = Outputable.empty
cmpTick :: Tick -> Tick -> Ordering
cmpTick a b = case (tickToTag a `compare` tickToTag b) of
GT -> GT
EQ -> cmpEqTick a b
LT -> LT
cmpEqTick :: Tick -> Tick -> Ordering
cmpEqTick (PreInlineUnconditionally a) (PreInlineUnconditionally b) = a `compare` b
cmpEqTick (PostInlineUnconditionally a) (PostInlineUnconditionally b) = a `compare` b
cmpEqTick (UnfoldingDone a) (UnfoldingDone b) = a `compare` b
cmpEqTick (RuleFired a) (RuleFired b) = a `compare` b
cmpEqTick (EtaExpansion a) (EtaExpansion b) = a `compare` b
cmpEqTick (EtaReduction a) (EtaReduction b) = a `compare` b
cmpEqTick (BetaReduction a) (BetaReduction b) = a `compare` b
cmpEqTick (CaseOfCase a) (CaseOfCase b) = a `compare` b
cmpEqTick (KnownBranch a) (KnownBranch b) = a `compare` b
cmpEqTick (CaseMerge a) (CaseMerge b) = a `compare` b
cmpEqTick (AltMerge a) (AltMerge b) = a `compare` b
cmpEqTick (CaseElim a) (CaseElim b) = a `compare` b
cmpEqTick (CaseIdentity a) (CaseIdentity b) = a `compare` b
cmpEqTick (FillInCaseDefault a) (FillInCaseDefault b) = a `compare` b
cmpEqTick _ _ = EQ
{-
************************************************************************
* *
Monad and carried data structure definitions
* *
************************************************************************
-}
newtype CoreState = CoreState {
cs_uniq_supply :: UniqSupply
}
data CoreReader = CoreReader {
cr_hsc_env :: HscEnv,
cr_rule_base :: RuleBase,
cr_module :: Module,
cr_print_unqual :: PrintUnqualified,
cr_loc :: SrcSpan, -- Use this for log/error messages so they
-- are at least tagged with the right source file
cr_visible_orphan_mods :: !ModuleSet,
#ifdef GHCI
cr_globals :: (MVar PersistentLinkerState, Bool)
#else
cr_globals :: ()
#endif
}
-- Note: CoreWriter used to be defined with data, rather than newtype. If it
-- is defined that way again, the cw_simpl_count field, at least, must be
-- strict to avoid a space leak (Trac #7702).
newtype CoreWriter = CoreWriter {
cw_simpl_count :: SimplCount
}
emptyWriter :: DynFlags -> CoreWriter
emptyWriter dflags = CoreWriter {
cw_simpl_count = zeroSimplCount dflags
}
plusWriter :: CoreWriter -> CoreWriter -> CoreWriter
plusWriter w1 w2 = CoreWriter {
cw_simpl_count = (cw_simpl_count w1) `plusSimplCount` (cw_simpl_count w2)
}
type CoreIOEnv = IOEnv CoreReader
-- | The monad used by Core-to-Core passes to access common state, register simplification
-- statistics and so on
newtype CoreM a = CoreM { unCoreM :: CoreState -> CoreIOEnv (a, CoreState, CoreWriter) }
instance Functor CoreM where
fmap = liftM
instance Monad CoreM where
mx >>= f = CoreM $ \s -> do
(x, s', w1) <- unCoreM mx s
(y, s'', w2) <- unCoreM (f x) s'
let w = w1 `plusWriter` w2
return $ seq w (y, s'', w)
-- forcing w before building the tuple avoids a space leak
-- (Trac #7702)
instance Applicative CoreM where
pure x = CoreM $ \s -> nop s x
(<*>) = ap
m *> k = m >>= \_ -> k
instance Alternative CoreM where
empty = CoreM (const Control.Applicative.empty)
m <|> n = CoreM (\rs -> unCoreM m rs <|> unCoreM n rs)
instance MonadPlus CoreM
instance MonadUnique CoreM where
getUniqueSupplyM = do
us <- getS cs_uniq_supply
let (us1, us2) = splitUniqSupply us
modifyS (\s -> s { cs_uniq_supply = us2 })
return us1
getUniqueM = do
us <- getS cs_uniq_supply
let (u,us') = takeUniqFromSupply us
modifyS (\s -> s { cs_uniq_supply = us' })
return u
runCoreM :: HscEnv
-> RuleBase
-> UniqSupply
-> Module
-> ModuleSet
-> PrintUnqualified
-> SrcSpan
-> CoreM a
-> IO (a, SimplCount)
runCoreM hsc_env rule_base us mod orph_imps print_unqual loc m
= do { glbls <- saveLinkerGlobals
; liftM extract $ runIOEnv (reader glbls) $ unCoreM m state }
where
reader glbls = CoreReader {
cr_hsc_env = hsc_env,
cr_rule_base = rule_base,
cr_module = mod,
cr_visible_orphan_mods = orph_imps,
cr_globals = glbls,
cr_print_unqual = print_unqual,
cr_loc = loc
}
state = CoreState {
cs_uniq_supply = us
}
extract :: (a, CoreState, CoreWriter) -> (a, SimplCount)
extract (value, _, writer) = (value, cw_simpl_count writer)
{-
************************************************************************
* *
Core combinators, not exported
* *
************************************************************************
-}
nop :: CoreState -> a -> CoreIOEnv (a, CoreState, CoreWriter)
nop s x = do
r <- getEnv
return (x, s, emptyWriter $ (hsc_dflags . cr_hsc_env) r)
read :: (CoreReader -> a) -> CoreM a
read f = CoreM (\s -> getEnv >>= (\r -> nop s (f r)))
getS :: (CoreState -> a) -> CoreM a
getS f = CoreM (\s -> nop s (f s))
modifyS :: (CoreState -> CoreState) -> CoreM ()
modifyS f = CoreM (\s -> nop (f s) ())
write :: CoreWriter -> CoreM ()
write w = CoreM (\s -> return ((), s, w))
-- \subsection{Lifting IO into the monad}
-- | Lift an 'IOEnv' operation into 'CoreM'
liftIOEnv :: CoreIOEnv a -> CoreM a
liftIOEnv mx = CoreM (\s -> mx >>= (\x -> nop s x))
instance MonadIO CoreM where
liftIO = liftIOEnv . IOEnv.liftIO
-- | Lift an 'IO' operation into 'CoreM' while consuming its 'SimplCount'
liftIOWithCount :: IO (SimplCount, a) -> CoreM a
liftIOWithCount what = liftIO what >>= (\(count, x) -> addSimplCount count >> return x)
{-
************************************************************************
* *
Reader, writer and state accessors
* *
************************************************************************
-}
getHscEnv :: CoreM HscEnv
getHscEnv = read cr_hsc_env
getRuleBase :: CoreM RuleBase
getRuleBase = read cr_rule_base
getVisibleOrphanMods :: CoreM ModuleSet
getVisibleOrphanMods = read cr_visible_orphan_mods
getPrintUnqualified :: CoreM PrintUnqualified
getPrintUnqualified = read cr_print_unqual
getSrcSpanM :: CoreM SrcSpan
getSrcSpanM = read cr_loc
addSimplCount :: SimplCount -> CoreM ()
addSimplCount count = write (CoreWriter { cw_simpl_count = count })
-- Convenience accessors for useful fields of HscEnv
instance HasDynFlags CoreM where
getDynFlags = fmap hsc_dflags getHscEnv
instance HasModule CoreM where
getModule = read cr_module
-- | The original name cache is the current mapping from 'Module' and
-- 'OccName' to a compiler-wide unique 'Name'
getOrigNameCache :: CoreM OrigNameCache
getOrigNameCache = do
nameCacheRef <- fmap hsc_NC getHscEnv
liftIO $ fmap nsNames $ readIORef nameCacheRef
getPackageFamInstEnv :: CoreM PackageFamInstEnv
getPackageFamInstEnv = do
hsc_env <- getHscEnv
eps <- liftIO $ hscEPS hsc_env
return $ eps_fam_inst_env eps
{-
************************************************************************
* *
Initializing globals
* *
************************************************************************
This is a rather annoying function. When a plugin is loaded, it currently
gets linked against a *newly loaded* copy of the GHC package. This would
not be a problem, except that the new copy has its own mutable state
that is not shared with the state that has already been initialized by
the original GHC package.
(NB This mechanism is sufficient for granting plugins read-only access to
globals that are guaranteed to be initialized before the plugin is loaded. If
any further synchronization is necessary, I would suggest using the more
sophisticated mechanism involving GHC.Conc.Sync.sharedCAF and rts/Globals.c to
share a single instance of the global variable among the compiler and the
plugins. Perhaps we should migrate all global variables to use that mechanism,
for robustness... -- NSF July 2013)
This leads to loaded plugins calling GHC code which pokes the static flags,
and then dying with a panic because the static flags *it* sees are uninitialized.
There are two possible solutions:
1. Export the symbols from the GHC executable from the GHC library and link
against this existing copy rather than a new copy of the GHC library
2. Carefully ensure that the global state in the two copies of the GHC
library matches
I tried 1. and it *almost* works (and speeds up plugin load times!) except
on Windows. On Windows the GHC library tends to export more than 65536 symbols
(see #5292) which overflows the limit of what we can export from the EXE and
causes breakage.
(Note that if the GHC executable was dynamically linked this wouldn't be a
problem, because we could share the GHC library it links to.)
We are going to try 2. instead. Unfortunately, this means that every plugin
will have to say `reinitializeGlobals` before it does anything, but never mind.
I've threaded the cr_globals through CoreM rather than giving them as an
argument to the plugin function so that we can turn this function into
(return ()) without breaking any plugins when we eventually get 1. working.
-}
reinitializeGlobals :: CoreM ()
reinitializeGlobals = do
linker_globals <- read cr_globals
hsc_env <- getHscEnv
let dflags = hsc_dflags hsc_env
liftIO $ restoreLinkerGlobals linker_globals
liftIO $ setUnsafeGlobalDynFlags dflags
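{- A minimal plugin skeleton showing the calling convention described above;
   the plugin and pass names are illustrative only:

     install :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
     install _opts todos = do
       reinitializeGlobals
       return (CoreDoPluginPass "SayHello" (bindsOnlyPass return) : todos)
-}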
{-
************************************************************************
* *
Dealing with annotations
* *
************************************************************************
-}
-- | Get all annotations of a given type. This happens lazily: no
-- deserialization will take place until the [a] is actually demanded, and
-- the [a] can also be empty (the UniqFM is not filtered).
--
-- This should be done once at the start of a Core-to-Core pass that uses
-- annotations.
--
-- See Note [Annotations]
getAnnotations :: Typeable a => ([Word8] -> a) -> ModGuts -> CoreM (UniqFM [a])
getAnnotations deserialize guts = do
hsc_env <- getHscEnv
ann_env <- liftIO $ prepareAnnotations hsc_env (Just guts)
return (deserializeAnns deserialize ann_env)
-- | Get at most one annotation of a given type per Unique.
getFirstAnnotations :: Typeable a => ([Word8] -> a) -> ModGuts -> CoreM (UniqFM a)
getFirstAnnotations deserialize guts
= liftM (mapUFM head . filterUFM (not . null))
$ getAnnotations deserialize guts
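{- A sketch of the intended usage pattern; the pass name and the choice of
   'deserializeWithData' as the deserialiser are illustrative:

     myPass :: ModGuts -> CoreM ModGuts
     myPass guts = do
       anns <- getAnnotations deserializeWithData guts
       -- 'anns' is a UniqFM keyed by the Unique of the annotated thing;
       -- consult it while walking over mg_binds guts ...
       return guts
-}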
{-
Note [Annotations]
~~~~~~~~~~~~~~~~~~
A Core-to-Core pass that wants to make use of annotations calls
getAnnotations or getFirstAnnotations at the beginning to obtain a UniqFM with
annotations of a specific type. This produces all annotations from interface
files read so far. However, annotations from interface files read during the
pass will not be visible until getAnnotations is called again. This is similar
to how rules work and probably isn't too bad.
The current implementation could be optimised a bit: when looking up
annotations for a thing from the HomePackageTable, we could search directly in
the module where the thing is defined rather than building one UniqFM which
contains all annotations we know of. This would work because annotations can
only be given to things defined in the same module. However, since we would
only want to deserialise every annotation once, we would have to build a cache
for every module in the HPT. In the end, it's probably not worth it as long as
we aren't using annotations heavily.
************************************************************************
* *
Direct screen output
* *
************************************************************************
-}
msg :: Severity -> SDoc -> CoreM ()
msg sev doc
= do { dflags <- getDynFlags
; loc <- getSrcSpanM
; unqual <- getPrintUnqualified
; let sty = case sev of
SevError -> err_sty
SevWarning -> err_sty
SevDump -> dump_sty
_ -> user_sty
err_sty = mkErrStyle dflags unqual
user_sty = mkUserStyle unqual AllTheWay
dump_sty = mkDumpStyle unqual
; liftIO $
(log_action dflags) dflags NoReason sev loc sty doc }
-- | Output a String message to the screen
putMsgS :: String -> CoreM ()
putMsgS = putMsg . text
-- | Output a message to the screen
putMsg :: SDoc -> CoreM ()
putMsg = msg SevInfo
-- | Output an error to the screen. Does not cause the compiler to die.
errorMsgS :: String -> CoreM ()
errorMsgS = errorMsg . text
-- | Output an error to the screen. Does not cause the compiler to die.
errorMsg :: SDoc -> CoreM ()
errorMsg = msg SevError
warnMsg :: SDoc -> CoreM ()
warnMsg = msg SevWarning
-- | Output a fatal error to the screen. Does not cause the compiler to die.
fatalErrorMsgS :: String -> CoreM ()
fatalErrorMsgS = fatalErrorMsg . text
-- | Output a fatal error to the screen. Does not cause the compiler to die.
fatalErrorMsg :: SDoc -> CoreM ()
fatalErrorMsg = msg SevFatal
-- | Output a string debugging message at verbosity level of @-v@ or higher
debugTraceMsgS :: String -> CoreM ()
debugTraceMsgS = debugTraceMsg . text
-- | Outputs a debugging message at verbosity level of @-v@ or higher
debugTraceMsg :: SDoc -> CoreM ()
debugTraceMsg = msg SevDump
-- | Show some labelled 'SDoc' if a particular flag is set or at a verbosity level of @-v -ddump-most@ or higher
dumpIfSet_dyn :: DumpFlag -> String -> SDoc -> CoreM ()
dumpIfSet_dyn flag str doc
= do { dflags <- getDynFlags
; unqual <- getPrintUnqualified
; when (dopt flag dflags) $ liftIO $
Err.dumpSDoc dflags unqual flag str doc }
{-
************************************************************************
* *
Finding TyThings
* *
************************************************************************
-}
instance MonadThings CoreM where
lookupThing name = do { hsc_env <- getHscEnv
; liftIO $ lookupGlobal hsc_env name }
{-
************************************************************************
* *
Template Haskell interoperability
* *
************************************************************************
-}
#ifdef GHCI
-- | Attempt to convert a Template Haskell name to one that GHC can
-- understand. Original TH names such as those you get when you use
-- the @'foo@ syntax will be translated to their equivalent GHC name
-- exactly. Qualified or unqualified TH names will be dynamically bound
-- to names in the module being compiled, if possible. Exact TH names
-- will be bound to the name they represent, exactly.
thNameToGhcName :: TH.Name -> CoreM (Maybe Name)
thNameToGhcName th_name = do
hsc_env <- getHscEnv
liftIO $ initTcForLookup hsc_env (lookupThName_maybe th_name)
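-- A typical (illustrative) use from a plugin pass, resolving a Prelude
-- function via its Template Haskell name:
--
--   mb_map <- thNameToGhcName 'map
--   case mb_map of
--     Just n  -> ... -- 'n' is the GHC Name bound to Prelude.map
--     Nothing -> ... -- the name could not be resolved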
#endif
|
snoyberg/ghc
|
compiler/simplCore/CoreMonad.hs
|
bsd-3-clause
| 34,652 | 0 | 18 | 10,685 | 6,418 | 3,460 | 2,958 | 511 | 4 |
module AddLocalDecl2 where
-- |This is a function
foo = x -- comment 0
where p = 2 -- comment 1
-- |Another fun
bar = a -- comment 2
where nn = 2
p = 2 -- comment 3
|
mpickering/ghc-exactprint
|
tests/examples/transform/AddLocalDecl2.hs
|
bsd-3-clause
| 179 | 0 | 6 | 54 | 39 | 26 | 13 | 6 | 1 |
-----------------------------------------------------------------------------------------
{-| Module : Types
Copyright : (c) Daan Leijen 2003
License : BSD-style
Maintainer : [email protected]
Stability : provisional
Portability : portable
Basic Types
-}
-----------------------------------------------------------------------------------------
module Types( trace, traceIgnore, traceWarning, traceError
, errorMsg, errorMsgDecl
, Decl(..), Arg(..), Type(..), Strategy(..), CBaseType(..), argName
, Def(..), DefType(..)
) where
import System.IO.Unsafe ( unsafePerformIO )
{-----------------------------------------------------------------------------------------
Tracing
-----------------------------------------------------------------------------------------}
trace :: String -> t -> t
trace s x
= seq (unsafePerformIO (putStrLn s)) x
traceIgnore :: [Char] -> Decl -> t -> t
traceIgnore msg decl x
= trace ("ignore: " ++ fill 12 msg ++ ": " ++ declName decl) x
where
fill :: Int -> String -> String
fill n s | length s >= n = s
| otherwise = s ++ replicate (n - length s) ' '
traceWarning :: [Char] -> Decl -> t -> t
traceWarning msg decl x
= trace ("****************************************************\n" ++
"warning : " ++ msg ++ ": " ++ declName decl) x
traceError :: [Char] -> Decl -> t -> t
traceError msg decl x
= trace ("****************************************************\n" ++
"error : " ++ msg ++ ": " ++ declName decl) x
errorMsg :: [Char] -> t
errorMsg str
= error ("error: " ++ str)
errorMsgDecl :: Decl -> [Char] -> t
errorMsgDecl decl str
= errorMsg (str ++ " in " ++ declName decl ++ ": " ++ show decl)
{-----------------------------------------------------------------------------------------
(Eiffel) Definitions
-----------------------------------------------------------------------------------------}
data Def = Def{ defName :: String
, defValue :: Int
, defType :: DefType
}
deriving Show
data DefType = DefInt -- normal integer
| DefMask -- bit mask
deriving Show
{-----------------------------------------------------------------------------------------
(C) Declarations
-----------------------------------------------------------------------------------------}
data Decl = Decl{ declName :: String
, declRet :: Type
, declArgs :: [Arg]
, declComment :: String
}
deriving Show
data Arg = Arg{ argNames :: [String]
, argType :: Type
}
deriving Show
argName :: Arg -> String
argName arg
= concat (argNames arg)
data Type = Int CBaseType
| IntPtr
| Int64
| Word
| Word8
| Word32
| Word64
| Void
| Char
| Double
| Float
| Ptr Type
| ByteString Strategy
| ByteStringOut Strategy
| ByteStringLen
-- typedefs
| EventId
| Id
-- temporary types
| StringLen
| StringOut CBaseType
| PointOut CBaseType
| SizeOut CBaseType
| VectorOut CBaseType
| RectOut CBaseType
| ArrayLen
| ArrayStringOut CBaseType
| ArrayIntOut CBaseType
| ArrayIntPtrOut CBaseType
| ArrayObjectOut String CBaseType
-- derived types
| Object String
| String CBaseType
| ArrayInt CBaseType
| ArrayIntPtr CBaseType
| ArrayString CBaseType
| ArrayObject String CBaseType
| Bool
| Point CBaseType
| Size CBaseType
| Vector CBaseType
| Rect CBaseType
| RefObject String -- for "GetFont" etc. returns the font via an indirect reference!
| Fun String -- function pointers
| ColorRGB CBaseType
deriving (Eq,Show)
data Strategy = Lazy | Strict
deriving (Eq,Show)
data CBaseType = CVoid | CInt | CLong | CDouble | CChar | TimeT | SizeT | CObject
deriving (Eq,Show)
|
ekmett/wxHaskell
|
wxdirect/src/Types.hs
|
lgpl-2.1
| 4,359 | 0 | 12 | 1,375 | 860 | 495 | 365 | 93 | 1 |
module MaybeIn1 where
f x@(y:ys) = Just y
|
mpickering/HaRe
|
old/testing/simplifyExpr/MaybeIn1_TokOut.hs
|
bsd-3-clause
| 46 | 0 | 8 | 12 | 25 | 14 | 11 | 2 | 1 |
-- This is similar to T11982b but with 'locker' inlined, which allows the module to
-- compile.
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ApplicativeDo #-}
module Main where
import Control.Concurrent.MVar
type Locker = forall a. IO a -> IO a
main :: IO ()
main = do
line <- getLine
lock <- newMVar ()
f line $ withMVar lock . const
f :: String -> Locker -> IO ()
f line locker = locker $ putStrLn line
|
sdiehl/ghc
|
testsuite/tests/typecheck/should_compile/T11982c.hs
|
bsd-3-clause
| 411 | 0 | 9 | 92 | 121 | 62 | 59 | 12 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.Strip
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This module provides an library interface to the @strip@ program.
module Distribution.Simple.Program.Strip (stripLib, stripExe)
where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Simple.Program
import Distribution.Simple.Utils
import Distribution.System
import Distribution.Verbosity
import Distribution.Version
import System.FilePath (takeBaseName)
runStrip :: Verbosity -> ProgramDb -> FilePath -> [String] -> IO ()
runStrip verbosity progDb path args =
case lookupProgram stripProgram progDb of
Just strip -> runProgram verbosity strip (args ++ [path])
Nothing -> unless (buildOS == Windows) $
-- Don't bother warning on windows, we don't expect them to
-- have the strip program anyway.
warn verbosity $ "Unable to strip executable or library '"
++ (takeBaseName path)
++ "' (missing the 'strip' program)"
stripExe :: Verbosity -> Platform -> ProgramDb -> FilePath -> IO ()
stripExe verbosity (Platform _arch os) progdb path =
runStrip verbosity progdb path args
where
args = case os of
OSX -> ["-x"] -- By default, stripping the ghc binary on at least
-- some OS X installations causes:
-- HSbase-3.0.o: unknown symbol `_environ'"
-- The -x flag fixes that.
_ -> []
stripLib :: Verbosity -> Platform -> ProgramDb -> FilePath -> IO ()
stripLib verbosity (Platform arch os) progdb path = do
case os of
OSX -> -- '--strip-unneeded' is not supported on OS X, iOS, AIX, or
-- Solaris. See #1630.
return ()
IOS -> return ()
AIX -> return ()
Solaris -> return ()
Windows -> -- Stripping triggers a bug in 'strip.exe' for
               -- libraries with lots of identically named modules. See
-- #1784.
return()
Linux | arch == I386 ->
-- Versions of 'strip' on 32-bit Linux older than 2.18 are
-- broken. See #2339.
let okVersion = orLaterVersion (mkVersion [2,18])
in case programVersion =<< lookupProgram stripProgram progdb of
Just v | withinRange v okVersion ->
runStrip verbosity progdb path args
_ -> warn verbosity $ "Unable to strip library '"
++ (takeBaseName path)
++ "' (version of 'strip' too old; "
++ "requires >= 2.18 on 32-bit Linux)"
_ -> runStrip verbosity progdb path args
where
args = ["--strip-unneeded"]
|
mydaum/cabal
|
Cabal/Distribution/Simple/Program/Strip.hs
|
bsd-3-clause
| 2,922 | 0 | 20 | 881 | 542 | 285 | 257 | 46 | 8 |
{-# LANGUAGE QuasiQuotes #-}
module Main where
import Expr
main :: IO ()
main = do case [expr|1 + 2|] of
[expr|$x + $x|] -> print x
_ -> return ()
|
olsner/ghc
|
testsuite/tests/quasiquotation/qq006/Main.hs
|
bsd-3-clause
| 190 | 0 | 11 | 74 | 58 | 34 | 24 | 7 | 2 |
{-# LANGUAGE ViewPatterns,DeriveDataTypeable #-}
module T4371 where
import Data.Typeable
data E1 = E1 deriving Typeable
data E2 = E2 deriving Typeable
f :: Typeable a => a-> ()
f x = case x of
(cast -> Just E1) -> ()
(cast -> Just E2) -> ()
|
tibbe/ghc
|
testsuite/tests/deSugar/should_compile/T4371.hs
|
bsd-3-clause
| 248 | 0 | 10 | 53 | 96 | 52 | 44 | 9 | 2 |
module Matrix where
import Data.List (sort)
type Row a = [a]
type Col a = [a]
type Matrix a = [Row a]
cp :: [[a]] -> [[a]]
cp [] = [[]]
cp (xs:xss) = [x:ys | x <- xs, ys <- yss]
where yss = cp xss
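-- For example, cp [[1,2],[3,4]] == [[1,3],[1,4],[2,3],[2,4]]: one list for
-- every way of choosing an element from each row.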
m1 :: Matrix Int
m1 = [[1,2,3],[4,5,6],[7,8,9]]
m2 :: Matrix Int
m2 = [[7,8,9],[4,5,6],[1,2,3]]
-- |Identity matrix
m3 :: Matrix Int
m3 = [[1,0,0],[0,1,0],[0,0,1]]
transpose :: Matrix a -> Matrix a
transpose [xs] = [[x] | x <- xs]
transpose (xs:xss) = zipWith (:) xs (transpose xss)
--
-- Exercise A
--
add1 :: Num a => Matrix a -> Matrix a
add1 = map (map (+1))
sumM :: Num a => Matrix a -> a
sumM = sum . map sum
addM :: Num a => Matrix a -> Matrix a -> Matrix a
addM = zipWith $ zipWith (+)
-- |Multiplies a matrix row with a col
multRowCol :: Num a => Row a -> Col a -> a
multRowCol m1 m2 = sum (zipWith (*) m1 m2)
-- |Multiplies a row with the given cols, resulting in a row
multRowCols :: Num a => Row a -> [Col a] -> Row a
multRowCols r = zipWith multRowCol (repeat r)
-- |Matrix multiplication
multM :: Num a => Matrix a -> Matrix a -> Matrix a
multM m1 m2 = map (flip multRowCols cols) m1
where
cols = transpose m2
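-- A quick sanity check, added for illustration: multiplying by the identity
-- matrix m3 defined above should leave a matrix unchanged.
propMultIdentity :: Bool
propMultIdentity = multM m1 m3 == m1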
--
-- Exercise B
--
-- [[],[]] -> 2,0 Matrix
-- [] -> 0,undefined Matrix
transpose' :: Matrix a -> Matrix a
transpose' [] = [[]]
transpose' (xs:xss) = zipWith (:) xs (transpose xss)
transpose'' :: Matrix a -> Matrix a
transpose'' ([]:xss) = []
transpose'' xss = map head xss : transpose'' (map tail xss)
--
-- Exercise C
--
-- any p = not . all (not p) -- true
-- any null = null . cp -- true
-- Both true
--
-- Exercise D
--
nodups :: (Ord a) => [a] -> Bool
nodups xs = and $ zipWith (/=) ys (tail ys)
where ys = sort xs
--
-- Exercise E
--
nub :: (Eq a) => [a] -> [a]
nub [] = []
nub (x:xs) = x:(nub $ filter (/= x) xs)
nub' :: (Ord a) => [a] -> [a]
nub' xs = nub'' $ sort xs
where
nub'' [] = []
nub'' (x:xs) = x:(nub'' $ dropWhile (== x) xs)
--
-- Exercise F
--
takeWhile' :: (a -> Bool) -> [a] -> [a]
takeWhile' _ [] = []
takeWhile' p (x:xs)
| p x = x:takeWhile' p xs
| otherwise = []
dropWhile' :: (a -> Bool) -> [a] -> [a]
dropWhile' _ [] = []
dropWhile' p (x:xs)
| p x = dropWhile' p xs
| otherwise = x:xs
whiteSpace :: Char -> Bool
whiteSpace ' ' = True
whiteSpace '\t' = True
whiteSpace _ = False
type Word = String
words' :: String -> [Matrix.Word]
words' [] = []
words' xs = word : words' rest
where
dropped = dropWhile whiteSpace xs
word = takeWhile (not . whiteSpace) dropped
rest = dropWhile (not . whiteSpace) dropped
--
-- Exercise G
--
minimum :: Ord a => [a] -> a
minimum = head . sort
--
-- Exercise H
--
-- So that we prune as soon as possible in all cases
|
dirkz/Thinking_Functionally_With_Haskell
|
5/Matrix.hs
|
isc
| 2,675 | 0 | 11 | 637 | 1,323 | 719 | 604 | 70 | 2 |
reciprocal :: Int -> (String, Int)
reciprocal n | n > 1 = ('0' : '.' : digits, recur)
| otherwise = error
"attempting to compute reciprocal of number <= 1"
where
(digits, recur) = divide n 1 []
divide :: Int -> Int -> [Int] -> (String, Int)
divide n c cs | c `elem` cs = ([], position c cs)
| r == 0 = (show q, 0)
| r /= 0 = (show q ++ digits, recur)
where
(q, r) = (c*10) `quotRem` n
(digits, recur) = divide n r (c:cs)
position :: Int -> [Int] -> Int
position n (x:xs) | n==x = 1
| otherwise = 1 + position n xs
showRecip :: Int -> String
showRecip n =
"1/" ++ show n ++ " = " ++
if r==0 then d else take p d ++ "(" ++ drop p d ++ ")"
where
p = length d - r
(d, r) = reciprocal n
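-- For example, showRecip 4 == "1/4 = 0.25" (terminating), while
-- showRecip 7 == "1/7 = 0.(142857)" (the repeating block is parenthesised).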
main = mainn 4
mainn 0 = return ()
mainn n = do
number <- readLn
putStrLn (showRecip number)
mainn (pred n)
|
hpacheco/HAAP
|
examples/minimalistic/HPCTest.hs
|
mit
| 903 | 2 | 10 | 296 | 466 | 236 | 230 | 26 | 2 |
module Language.ElementaryArithmetic(
exprParser
, interpFile
, interpString
)where
-- Author: [email protected]
-- Last Modified: 2016-02-09 (Tuesday) 19:09:51
import Control.Applicative hiding ((<|>))
import Data.String
import Text.ParserCombinators.Parsec
data Expr = Add Expr Expr2 | Sub Expr Expr2 | E2 Expr2
data Expr2 = Mul Expr2 Expr3 | Div Expr2 Expr3 | E3 Expr3
data Expr3 = Quote Expr | NumLit Int
class Valuable a where
eval :: a -> Int
instance Valuable Expr where
eval (Add e1 e2) = eval e1 + eval e2
eval (Sub e1 e2) = eval e1 - eval e2
eval (E2 e) = eval e
instance Valuable Expr2 where
eval (Mul e1 e2) = eval e1 * eval e2
eval (Div e1 e2) = eval e1 `quot` eval e2
eval (E3 e) = eval e
instance Valuable Expr3 where
eval (Quote e) = eval e
eval (NumLit n) = n
instance Show Expr where
show (Add e1 e2) = "(+ " ++ show e1 ++ " " ++ show e2 ++ ")"
show (Sub e1 e2) = "(- " ++ show e1 ++ " " ++ show e2 ++ ")"
show (E2 e) = show e
instance Show Expr2 where
show (Mul e1 e2) = "(* " ++ show e1 ++ " " ++ show e2 ++ ")"
show (Div e1 e2) = "(/ " ++ show e1 ++ " " ++ show e2 ++ ")"
show (E3 e) = show e
instance Show Expr3 where
show (NumLit e) = show e
show (Quote e) = show e
exprParser :: Parser Expr
exprParser = chainl1 (E2 <$> expr2Parser) exprOp
where exprOp = do op <- Add <$ char '+' <|> Sub <$ char '-' <?> "Addition(+) or subtraction(-)"
return $ \e1 (E2 e2) -> op e1 e2
expr2Parser :: Parser Expr2
expr2Parser = chainl1 (E3 <$> expr3Parser) expr2Op
where expr2Op = do op <- Mul <$ char '*' <|> Div <$ char '/' <?> "Multiplication(*) or division(/)"
return $ \e1 (E3 e2) -> op e1 e2
expr3Parser :: Parser Expr3
expr3Parser = quoteParser <|> literalParser <?> "Quoted expression or number literal"
where quoteParser = Quote <$> between (char '(') (char ')') exprParser
literalParser = NumLit <$> read <$> many1 digit
interpString :: String -> IO ()
interpString exprStr = let expr = filter (/= ' ') exprStr
in case parse (exprParser <* eof) "Elementary Arithmetic" expr of
Left err -> error $ show err
Right parsed -> print parsed
>> (print $ eval parsed)
interpFile :: String -> IO ()
interpFile file = readFile file
>>= interpString
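-- For example, interpString "1 + 2*3" strips the spaces, prints the parse
-- tree (+ 1 (* 2 3)) and then its value 7; multiplication and division bind
-- tighter than addition and subtraction because they live in the Expr2 layer.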
|
Alaya-in-Matrix/Anjelica
|
src/Language/ElementaryArithmetic.hs
|
mit
| 2,560 | 0 | 14 | 802 | 930 | 462 | 468 | 55 | 2 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module DetermineTheType where
-- Num a => a
example = 1
-- Num a => a
a = (* 9) 6
-- Num t => (t, [Char])
b = head [(0,"doge"), (1,"kitteh")]
-- [(Integer, [Char])]
c = [(0 :: Integer ,"doge"),(1,"kitteh")]
-- Bool
d = if False then True else False
-- Int
e = length [1, 2, 3, 4, 5]
-- Bool
f = (length [1, 2, 3, 4]) > (length "TACOCAT")
x = 5
y = x + 5
-- Num a => a
w = y * 10
-- Num a => a -> a
z y = y * 10
-- Fractional a => a
f2 = 4 / y
x2 = "Julie"
y2 = " <3 "
z2 = "Haskell"
-- [Char]
f3 = x2 ++ y2 ++ z2
|
candu/haskellbook
|
ch5/determineTheType.hs
|
mit
| 568 | 0 | 8 | 150 | 226 | 139 | 87 | 18 | 2 |
-- Gradually Adding Parameters
-- http://www.codewars.com/kata/555b73a81a6285b6ce000047/
module Codewars.Kata.AddingParameters where
add :: Num a => [a] -> a
add = fst . foldl (\(s, m) v -> (s + m * v, m + 1) ) (0, 1)
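-- For example, add [1, 2, 3] == 1*1 + 2*2 + 3*3 == 14: the accumulator pairs
-- the running sum with the 1-based position used as the next element's weight.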
|
gafiatulin/codewars
|
src/7 kyu/AddingParameters.hs
|
mit
| 221 | 0 | 11 | 40 | 82 | 48 | 34 | 3 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
import Control.Lens as Lens
import Data.Aeson as Aeson
import Data.Aeson.Lens as Lens
import Data.Foldable (traverse_)
import Data.Map.Strict as Map
import Data.Text as Text
import Database.SQLite.Simple as Sql
import Network.HTTP.Simple as Http
import System.IO.Unsafe (unsafePerformIO)
getSymbolList :: IO [Text]
getSymbolList = do
minfo <- getResponseBody <$> httpJSON "https://api.binance.com/api/v3/exchangeInfo" :: IO Value
pure $ minfo ^.. key "symbols" . values . key "symbol" . _String . filtered ("USDT" `isSuffixOf`)
getKLines :: Text -> IO [(Integer, Double, Double)]
getKLines symbol = do
let limit = 1000
let rstr =
"https://api.binance.com/api/v3/klines?symbol="
<> Text.unpack symbol
<> "&interval=1h&limit="
<> show limit
putStrLn $ "Getting " <> rstr
klines <- getResponseBody <$> httpJSON (parseRequest_ rstr) :: IO Value
pure . Prelude.zipWith (\t (o, c) -> (t, o, c)) [0 .. limit - 1] . fmap ((,) <$> (^?! nth 1 . _String . to (read . Text.unpack)) <*> (^?! nth 4 . _String . to (read . Text.unpack))) . toListOf values $ klines
{-# NOINLINE sqlConn #-}
sqlConn :: Connection
sqlConn = unsafePerformIO . Sql.open $ "market.db"
main :: IO ()
main = do
execute_ sqlConn "create table market (time integer, symbol text, open real, close real, unique (time, symbol))"
withExclusiveTransaction sqlConn $
getSymbolList >>= traverse (\s -> (s,) . fmap (s,) <$> getKLines s) >>= traverse_ insertAll
where
insertAll (s, rows) = do
putStrLn . Text.unpack $ "Inserting " <> s
executeMany sqlConn "insert into market values (?, ?, ?, ?)" . fmap (\(s, (t, o, c)) -> (t, s, o, c)) $ rows
|
nicball/playground
|
invest/getdata.hs
|
mit
| 1,744 | 0 | 19 | 339 | 582 | 315 | 267 | -1 | -1 |
--
-- GTKTest.hs
-- Sandbox for GTK+ 3 experiments
--
-- Jonatan H Sundqvist
-- January 13 2015
--
-- TODO | - Carmichael numbers
-- -
-- SPEC | -
-- -
-- ghc --make GTKTest.hs -odir bin/ -o bin/GTKTest
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Builder
import Control.Monad.Trans (liftIO)
hello :: (ButtonClass o) => o -> IO ()
hello b = do
set b [buttonLabel := "Hello World: " ++ (show 5)]
putStrLn "Changing button text"
oneButton :: IO ()
oneButton = do
initGUI
window <- windowNew
button <- buttonNew
scale <- adjustmentNew 20 10 360 0.2 100 150
set window [windowDefaultWidth := 60,
windowDefaultHeight := 30,
containerChild := button,
containerBorderWidth := 10]
set button [buttonLabel := "Hola Mundo"]
on button buttonActivated (hello button)
on window objectDestroy mainQuit
window `on` configureEvent $ do
(width, height) <- eventSize
liftIO $ do
putStrLn (show width ++ " x " ++ show height)
set button [buttonLabel := unwords ["Width :", show width, "| Height:", show height]]
return False
--onDestroy window mainQuit
widgetShowAll window
mainGUI
emptyWindow :: IO ()
emptyWindow = do
initGUI
window <- windowNew
window `on` deleteEvent $ liftIO mainQuit >> return False
-- i.e., on window deleteEvent (liftIO mainQuit >> return False)
widgetShowAll window
mainGUI
example :: IO ()
example = do
initGUI
window <- windowNew
label <- labelNew $ Just "Hello, world from gtk2hs!"
set window [ containerBorderWidth := 20,
containerChild := label ]
window `on` deleteEvent $ liftIO mainQuit >> return False
widgetShowAll window
mainGUI
withGlade :: IO ()
withGlade = do
initGUI
gui <- builderNew
builderAddFromFile gui "assets/interface/sampleOne.glade"
window <- builderGetObject gui castToWindow "toplevel"
window `on` deleteEvent $ liftIO mainQuit >> return False
--void $ on window deleteEvent $ liftIO (mainQuit >> return False)
widgetShowAll window
mainGUI
main :: IO ()
main = withGlade
|
SwiftsNamesake/ElegantChess
|
GTKTest.hs
|
mit
| 2,066 | 2 | 18 | 456 | 585 | 285 | 300 | 56 | 1 |
{- applying a color key and copying the texture without scaling -}
{-# LANGUAGE OverloadedStrings #-}
module Lesson10 where
--
import qualified SDL
--
import Data.Word (Word8(..))
import Linear.Affine (Point(..))
import Linear.V2 (V2(..))
import Linear.V4 (V4(..))
import Foreign.C.Types (CInt)
--
import Control.Concurrent (threadDelay)
import Control.Monad (unless)
--
import qualified Config
--
-- definition of LTexture
data LTexture = LTexture {getTx :: SDL.Texture, getWH :: (V2 CInt)}
--
class Renderable a where
renderQuad :: a -> CInt -> CInt -> Maybe (SDL.Rectangle CInt)
render :: SDL.Renderer -> a -> CInt -> CInt -> IO ()
--
instance Renderable LTexture where
renderQuad ltx x y =
Just $ SDL.Rectangle (P $ V2 x y) $ getWH ltx
render rdr ltx x y = do
SDL.copy rdr (getTx ltx) Nothing (renderQuad ltx x y)
-- definition of key
yellowKey :: Maybe (V4 Word8)
yellowKey = Just $ V4 maxBound maxBound 0 maxBound
-- definition of loading function
loadFromFile :: SDL.Renderer -> FilePath -> IO LTexture
loadFromFile rdr path = do
tempSf <- SDL.loadBMP path
wh <- SDL.surfaceDimensions tempSf
-- ************ --
SDL.surfaceColorKey tempSf SDL.$= yellowKey
-- ************ --
tx <- SDL.createTextureFromSurface rdr tempSf
SDL.freeSurface tempSf
return (LTexture tx wh)
-- note: in lazyfoo's tutorial, the function mapRGB is called;
-- however, in the sdl2 Haskell binding,
-- mapRGB is not needed and is thus flagged as deprecated
--
lesson10 :: IO ()
lesson10 = do
SDL.initialize [SDL.InitVideo]
window <- SDL.createWindow "Lesson10" Config.winConfig
renderer <- SDL.createRenderer window (-1) Config.rdrConfig
SDL.HintRenderScaleQuality SDL.$= SDL.ScaleLinear
SDL.rendererDrawColor renderer SDL.$=
V4 maxBound maxBound minBound maxBound
SDL.showWindow window
imgBg <- loadFromFile renderer "./img/10/bg.bmp"
imgHumanish <- loadFromFile renderer "./img/10/humanish.bmp"
let
loop = do
events <- SDL.pollEvents
let quit = any (== SDL.QuitEvent) $ map SDL.eventPayload events
-- *** beginning of drawing region ***
SDL.rendererDrawColor renderer SDL.$=
V4 minBound minBound maxBound maxBound
SDL.clear renderer
-- render with our own function
render renderer imgBg 0 0
render renderer imgHumanish 240 190
--
SDL.present renderer
-- *** end of drawing region ***
threadDelay 20000
unless quit loop
loop
SDL.destroyWindow window
SDL.destroyRenderer renderer
SDL.quit
--
|
jaiyalas/sdl2-examples
|
src/Lesson10.hs
|
mit
| 2,598 | 0 | 18 | 578 | 725 | 364 | 361 | 57 | 1 |
module Skull ( SkullGame
, SkullInfo(..)
, Log(..)
, initSkullInfo
, command
) where
import Message
import Data.List
import Control.Monad.State
import qualified Data.Map.Strict as Map
import Data.Random.Extras (shuffle)
import Data.Random.RVar (runRVar)
import Data.Random.Source.DevRandom (DevRandom( DevURandom ))
import Data.Char (toLower)
type Player = String
data Head = Skull | Flower
instance Show Head where
show Skull = "C"
show Flower = "F"
data Log = Public String | Private String String
type Card = Either Head Head
type Hand = [Head]
type Stack = [Card]
data Phase = Preparation | StackConstruction | Bidding | Resolution
deriving Eq
instance Show Phase where
show Preparation = "Préparation"
show StackConstruction = "Construction des piles"
show Bidding = "Enchères"
show Resolution = "Résolution"
data Status = Status { hand :: Hand, stack :: Stack, points :: Int }
data SkullInfo = SkullInfo { phase :: Phase
, players :: [Player]
, statuses :: Map.Map Player Status
, activePlayer :: Maybe Player
, lastBid :: Maybe Player
, currentBid :: Int
, logs :: [Log]
}
type SkullGame m = StateT SkullInfo m
initSkullInfo :: SkullInfo
initSkullInfo = SkullInfo Preparation [] Map.empty Nothing Nothing 0 []
initStatus :: Status
initStatus = Status [Skull, Flower, Flower, Flower] [] 0
updateStatus :: (Monad m) => Player -> (Status -> Status) -> SkullGame m ()
updateStatus p f = modify (\sk -> sk { statuses = Map.adjust f p $ statuses sk})
addPlayer :: (Monad m) => Player -> SkullGame m ()
addPlayer p = do
modify (\s -> s { players = players s ++ [p]
, statuses = Map.insert p (initStatus) (statuses s) })
publicLog $ p ++ " a rejoint la partie !"
removePlayer :: (Monad m) => Player -> SkullGame m ()
removePlayer p = do
modify (\s -> s { players = delete p (players s), statuses = Map.delete p (statuses s) })
ap' <- gets activePlayer
when (Just p == ap') $ nextPlayer
command :: (Monad m) => Message -> SkullGame m ()
command m | c == "!play" = play
| c == "!join" = joinPlayer (author m)
| any (c ==) ["!nbcards", "!nbcard","!nb"] = do
nbcards <- countStacks
publicLog $ "Il reste " ++show nbcards ++" cartes."
| c == "!recap" = recapAll
| c == "!quit" = quit >> publicLog "Partie annulée."
| c == "!suiv" = do
active <- gets activePlayer
case active of
Just p -> publicLog p
Nothing -> return ()
| "!bid" `isPrefixOf` c = bid m
| c == "!pass" = pass m
| "!pop" `isPrefixOf` c = pop m
| "!push" `isPrefixOf` c = push m
| "!rm " `isPrefixOf` c = removePlayer $ drop 4 c
| c == "!phase" = gets phase >>= (publicLog . show)
| otherwise = return ()
where c = content m
play :: (Monad m) => SkullGame m ()
play = do
phase' <- gets phase
players' <- gets players
when (phase' == Preparation) $ do
changePhase StackConstruction
publicLog $ "Ordre: " ++pprint players'++"."
nextPlayer
newTurn
where pprint = concat . intersperse ", "
changePhase :: (Monad m) => Phase -> SkullGame m ()
changePhase p = do
modify (\s -> s {phase = p})
publicLog $ "On passe en phase de "++(map toLower $ show p)++" !"
when (p == Resolution) $ do
ap' <- gets activePlayer
case ap' of
Just p' -> popSelf p' >> recapAll
Nothing -> return ()
quit :: (Monad m) => SkullGame m ()
quit = modify $ const initSkullInfo
joinPlayer :: (Monad m) => Player -> SkullGame m ()
joinPlayer p = do
players' <- gets players
phase' <- gets phase
when (phase' == Preparation && (not $ p `elem` players')) $ addPlayer p
countStacks :: (Monad m) => SkullGame m Int
countStacks = gets $ Map.foldl' (flip $ (+) . length . stack) 0 . statuses
recapPlayer :: (Monad m) => Player -> SkullGame m ()
recapPlayer p = do
hs <- gets $ Map.lookup p . statuses
case hs of
Just s -> publicLog $
p ++ ": " ++ show (length $ hand s) ++ " cartes, " ++
show (points s) ++ " point, " ++
showStack (stack s) ++ "."
Nothing -> return ()
showStack :: Stack -> String
showStack [] = "[]"
showStack s = concat . map (either (const "*") show) $ s
recapAll :: (Monad m) => SkullGame m ()
recapAll = do
skull <- get
mapM_ recapPlayer $ players skull
case lastBid skull of
Just p -> when (currentBid skull > 0) $
if (phase skull == Resolution)
then publicLog $ p ++ " doit encore retourner " ++ show (currentBid skull) ++ " fleurs."
else publicLog $ p ++ " pense pouvoir retourner " ++ show (currentBid skull) ++ " fleurs."
Nothing -> return ()
nextPlayer :: (Monad m) => SkullGame m ()
nextPlayer = do
sk <- get
case players sk of
[] -> modify (\s -> s { activePlayer = Nothing })
(x:xs) -> do
modify (\s -> s { activePlayer = Just x, players = xs ++ [x] })
publicLog $ "C'est au tour de "++x++" !"
when (Just x == lastBid sk && phase sk == Bidding) $ changePhase Resolution
bid :: (Monad m) => Message -> SkullGame m ()
bid m = do
skull <- get
max_bid <- countStacks
rdy <- allPlayed
let bidTry = read $ drop 5 $ content m
when ((phase skull == Bidding ||
(phase skull == StackConstruction && rdy)) &&
(Just $ author m) == activePlayer skull &&
bidTry <= max_bid &&
bidTry > currentBid skull) $ do
modify (\s -> s { lastBid = Just $ author m
, currentBid = bidTry})
if (bidTry == max_bid)
then changePhase Resolution
else (changePhase Bidding) >> nextPlayer
where
allPlayed = do
st <- gets statuses
return $ Map.foldl' (\acc s -> acc && (length $ stack s) >= 1) True st
pass :: (Monad m) => Message -> SkullGame m ()
pass m = do
skull <- get
when ((Just $ author m) == activePlayer skull &&
phase skull == Bidding) $ do
nextPlayer
when (activePlayer skull == lastBid skull) $ changePhase Resolution
popSelf :: (Monad m) => Player -> SkullGame m ()
popSelf p = do
go <- popOthers p p
currentBid' <- gets currentBid
when (currentBid' > 0 && go) $ popSelf p
pop :: (Monad m) => Message -> SkullGame m ()
pop m = do
sk <- get
when ((Just $ author m) == activePlayer sk &&
phase sk == Resolution) $ do
let target = drop 5 $ content m
_ <- popOthers (author m) target
recapAll
popOthers :: (Monad m) => Player -> Player -> SkullGame m Bool
popOthers p target = do
h <- revealCard target
case h of
Nothing -> return False
Just Flower -> do
modify (\sk -> sk { currentBid = currentBid sk - 1 })
skull <- get
if (currentBid skull <= 0)
then (updatePoints p) >> return False
else return True
Just Skull -> do
publicLog $ p ++" a pioché un crâne !"
removeCard p
newTurn
return False
updatePoints :: (Monad m) => Player -> SkullGame m ()
updatePoints p' = do
st <- gets statuses
case Map.lookup p' st of
Just s -> if points s > 0
then quit >> (publicLog $ "Félicitations "++p'++", tu as gagné !")
else do
updateStatus p' (\s' -> s' {points = points s + 1})
publicLog $ "Bien joué "++p'++", tu marques un point !"
newTurn
Nothing -> return ()
newTurn :: (Monad m) => SkullGame m ()
newTurn = do
collectAllStacks
modify (\sk -> sk { phase = StackConstruction
, lastBid = Nothing
, currentBid = 0
})
gets players >>= mapM_ notifHand
notifHand :: (Monad m) => Player -> SkullGame m ()
notifHand p = do
sk <- get
case Map.lookup p (statuses sk) of
Just s -> printHand p $ hand s
Nothing -> return ()
where
printHand p' h =
privateLog p' $ concat $ intersperse " " $
zipWith (\i c -> show i ++ ":" ++ show c) [(0 :: Integer)..] h
collectStack :: Status -> Status
collectStack s = s { hand = hand s ++ map (either id id) (stack s), stack = [] }
collectAllStacks :: (Monad m) => SkullGame m ()
collectAllStacks = do
sk <- get
forM_ (players sk) $ \p -> do
case Map.lookup p $ statuses sk of
Nothing -> return ()
Just s -> do
let s' = collectStack s
h <- return $ listShuffle $ hand s'
updateStatus p (\_ -> s' {hand = h})
removeCard :: (Monad m) => Player -> SkullGame m ()
removeCard p = updateStatus p (\s -> s {hand = tail $ hand s})
popStack :: Stack -> (Maybe Head, Stack)
popStack s = aux [] s
where
aux acc [] = (Nothing, acc)
aux acc (c@(Right _):cs) = aux (acc ++ [c]) cs
aux acc ((Left h):cs) = (Just h, acc ++ [Right h] ++ cs)
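-- For example (Left = still face down, Right = already revealed):
--   popStack [Right Flower, Left Skull, Left Flower]
--     == (Just Skull, [Right Flower, Right Skull, Left Flower])
-- i.e. the first face-down card is revealed and flipped in place.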
push :: (Monad m) => Message -> SkullGame m ()
push m = do
let p = author m
n = read $ drop 6 $ content m
ap' <- gets activePlayer
phase' <- gets phase
when (Just p == ap' && phase' == StackConstruction) $ do
playCard p n
notifHand p
playCard :: (Monad m) => Player -> Int -> SkullGame m ()
playCard p i = do
st <- gets statuses
case Map.lookup p st of
Nothing -> return ()
Just s -> when (i>=0 && i < (length $ hand s)) $ do
updateStatus p (pour i)
nextPlayer
where
pour i' s = s { stack = (Left $ (hand s) !! i') : stack s
, hand = removeNth i' $ hand s}
removeNth _ [] = []
removeNth 0 (_:l) = l
removeNth n (x:xs) = x : removeNth (n-1) xs
revealCard :: (Monad m) => Player -> SkullGame m (Maybe Head)
revealCard p = do
st <- gets statuses
case Map.lookup p st of
Nothing -> return Nothing
Just s ->
let (h, s') = popStack $ stack s in
case h of
Nothing -> return Nothing
Just _ -> do
updateStatus p (\st' -> st' {stack = s'})
return h
publicLog :: (Monad m) => String -> SkullGame m ()
publicLog s = modify (\sk -> sk { logs = (logs sk) ++ [Public s] })
privateLog :: (Monad m) => String -> String -> SkullGame m ()
privateLog p s = modify (\sk -> sk { logs = (logs sk) ++ [Private p s] })
listShuffle :: [a] -> [a]
listShuffle = id
--listShuffle l = runRVar (shuffle l) DevURandom
help :: (Monad m) => String -> SkullGame m ()
help user = mapM_ (privateLog user) $ lines helpMsg
where
helpMsg = "\
\ !help: Afficher cette aide.\n\
\ !play: Lancer une partie.\n\
\ !quit: Annule la partie courante.\n\
\ !join: Rejoindre une partie en cours de création.\n\
\ !rm nick: Retirer un joueur de la partie.\n\
\ !suiv: Afficher le nom du joueur courant.\n\
\ !nbcards, !nbcard!, !nb: Afficher le total des cartes dans toutes les piles.\n\
\ !recap: Afficher un récapitulatif de la partie en cours.\n\
\ !pass: Passer son tour (si les règles le permettent).\n\
\ !phase: Afficher la phase de jeu en cours.\n\
\ !push cardnb: Poser une carte face cachée devant soi.\n\
\ La correspondance carte/numero est notifiée régulièrement.\n\
\ !pop nick: Retourner la première carte cachée d'un joueur.\n\
\ !bid nb: Faire une annonce.\n\
\ "
|
gchelfi/skullbot
|
Skull.hs
|
mit
| 11,297 | 0 | 21 | 3,303 | 4,353 | 2,174 | 2,179 | 278 | 4 |
{-
H-99 Problems
Copyright 2015 (c) Adrian Nwankwo (Arcaed0x)
Problem : 16
Description : Drop every N'th element from a list.
License : MIT (See LICENSE file)
-}
dropEveryN :: [a] -> Int -> [a]
dropEveryN [] _ = []
dropEveryN lst y = dropOn lst y
where dropOn [] _ = []
dropOn (x:xs) n = if n == 1
then dropOn xs y
else x : dropOn xs (n - 1)
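-- For example, dropEveryN "abcdefghik" 3 == "abdeghk": every third element
-- of the list is dropped.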
|
Arcaed0x/H-99-Solutions
|
src/prob16.hs
|
mit
| 438 | 0 | 11 | 169 | 116 | 60 | 56 | 7 | 3 |
import Test.QuickCheck
import Text.Printf
import Control.Applicative
import Control.Monad
import Automaton
import Data.List
import Debug.Trace
tests = [
("DFA Negation", quickCheck propNegateDFA),
("DFA Union", quickCheck propUnionDFA),
("DFA Intersection", quickCheck propIntersectDFA),
("DFA Concatenation", quickCheck propConcatenateDFA),
("DFA Kleene Star", quickCheck propKleeneStarDFA),
("NFA Negation", quickCheck propNegateNFA),
("NFA Union", quickCheck propUnionNFA),
("NFA Intersection", quickCheck propIntersectNFA),
("NFA Concatenation", quickCheck propConcatenateNFA),
("NFA Kleene Star", quickCheck propKleeneStarNFA),
("DFA => NFA", quickCheck propDFA2NFA),
("DFA <= NFA", quickCheck propNFA2DFA),
("DFA Minimization", quickCheck propMinimizeDFA)
]
main = mapM_ (\(s,a) -> printf "%-25s: " s >> a) tests
bana test = replicateM_ 10 (quickCheck test)
--mains = replicateM_ 100 $ sample . join $ genMapping <$> genStates <*> genAlphabets
--main = q propNFA2DFA
------------------------------------------------------------------------
-- test data
-- dfa minimization test data
statesMin = [0..7]
alphabetsMin = [Alphabet '0', Alphabet '1']
mappingsMin = TransitionsDFA [
(0, Alphabet '0', 1),
(0, Alphabet '1', 5),
(1, Alphabet '0', 6),
(1, Alphabet '1', 2),
(2, Alphabet '0', 0),
(2, Alphabet '1', 2),
(3, Alphabet '0', 2),
(3, Alphabet '1', 6),
(4, Alphabet '0', 7),
(4, Alphabet '1', 5),
(5, Alphabet '0', 2),
(5, Alphabet '1', 6),
(6, Alphabet '0', 6),
(6, Alphabet '1', 4),
(7, Alphabet '0', 6),
(7, Alphabet '1', 2)
]
startMin = 0
acceptsMin = [2]
dfa = DFA statesMin alphabetsMin mappingsMin startMin acceptsMin
------------
statesM' = [0 .. 2]
alphabetsM' = [Alphabet '1', Alphabet '0']
mappingsM' = TransitionsDFA [
(0, Alphabet '0', 1),
(0, Alphabet '1', 2),
(1, Alphabet '0', 2),
(1, Alphabet '1', 0),
(2, Alphabet '0', 0),
(2, Alphabet '1', 2)
]
startM' = 0
acceptsM' = [1]
dfam' = DFA statesM' alphabetsM' mappingsM' startM' acceptsM'
------------
statesEq = [0 .. 2]
alphabetsEq = [Alphabet '1', Alphabet '0']
mappingsEq = TransitionsDFA [
(0, Alphabet '0', 0),
(0, Alphabet '1', 0)
]
startEq = 0
acceptsEq = [0]
dfae = DFA statesEq alphabetsEq mappingsEq startEq acceptsEq
---------
statesN = [1 .. 3]
alphabetsN = [Alphabet 'a', Alphabet 'b']
mappingsN = TransitionsNFA [
(1, Alphabet 'a', [2]),
(1, Alphabet 'b', [3]),
(2, Alphabet 'a', [1]),
(2, Alphabet 'b', [2]),
(3, Alphabet 'a', [2]),
(3, Alphabet 'b', [1])
]
startN = 1
acceptsN = [2, 3]
nfa = NFA statesN alphabetsN mappingsN startN acceptsN
statesM = [0 .. 1]
alphabetsM = [Alphabet 'a', Alphabet 'b']
mappingsM = TransitionsNFA [
(0, Epsilon, [1]),
(0, Alphabet 'b', [1]),
(1, Alphabet 'a', [0])
]
startM = 0
acceptsM = [1]
nfam = NFA statesM alphabetsM mappingsM startM acceptsM
---
statesF = [656, 101, 497]
alphabetsF = [Alphabet 'z']
mappingsF = TransitionsDFA [
(656, Alphabet 'z', 101),
(101, Alphabet 'z', 656),
(497, Alphabet 'z', 101)
]
startF = 497
acceptsF = [656]
dfaf = DFA statesF alphabetsF mappingsF startF acceptsF
---
r = read "a b" :: RE
run = automatonN $ re2nfa r
a = NFA [0] [] (TransitionsNFA []) 0 []
n = nfa2gnfa nfa
statesP = [0 .. 3]
alphabetsP = [Alphabet '0', Alphabet '1']
salphabetsP = [Alphabet '0', Alphabet '$']
transitionsP = TransitionsPDA [
(0, Epsilon, Epsilon, 1, Alphabet '$'),
(1, Alphabet '1', Alphabet '0', 2, Epsilon),
(1, Alphabet '0', Epsilon, 1, Alphabet '0'),
(2, Alphabet '1', Alphabet '0', 2, Epsilon),
(2, Alphabet '1', Alphabet '$', 3, Epsilon)
]
startP = 0
startStackP = Epsilon
acceptsP = [0, 3]
pda = PDA statesP alphabetsP salphabetsP transitionsP startP startStackP acceptsP
------------------------------------------------------------------------
-- generators
genStates :: Gen States
genStates = fmap nub . listOf1 $ seed
where seed = choose (0, 1000) :: Gen Int
genAlphabets :: Gen Alphabets
genAlphabets = nub <$> (listOf1 . elements $ Alphabet <$> ['a' .. 'z'])
genLanguage :: Alphabets -> Gen Language
genLanguage = listOf . elements . map rip
where rip (Alphabet x) = x
genMapping :: States -> Alphabets -> Gen Transitions
genMapping states alphabets =
fmap TransitionsDFA $ sequence $ map extend pairs
where pair a b = (a, b)
pairs = pair <$> states <*> alphabets
extend (a, b) = do
c <- elements states
return (a, b, c)
genTransitionNFA :: States -> Alphabets -> Gen Transitions
genTransitionNFA states alphabets =
fmap TransitionsNFA $ sequence $ map extend pairs
where pair a b = (a, b)
pairs = pair <$> states <*> alphabets
extend (a, b) = do
c <- fmap nub . listOf1 . elements $ states
return (a, b, c)
genDFA :: States -> Alphabets -> Gen DFA
genDFA states alphabets = do
start <- elements states
accepts <- fmap nub . listOf . elements $ states
mappings <- genMapping states alphabets
return $ DFA states alphabets mappings start accepts
genNFA :: States -> Alphabets -> Gen NFA
genNFA states alphabets = do
start <- elements states
accepts <- fmap nub . listOf . elements $ states
mappings <- genTransitionNFA states alphabets
return $ NFA states alphabets mappings start accepts
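-- Hypothetical GHCi spot-check for the generators above (mirrors the
-- commented-out sampler near the top of this file; not part of the original
-- test suite):
--
-- >>> sample . join $ genDFA <$> genStates <*> genAlphabets
-- ... prints a few randomly generated DFAs ...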
------------------------------------------------------------------------
------------------------------------------------------------------------
------------------------------------------------------------------------
-- properties
alphabetTestLimit = fmap $ take 4
stateTestLimit = fmap $ take 20
propGenStates :: Property
propGenStates = do
states <- genStates
property $ states == nub states
propGenMapping :: Property
propGenMapping = do
TransitionsDFA mapping <- join $ genMapping <$> genStates <*> genAlphabets
property $ mapping == nub mapping
propGenTransitionNFA :: Property
propGenTransitionNFA = do
TransitionsNFA mapping <- join $ genTransitionNFA <$> genStates <*> genAlphabets
property $ mapping == nub mapping
propNormalizeDFA :: Property
propNormalizeDFA = do
states <- genStates
alphabets <- genAlphabets
dfa <- genDFA states alphabets
dfa' <- return (normalizeDFA dfa)
forAll (genLanguage alphabets) (\ language ->
automaton dfa language == automaton dfa' language && formal dfa'
)
where formal (DFA states alphabets mappings start accepts) =
states == [0 .. (length states - 1)]
propTrimStatesDFA :: Property
propTrimStatesDFA = do
states <- genStates
alphabets <- genAlphabets
dfa <- genDFA states alphabets
dfa' <- return (trimUnreachableStates dfa)
forAll (genLanguage alphabets) (\ language ->
let prop = automaton dfa language == automaton dfa' language in
printTestCase (show dfa ++ "\n" ++ show dfa') prop
)
----------------------------
--
-- Minimize DFA
--
----------------------------
propMinimizeDFA :: Property
propMinimizeDFA = do
states <- stateTestLimit genStates
alphabets <- alphabetTestLimit genAlphabets
dfa <- genDFA states alphabets
dfa' <- return (minimizeDFA dfa)
forAll (genLanguage alphabets) (\ language ->
let prop = automaton dfa language == automaton dfa' language in
printTestCase (show dfa ++ "\n" ++ show dfa') prop
)
----------------------------
--
-- Negation
--
----------------------------
propNegateDFATwice :: Property
propNegateDFATwice = do
states <- genStates
alphabets <- genAlphabets
dfa <- genDFA states alphabets
property (dfa == (negateDFA . negateDFA) dfa)
propNegateDFA :: Property
propNegateDFA = do
alphabets <- genAlphabets
states <- genStates
dfa <- genDFA states alphabets
forAll (genLanguage alphabets) (\ language ->
automaton dfa language /= automaton (negateDFA dfa) language
)
propNegateNFA :: Property
propNegateNFA = do
alphabets <- alphabetTestLimit genAlphabets
states <- stateTestLimit genStates
nfa <- genNFA states alphabets
forAll (genLanguage alphabets) (\ language ->
let
prop = automatonN nfa language /= automatonN (negateNFA nfa) language
in
printTestCase (show nfa ++ "\n" ++ show (negateNFA nfa)) prop
)
----------------------------
--
-- Union
--
----------------------------
propUnionDFA :: Property
propUnionDFA = do
alphabets <- alphabetTestLimit genAlphabets
-- DFA 0
states0 <- stateTestLimit genStates
dfa0 <- genDFA states0 alphabets
-- DFA 1
states1 <- stateTestLimit genStates
dfa1 <- genDFA states1 alphabets
forAll (genLanguage alphabets) (\ language ->
let dfa = dfa0 `unionDFA` dfa1 in
automaton dfa0 language == automaton dfa language ||
automaton dfa1 language == automaton dfa language
)
propUnionNFA :: Property
propUnionNFA = do
alphabets <- take 3 <$> genAlphabets
-- NFA 0
states0 <- take 4 <$> genStates
nfa0 <- genNFA states0 alphabets
-- NFA 1
states1 <- take 4 <$> genStates
nfa1 <- genNFA states1 alphabets
forAll (genLanguage alphabets) (\ language ->
let nfa = nfa0 `unionNFA` nfa1 in
automatonN nfa0 language == automatonN nfa language ||
automatonN nfa1 language == automatonN nfa language
)
----------------------------
--
-- Intersection
--
----------------------------
propIntersectDFA :: Property
propIntersectDFA = do
alphabets <- alphabetTestLimit genAlphabets
-- DFA 0
states0 <- stateTestLimit genStates
dfa0 <- genDFA states0 alphabets
-- DFA 1
states1 <- stateTestLimit genStates
dfa1 <- genDFA states1 alphabets
forAll (genLanguage alphabets) (\ language ->
let dfa = dfa0 `intersectDFA` dfa1 in
automaton dfa0 language ==> automaton dfa language &&
automaton dfa1 language ==> automaton dfa language
)
propIntersectNFA :: Property
propIntersectNFA = do
alphabets <- alphabetTestLimit genAlphabets
-- NFA 0
states0 <- take 4 <$> genStates
nfa0 <- genNFA states0 alphabets
-- NFA 1
states1 <- take 4 <$> genStates
nfa1 <- genNFA states1 alphabets
forAll (genLanguage alphabets) (\ language ->
let
nfa = nfa0 `intersectNFA` nfa1
prop = automatonN nfa0 language ==> automatonN nfa language &&
automatonN nfa1 language ==> automatonN nfa language
in
printTestCase (show nfa0 ++ "\n" ++ show nfa1 ++ "\n" ++ show nfa) prop
)
----------------------------
--
-- Concatenation
--
----------------------------
propConcatenateDFA :: Property
propConcatenateDFA = do
alphabets <- alphabetTestLimit genAlphabets
-- DFA 0
states0 <- take 4 <$> genStates
dfa0 <- genDFA states0 alphabets
lang0 <- genLanguage alphabets
-- DFA 1
states1 <- take 4 <$> genStates
dfa1 <- genDFA states1 alphabets
lang1 <- genLanguage alphabets
dfa <- return $ dfa0 `concatenateDFA` dfa1
printTestCase (show dfa0 ++ "\n" ++ show dfa1 ++ "\n" ++ show dfa) (automaton dfa0 lang0 && automaton dfa1 lang1 ==> automaton dfa (lang0 ++ lang1))
propConcatenateNFA :: Property
propConcatenateNFA = do
alphabets <- genAlphabets
-- NFA 0
states0 <- genStates
nfa0 <- genNFA states0 alphabets
lang0 <- genLanguage alphabets
-- NFA 1
states1 <- genStates
nfa1 <- genNFA states1 alphabets
lang1 <- genLanguage alphabets
nfa <- return $ nfa0 `concatenateNFA` nfa1
printTestCase (show nfa0 ++ "\n" ++ show nfa1 ++ "\n" ++ show nfa) (automatonN nfa0 lang0 && automatonN nfa1 lang1 ==> automatonN nfa (lang0 ++ lang1))
----------------------------
--
-- Kleene Star
--
----------------------------
propKleeneStarDFA :: Property
propKleeneStarDFA = do
states <- take 6 <$> genStates
alphabets <- alphabetTestLimit genAlphabets
dfa <- genDFA states alphabets
forAll (take 5 <$> genLanguage alphabets) (\ language ->
let
dfaS = kleeneStarDFA dfa
repeatedLanguages = take 5 $ iterate (++ language) ""
results = automaton dfaS <$> repeatedLanguages
none = head results
once = head $ tail results
moreTimes = tail $ tail results
prop = none && once ==> (and moreTimes == or moreTimes)
in
printTestCase (show dfa ++ "\n" ++ show dfaS ++ "\n" ++ show repeatedLanguages ++ "\n" ++ show results) prop
)
propKleeneStarNFA :: Property
propKleeneStarNFA = do
states <- stateTestLimit genStates
alphabets <- alphabetTestLimit genAlphabets
nfa <- genNFA states alphabets
forAll (take 5 <$> genLanguage alphabets) (\ language ->
let
nfaS = kleeneStarNFA nfa
repeatedLanguages = take 5 $ iterate (++ language) ""
results = automatonN nfaS <$> repeatedLanguages
none = head results
once = head $ tail results
moreTimes = tail $ tail results
prop = none && once ==> (and moreTimes == or moreTimes)
in
printTestCase (show nfa ++ "\n" ++ show nfaS ++ "\n" ++ show repeatedLanguages ++ "\n" ++ show results) prop
)
----------------------------
--
-- DFA <=> NFA
--
----------------------------
propDFA2NFA :: Property
propDFA2NFA = do
states <- genStates
alphabets <- genAlphabets
dfa <- genDFA states alphabets
nfa <- return (dfa2nfa dfa)
forAll (genLanguage alphabets) (\language ->
automaton dfa language == automatonN nfa language
)
propNFA2DFA :: Property
propNFA2DFA = do
states <- stateTestLimit genStates
alphabets <- alphabetTestLimit genAlphabets
nfa <- genNFA states alphabets
forAll (genLanguage alphabets) (\language ->
let dfa = nfa2dfa nfa
prop = automatonN nfa language == automaton dfa language || automatonN nfa language /= automaton dfa language in
printTestCase (show nfa) prop
)
|
banacorn/formal-language
|
haskell-legacy/test.hs
|
mit
| 15,020 | 0 | 20 | 4,199 | 4,465 | 2,297 | 2,168 | 327 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE QuasiQuotes #-}
module Graphics.Urho3D.Graphics.Geometry(
Geometry
, SharedGeometry
, VectorSharedPtrGeometry
, VectorVectorSharedPtrGeometry
, geometryContext
, geometrySetNumVertexBuffers
, geometrySetVertexBuffer
, geometrySetIndexBuffer
, geometrySetDrawRange
, geometrySetDrawRangeWithVertex
, geometrySetLodDistance
, geometrySetRawVertexData
, geometrySetRawVertexDataMask
, geometrySetRawIndexData
, geometryDraw
, geometryGetVertexBuffers
, geometryGetNumVertexBuffers
, geometryGetVertexBuffer
, geometryGetIndexBuffer
, geometryGetPrimitiveType
, geometryGetIndexStart
, geometryGetIndexCount
, geometryGetVertexStart
, geometryGetVertexCount
, geometryGetLodDistance
, geometryGetBufferHash
, geometryGetRawData
, geometryGetRawDataShared
, geometryGetHitDistance
, geometryIsInside
, geometryIsEmpty
) where
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Cpp as C
import Control.Monad.IO.Class
import Data.Monoid
import Data.Vector (Vector)
import Foreign
import Graphics.Urho3D.Graphics.Internal.Geometry
import Graphics.Urho3D.Container.ForeignVector
import Graphics.Urho3D.Container.Ptr
import Graphics.Urho3D.Container.Vector.Common
import Graphics.Urho3D.Core.Context
import Graphics.Urho3D.Core.Object
import Graphics.Urho3D.Creatable
import Graphics.Urho3D.Graphics.Defs
import Graphics.Urho3D.Graphics.Graphics
import Graphics.Urho3D.Graphics.IndexBuffer
import Graphics.Urho3D.Graphics.VertexBuffer
import Graphics.Urho3D.Math.Ray
import Graphics.Urho3D.Math.Vector2
import Graphics.Urho3D.Math.Vector3
import Graphics.Urho3D.Monad
import Graphics.Urho3D.Parent
C.context (C.cppCtx
<> geometryCntx
<> indexBufferContext
<> objectContext
<> sharedGeometryPtrCntx
<> vertexBufferContext
<> vector2Context
<> vector3Context
<> rayContext
<> vectorContext
<> graphicsContext
<> contextContext
)
C.include "<Urho3D/Graphics/Geometry.h>"
C.using "namespace Urho3D"
geometryContext :: C.Context
geometryContext = geometryCntx
<> sharedGeometryPtrCntx
deriveParent ''Object ''Geometry
instance Creatable (Ptr Geometry) where
type CreationOptions (Ptr Geometry) = Ptr Context
newObject cntxPtr = liftIO $ [C.exp| Geometry* { new Geometry( $(Context* cntxPtr) ) } |]
deleteObject ptr = liftIO $ [C.exp| void { delete $(Geometry* ptr) } |]
sharedPtr "Geometry"
C.verbatim "typedef SharedArrayPtr<unsigned char> SharedArrayWord8;"
C.verbatim "typedef PODVector<VertexElement> PODVectorVertexElement;"
C.verbatim "typedef Vector<SharedPtr<VertexBuffer> > VectorSharedPtrVertexBuffer;"
C.verbatim "typedef SharedArrayPtr<unsigned char> SharedArrayWord8;"
C.verbatim "typedef Vector< SharedPtr<Geometry> > VectorSharedPtrGeometry;"
instance Creatable (Ptr VectorSharedPtrGeometry) where
type CreationOptions (Ptr VectorSharedPtrGeometry) = ()
newObject _ = liftIO [C.exp| VectorSharedPtrGeometry* {new Vector<SharedPtr<Geometry> >() } |]
deleteObject ptr = liftIO [C.exp| void { delete $(VectorSharedPtrGeometry* ptr) } |]
instance ReadableVector VectorSharedPtrGeometry where
type ReadVecElem VectorSharedPtrGeometry = SharedPtr Geometry
foreignVectorLength ptr = liftIO $ fromIntegral <$> [C.exp| int {$(VectorSharedPtrGeometry* ptr)->Size() } |]
foreignVectorElement ptr i = liftIO $ peekSharedPtr =<< [C.exp| SharedGeometry* { new SharedPtr<Geometry>((*$(VectorSharedPtrGeometry* ptr))[$(unsigned int i')]) } |]
where i' = fromIntegral i
instance WriteableVector VectorSharedPtrGeometry where
type WriteVecElem VectorSharedPtrGeometry = SharedPtr Geometry
foreignVectorAppend ptr e = liftIO $ withSharedPtr e $ \e' -> [C.exp| void {$(VectorSharedPtrGeometry* ptr)->Push(*$(SharedGeometry* e')) } |]
C.verbatim "typedef Vector<Vector< SharedPtr<Geometry> > > VectorVectorSharedPtrGeometry;"
instance Creatable (Ptr VectorVectorSharedPtrGeometry) where
type CreationOptions (Ptr VectorVectorSharedPtrGeometry) = ()
newObject _ = liftIO [C.exp| VectorVectorSharedPtrGeometry* {new VectorVectorSharedPtrGeometry() } |]
deleteObject ptr = liftIO [C.exp| void { delete $(VectorVectorSharedPtrGeometry* ptr) } |]
instance ReadableVector VectorVectorSharedPtrGeometry where
type ReadVecElem VectorVectorSharedPtrGeometry = Vector (SharedPtr Geometry)
foreignVectorLength ptr = liftIO $ fromIntegral <$> [C.exp| int {$(VectorVectorSharedPtrGeometry* ptr)->Size() } |]
foreignVectorElement ptr i = liftIO $ peekForeignVectorAs =<< [C.exp| VectorSharedPtrGeometry* { &(*$(VectorVectorSharedPtrGeometry* ptr))[$(unsigned int i')] } |]
where i' = fromIntegral i
instance WriteableVector VectorVectorSharedPtrGeometry where
type WriteVecElem VectorVectorSharedPtrGeometry = Vector (SharedPtr Geometry)
foreignVectorAppend ptr v = liftIO $ withForeignVector () v $ \v' -> [C.exp| void {$(VectorVectorSharedPtrGeometry* ptr)->Push(*$(VectorSharedPtrGeometry* v')) } |]
-- | Set number of vertex buffers.
geometrySetNumVertexBuffers :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> Word -- ^ num
-> m Bool
geometrySetNumVertexBuffers p n = liftIO $ do
let ptr = parentPointer p
n' = fromIntegral n
toBool <$> [C.exp| int {$(Geometry* ptr)->SetNumVertexBuffers($(unsigned int n'))} |]
-- bool SetNumVertexBuffers(unsigned num);
-- | Set a vertex buffer by index.
geometrySetVertexBuffer :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> Word -- ^ index
-> Ptr VertexBuffer -- ^ buffer
-> m Bool
geometrySetVertexBuffer p i b = liftIO $ do
let ptr = parentPointer p
i' = fromIntegral i
toBool <$> [C.exp| int {$(Geometry* ptr)->SetVertexBuffer($(unsigned int i'), $(VertexBuffer* b))} |]
-- bool SetVertexBuffer(unsigned index, VertexBuffer* buffer);
-- | Set the index buffer.
geometrySetIndexBuffer :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> Ptr IndexBuffer -- ^ buffer
-> m ()
geometrySetIndexBuffer p b = liftIO $ do
let ptr = parentPointer p
[C.exp| void {$(Geometry* ptr)->SetIndexBuffer($(IndexBuffer* b))} |]
-- void SetIndexBuffer(IndexBuffer* buffer);
-- | Set the draw range.
geometrySetDrawRange :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> PrimitiveType -- ^ type
-> Word -- ^ Index start
-> Word -- ^ Index count
-> Bool -- ^ Get used vertex range (default True)
-> m Bool
geometrySetDrawRange p pt indexStart indexCount used = liftIO $ do
let ptr = parentPointer p
pt' = fromIntegral . fromEnum $ pt
indexStart' = fromIntegral indexStart
indexCount' = fromIntegral indexCount
used' = fromBool used
toBool <$> [C.exp| int {$(Geometry* ptr)->SetDrawRange((PrimitiveType)$(int pt'), $(unsigned int indexStart'), $(unsigned int indexCount'), $(int used') != 0)} |]
-- bool SetDrawRange(PrimitiveType type, unsigned indexStart, unsigned indexCount, bool getUsedVertexRange = true);
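-- A hypothetical usage sketch (not part of the original bindings): wire a
-- Geometry to one vertex buffer and an index buffer, then draw the first six
-- indices as triangles. It assumes a PrimitiveType constructor along the
-- lines of TriangleList is exported by Graphics.Urho3D.Graphics.Defs.
--
-- setupTriangles :: Ptr Geometry -> Ptr VertexBuffer -> Ptr IndexBuffer -> IO Bool
-- setupTriangles geom vb ib = do
--   _ <- geometrySetNumVertexBuffers geom 1
--   _ <- geometrySetVertexBuffer geom 0 vb
--   geometrySetIndexBuffer geom ib
--   geometrySetDrawRange geom TriangleList 0 6 True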
-- | Set the draw range.
geometrySetDrawRangeWithVertex :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> PrimitiveType -- ^ type
-> Word -- ^ Index start
-> Word -- ^ Index count
-> Word -- ^ Vertex start
-> Word -- ^ Vertex count
-> Bool -- ^ Check illegal (default True)
-> m Bool
geometrySetDrawRangeWithVertex p pt indexStart indexCount vertexStart vertexCount checkIllegal = liftIO $ do
let ptr = parentPointer p
pt' = fromIntegral . fromEnum $ pt
indexStart' = fromIntegral indexStart
indexCount' = fromIntegral indexCount
vertexStart' = fromIntegral vertexStart
vertexCount' = fromIntegral vertexCount
checkIllegal' = fromBool checkIllegal
toBool <$> [C.exp| int {$(Geometry* ptr)->SetDrawRange((PrimitiveType)$(int pt'), $(unsigned int indexStart'), $(unsigned int indexCount')
, $(unsigned int vertexStart'), $(unsigned int vertexCount'), $(int checkIllegal') != 0)} |]
-- bool SetDrawRange(PrimitiveType type, unsigned indexStart, unsigned indexCount, unsigned vertexStart, unsigned vertexCount,
-- bool checkIllegal = true);
-- | Set the LOD distance.
geometrySetLodDistance :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> Float -- ^ distance
-> m ()
geometrySetLodDistance p d = liftIO $ do
let ptr = parentPointer p
d' = realToFrac d
[C.exp| void {$(Geometry* ptr)->SetLodDistance($(float d'))} |]
-- void SetLodDistance(float distance);
-- | Override raw vertex data to be returned for CPU-side operations.
geometrySetRawVertexData :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> SharedArrayPtr Word8 -- ^ data
-> Vector VertexElement -- ^ elements
-> m ()
geometrySetRawVertexData p datum elements = liftIO $ withForeignVector () elements $ \elements' ->
withSharedArrayPtr datum $ \_ datum' -> do
let ptr = parentPointer p
[C.exp| void {$(Geometry* ptr)->SetRawVertexData(*$(SharedArrayWord8* datum'), *$(PODVectorVertexElement* elements'))} |]
-- void SetRawVertexData(SharedArrayPtr<unsigned char> data, const PODVector<VertexElement>& elements);
-- | Override raw vertex data to be returned for CPU-side operations using a legacy vertex bitmask.
geometrySetRawVertexDataMask :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> SharedArrayPtr Word8 -- ^ data
-> Word -- ^ element mask
-> m ()
geometrySetRawVertexDataMask p datum elementMask = liftIO $ withSharedArrayPtr datum $ \_ datum' -> do
let ptr = parentPointer p
elementMask' = fromIntegral elementMask
[C.exp| void {$(Geometry* ptr)->SetRawVertexData(*$(SharedArrayWord8* datum'), $(unsigned int elementMask'))} |]
-- void SetRawVertexData(SharedArrayPtr<unsigned char> data, unsigned elementMask);
-- | Override raw index data to be returned for CPU-side operations.
geometrySetRawIndexData :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> SharedArrayPtr Word8 -- ^ data
-> Word -- ^ index size
-> m ()
geometrySetRawIndexData p datum indexSize = liftIO $ withSharedArrayPtr datum $ \_ datum' -> do
let ptr = parentPointer p
indexSize' = fromIntegral indexSize
[C.exp| void {$(Geometry* ptr)->SetRawIndexData(*$(SharedArrayWord8* datum'), $(unsigned int indexSize'))} |]
-- void SetRawIndexData(SharedArrayPtr<unsigned char> data, unsigned indexSize);
-- | Draw.
geometryDraw :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> Ptr Graphics -- ^ graphics system
-> m ()
geometryDraw p gr = liftIO $ do
let ptr = parentPointer p
[C.exp| void {$(Geometry* ptr)->Draw($(Graphics* gr))} |]
-- void Draw(Graphics* graphics);
-- | Return all vertex buffers.
geometryGetVertexBuffers :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m (Vector (SharedPtr VertexBuffer))
geometryGetVertexBuffers p = liftIO $ do
let ptr = parentPointer p
peekForeignVectorAs =<< [C.exp| const VectorSharedPtrVertexBuffer* {&$(Geometry* ptr)->GetVertexBuffers()} |]
-- const Vector<SharedPtr<VertexBuffer> >& GetVertexBuffers() const { return vertexBuffers_; }
-- | Return number of vertex buffers.
geometryGetNumVertexBuffers :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m Word
geometryGetNumVertexBuffers p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| unsigned int {$(Geometry* ptr)->GetNumVertexBuffers()} |]
-- unsigned GetNumVertexBuffers() const { return vertexBuffers_.Size(); }
-- | Return vertex buffer by index.
geometryGetVertexBuffer :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> Word -- ^ index
-> m (Maybe (Ptr VertexBuffer))
geometryGetVertexBuffer p i = liftIO $ do
let ptr = parentPointer p
i' = fromIntegral i
wrapNullPtr <$> [C.exp| VertexBuffer* {$(Geometry* ptr)->GetVertexBuffer($(unsigned int i'))} |]
-- VertexBuffer* GetVertexBuffer(unsigned index) const;
-- | Return the index buffer.
geometryGetIndexBuffer :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m (Maybe (Ptr IndexBuffer))
geometryGetIndexBuffer p = liftIO $ do
let ptr = parentPointer p
wrapNullPtr <$> [C.exp| IndexBuffer* {$(Geometry* ptr)->GetIndexBuffer()} |]
-- IndexBuffer* GetIndexBuffer() const { return indexBuffer_; }
-- | Return primitive type.
geometryGetPrimitiveType :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m PrimitiveType
geometryGetPrimitiveType p = liftIO $ do
let ptr = parentPointer p
toEnum . fromIntegral <$> [C.exp| int {(int)$(Geometry* ptr)->GetPrimitiveType()} |]
-- PrimitiveType GetPrimitiveType() const { return primitiveType_; }
-- | Return start index.
geometryGetIndexStart :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m Word
geometryGetIndexStart p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| unsigned int {$(Geometry* ptr)->GetIndexStart()} |]
-- unsigned GetIndexStart() const { return indexStart_; }
-- | Return number of indices.
geometryGetIndexCount :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m Word
geometryGetIndexCount p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| unsigned int {$(Geometry* ptr)->GetIndexCount()} |]
-- unsigned GetIndexCount() const { return indexCount_; }
-- | Return first used vertex.
geometryGetVertexStart :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m Word
geometryGetVertexStart p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| unsigned int {$(Geometry* ptr)->GetVertexStart()} |]
-- unsigned GetVertexStart() const { return vertexStart_; }
-- | Return number of used vertices.
geometryGetVertexCount :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m Word
geometryGetVertexCount p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| unsigned int {$(Geometry* ptr)->GetVertexCount()} |]
-- unsigned GetVertexCount() const { return vertexCount_; }
-- | Return LOD distance.
geometryGetLodDistance :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m Float
geometryGetLodDistance p = liftIO $ do
let ptr = parentPointer p
realToFrac <$> [C.exp| float {$(Geometry* ptr)->GetLodDistance()} |]
-- float GetLodDistance() const { return lodDistance_; }
-- | Return buffers' combined hash value for state sorting.
geometryGetBufferHash :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m Word
geometryGetBufferHash p = liftIO $ do
let ptr = parentPointer p
fromIntegral <$> [C.exp| unsigned int {$(Geometry* ptr)->GetBufferHash()} |]
-- unsigned short GetBufferHash() const;
-- | Return raw vertex and index data for CPU operations, or null pointers if not available. Will return data of the first vertex buffer if override data not set.
geometryGetRawData :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m (
Maybe (Ptr Word8) -- vertex data
, Word -- vertex size
, Maybe (Ptr Word8) -- index data
, Word -- index size
, Vector VertexElement -- elements
) -- ^ return (vertex data, vertex size, index data, index size, elements)
geometryGetRawData p = liftIO $ alloca $ \vertexData -> alloca $ \vertexSize -> alloca $ \indexData -> alloca $ \indexSize -> alloca $ \elements -> do
let ptr = parentPointer p
[C.block| void {
const unsigned char *vertexData;
unsigned vertexSize;
const unsigned char *indexData;
unsigned indexSize;
const PODVectorVertexElement* elements;
$(Geometry* ptr)->GetRawData(vertexData, vertexSize, indexData, indexSize, elements);
*$(const unsigned char** vertexData) = vertexData;
*$(unsigned int* vertexSize) = vertexSize;
*$(const unsigned char** indexData) = indexData;
*$(unsigned int* indexSize) = indexSize;
*$(const PODVectorVertexElement** elements) = elements;
} |]
(,,,,)
<$> (wrapNullPtr . castPtr <$> peek vertexData)
<*> (fromIntegral <$> peek vertexSize)
<*> (wrapNullPtr . castPtr <$> peek indexData)
<*> fmap fromIntegral (peek indexSize)
<*> (peekForeignVectorAs =<< peek elements)
-- void GetRawData(const unsigned char*& vertexData, unsigned& vertexSize, const unsigned char*& indexData, unsigned& indexSize, const PODVector<VertexElement>*& elements) const;
-- | Return raw vertex and index data for CPU operations, or null pointers if not available. Will return data of the first vertex buffer if override data not set.
geometryGetRawDataShared :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m (
Maybe (SharedArrayPtr Word8) -- vertex data
, Maybe (SharedArrayPtr Word8) -- index data
, Vector VertexElement -- elements
) -- ^ return (vertex data, index data, elements)
geometryGetRawDataShared p = liftIO $ alloca $ \vertexData -> alloca $ \vertexSizePtr -> alloca $ \indexData -> alloca $ \indexSizePtr -> alloca $ \elements -> do
let ptr = parentPointer p
[C.block| void {
SharedArrayPtr<unsigned char> vertexData;
unsigned vertexSize;
SharedArrayPtr<unsigned char> indexData;
unsigned indexSize;
const PODVectorVertexElement* elements;
$(Geometry* ptr)->GetRawDataShared(vertexData, vertexSize, indexData, indexSize, elements);
*$(SharedArrayWord8** vertexData) = new SharedArrayPtr<unsigned char>(vertexData);
*$(unsigned int* vertexSizePtr) = vertexSize;
*$(SharedArrayWord8** indexData) = new SharedArrayPtr<unsigned char>(indexData);
*$(unsigned int* indexSizePtr) = indexSize;
*$(const PODVectorVertexElement** elements) = elements;
} |]
vertexSize <- fromIntegral <$> peek vertexSizePtr
indexSize <- fromIntegral <$> peek indexSizePtr
(,,)
<$> (fmap wrapNullPtr . peekSharedArrayPtr vertexSize =<< peek vertexData)
<*> (fmap wrapNullPtr . peekSharedArrayPtr indexSize =<< peek indexData)
<*> (peekForeignVectorAs =<< peek elements)
-- void GetRawDataShared(SharedArrayPtr<unsigned char>& vertexData, unsigned& vertexSize, SharedArrayPtr<unsigned char>& indexData,
-- unsigned& indexSize, const PODVector<VertexElement>*& elements) const;
-- | Return ray hit distance or infinity if no hit. Requires raw data to be set. Optionally return hit normal and hit uv coordinates at intersect point.
geometryGetHitDistance :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> Ray -- ^ ray
-> m (Float, Vector3, Vector2)
geometryGetHitDistance p ray = liftIO $ with ray $ \ray' -> alloca $ \hv' -> alloca $ \uv' -> do
let ptr = parentPointer p
d <- realToFrac <$> [C.exp| float {$(Geometry* ptr)->GetHitDistance(*$(Ray* ray'), $(Vector3* hv'), $(Vector2* uv'))} |]
(,,)
<$> pure d
<*> peek hv'
<*> peek uv'
-- float GetHitDistance(const Ray& ray, Vector3* outNormal = 0, Vector2* outUV = 0) const;
-- | Return whether or not the ray is inside geometry.
geometryIsInside :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> Ray -- ^ ray
-> m Bool
geometryIsInside p ray = liftIO $ with ray $ \ray' -> do
let ptr = parentPointer p
toBool <$> [C.exp| int {$(Geometry* ptr)->IsInside(*$(Ray* ray'))} |]
-- bool IsInside(const Ray& ray) const;
-- | Return whether has empty draw range.
geometryIsEmpty :: (Parent Geometry a, Pointer p a, MonadIO m)
=> p -- ^ Pointer to Geometry or ascentor
-> m Bool
geometryIsEmpty p = liftIO $ do
let ptr = parentPointer p
toBool <$> [C.exp| int {$(Geometry* ptr)->IsEmpty()} |]
-- bool IsEmpty() const { return indexCount_ == 0 && vertexCount_ == 0; }
|
Teaspot-Studio/Urho3D-Haskell
|
src/Graphics/Urho3D/Graphics/Geometry.hs
|
mit
| 20,126 | 0 | 24 | 3,345 | 3,735 | 2,018 | 1,717 | -1 | -1 |
module Purecoin.Network.DataTypes
( NetworkAddress(..)
, InventoryVector(..)
, Version(..)
, GetBlocks(..)
, Inv(..)
, protocolVersion, protocolSubVersion
) where
import Control.Applicative ((<$>), (<*>))
import Data.Word (Word16, Word32, Word64)
import Data.Time (UTCTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime, utcTimeToPOSIXSeconds)
import Data.ByteString (ByteString, pack)
import Purecoin.Core.Serialize ( Serialize
, get, getWord16be, getWord32le, getWord64le, getList, getBytes, getVarByteString
, put, putWord16be, putWord32le, putWord64le, putList, putByteString, putVarByteString
)
import Purecoin.Core.Hash (Hash)
data NetworkAddress = NetworkAddress { naServices :: Word64
, naIPv6 :: ByteString
, naPort :: Word16
} deriving Show
data InventoryVector = InventoryVector { ivType :: Word32
, ivHash :: Hash
} deriving Show
data Version = Version { version :: Word32
, services :: Word64
, timestamp :: UTCTime
, addr_me :: NetworkAddress
, addr_you :: NetworkAddress
, nonce :: Word64
, sub_version_num :: ByteString
, start_height :: Word32
} deriving Show
data GetBlocks = GetBlocks { gbVersion :: Word32
, gbHash_start :: [Hash]
, gbHash_stop :: Hash
} deriving Show
newtype Inv = Inv { inventory :: [InventoryVector] } deriving Show
instance Serialize NetworkAddress where
get = NetworkAddress <$> getWord64le <*> getBytes 16 <*> getWord16be
put (NetworkAddress s ip p) = putWord64le s >> putByteString ip >> putWord16be p
instance Serialize InventoryVector where
get = InventoryVector <$> getWord32le <*> get
put (InventoryVector t h) = putWord32le t >> put h
instance Serialize Version where
get = Version <$> getWord32le
<*> getWord64le
<*> (posixSecondsToUTCTime . fromIntegral <$> getWord64le)
<*> get
<*> get
<*> getWord64le
<*> getVarByteString
<*> getWord32le
put (Version v s t am ay n sv sh) = putWord32le v
>> putWord64le s
>> (putWord64le . round . utcTimeToPOSIXSeconds $ t)
>> put am
>> put ay
>> putWord64le n
>> putVarByteString sv
>> putWord32le sh
instance Serialize GetBlocks where
get = GetBlocks <$> getWord32le <*> getList <*> get
put (GetBlocks v hs he) = putWord32le v >> putList hs >> put he
instance Serialize Inv where
get = Inv <$> getList
put (Inv ivs) = putList ivs
protocolVersion :: Word32
protocolVersion = 32002
protocolSubVersion :: ByteString
protocolSubVersion = pack (map (toEnum.fromEnum) "Purecoin")
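-- A small construction sketch (hypothetical, not part of the original module):
-- an address advertising no services, with an all-zero IPv6 field and the
-- conventional port 8333, built only from names imported above.
--
-- exampleAddr :: NetworkAddress
-- exampleAddr = NetworkAddress 0 (pack (replicate 16 0)) 8333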
|
laanwj/Purecoin
|
Purecoin/Network/DataTypes.hs
|
mit
| 3,286 | 0 | 15 | 1,253 | 739 | 421 | 318 | 70 | 1 |
module Y2020.M09.D08.Solution where
{--
Yesterday* ...
--}
import Y2020.M09.D01.Solution
{--
*time is so fluid for me, ... it being time, and all ... *rolleyes*
... we build this city ...
... wait: we built this ONTOLOGY of words to novels in the top 100-read
books in gutenberg.
OR. DID. WE?
I concluded that exercise with: "I noticed there are gutenberg-artefacts, so
I'm removing words that occur in all 100 books."
But was that a valid assertion?
What other assertions can we make of these data we collected?
Today's Haskell Exercise: data analyses.
--}
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Y2020.M08.D25.Solution (gutenbergIndex, workingDir, gutenbergTop100Index)
import Y2020.M08.D26.Solution (importLibrary)
import Y2020.M08.D31.Solution (stopwords, loadStopwords)
type WordOccurrences = Map String Int
-- A little helper-function to load the ontology
ont :: IO Ontology
ont = let nupes = loadStopwords stopwords
idx = gutenbergIndex (workingDir ++ gutenbergTop100Index)
lib = idx >>= importLibrary
in bookVec <$> nupes <*> lib
{--
>>> ont
... zillions of entries later ...
>>> let mont = it
... `mont` stands for `my ont(ology)`
... now we need to get the WordOccurrences
>>> let wc = ontology mont
--}
-- 0. How many words are in all the books?
allWordsCount :: WordOccurrences -> Int
allWordsCount = length
{--
>>> allWordsCount wc
251429
That took a little while.
--}
-- 1. How many words, and what are the words that occur in all 100 books?
inAllBooks :: WordOccurrences -> Set String
inAllBooks = Map.keysSet . Map.filter (== 100)
{--
>>> let iab = inAllBooks wc
>>> iab
{"body","ebook","ebooks","free","gutenberg","library",
"page","project","search","start"}
>>> length iab
10
--}
-- 2. How many words, and what are the words, that occur only once?
onlyOneOccurringWords :: WordOccurrences -> Set String
onlyOneOccurringWords = Map.keysSet . Map.filter (== 1)
{--
Hm:
>>> let ooow = onlyOneOccurringWords wc
>>> ooow
lots of messy words, but then we also have words of this form:
... "liveries\226\128\157" ...
It looks like we may need to clean up our clean-up algorithm, because:
>>> length ooow
195035
Is a lot of words. Some of those words may be useful in clustering (later).
--}
-- 3. Which words occur only once in a book (any book) ... even if those words
-- occur in multiple books, if they occur just once in any book, what are these
-- words?
oneWordInBook :: Ontology -> Set String
oneWordInBook = Set.unions . map onlyOneOccurringWords . Map.elems
{--
>>> let ooib = oneWordInBook mont
>>> ooib
... lots of words, again ...
>>> length ooib
205354
Good Heavens!
--}
-- 4. Okay. Remove all the one-word and all-books-words from out ontology.
removeInfreqs :: Set String -> Ontology -> Ontology
removeInfreqs = Map.map . flip (foldr ri')
-- well, to remove words from the ontology, we have to remove a word from
-- each book word-count.
ri' :: String -> Map String Int -> Map String Int
ri' = Map.delete
-- ummm ... that was easy. TOO EASY! :<
{--
>>> let newOnt = removeInfreqs (Set.unions [iab, ooow, ooib]) mont
>>> length newOnt
100
... as expected, as there are still 100 books being codified, but ...
>>> let newWc = ontology newOnt
>>> allWordsCount newWc
2333
Wow! Huge difference! Good or bad?
We will find that out on another, fine Haskell-problem-solving day!
--}
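{--
A hedged composition sketch (uses only names introduced above; not part of the
original exercise): one function that strips the all-books words, the
corpus-wide single-occurrence words, and the per-book single-occurrence words
in one pass, just as the GHCi session above did by hand.

cleanOntology :: Ontology -> Ontology
cleanOntology o =
   let wc = ontology o
   in  removeInfreqs (Set.unions [inAllBooks wc, onlyOneOccurringWords wc, oneWordInBook o]) o
--}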
|
geophf/1HaskellADay
|
exercises/HAD/Y2020/M09/D08/Solution.hs
|
mit
| 3,465 | 0 | 11 | 638 | 361 | 211 | 150 | 27 | 1 |
module Day1 (day1, day1', run) where
day1 :: String -> Int
day1 = foldl move 0
move :: Int -> Char -> Int
move x c = case c of
'(' -> x + 1
')' -> x - 1
_ -> x
day1' :: String -> Int
day1' = day1'' 0 0
day1'' :: Int -> Int -> String -> Int
day1'' (-1) index _ = index
day1'' _ _ [] = -1
day1'' currentFloor index (input:rest) =
day1'' (move currentFloor input) (index + 1) rest
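-- Worked examples (taken from the well-known puzzle statement rather than this
-- file; the results follow directly from the definitions above):
--
-- >>> day1 "(())"
-- 0
-- >>> day1' "()())"
-- 5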
-- Input
run :: IO ()
run = do
putStrLn "Day 1 results: "
input <- readFile "inputs/day1.txt"
putStrLn $ " " ++ show (day1 input)
putStrLn $ " " ++ show (day1' input)
|
brianshourd/adventOfCode2015
|
src/Day1.hs
|
mit
| 630 | 0 | 10 | 206 | 269 | 137 | 132 | 21 | 3 |
import Expr
data DefUseE = Def String Expr | Use String
traceMExpr :: [DefUseE] -> [(String, Int)]
|
kdungs/coursework-functional-programming
|
12/trans.hs
|
mit
| 102 | 0 | 7 | 20 | 41 | 24 | 17 | 3 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Yesod.Default.Main
( defaultMain
, defaultMainLog
, defaultRunner
, defaultDevelApp
, LogFunc
) where
import Yesod.Default.Config
import Network.Wai (Application)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort, settingsHost, settingsOnException)
import qualified Network.Wai.Handler.Warp as Warp
import System.Directory (doesDirectoryExist, removeDirectoryRecursive)
import Network.Wai.Middleware.Gzip (gzip, GzipFiles (GzipCacheFolder), gzipFiles, def)
import Network.Wai.Middleware.Autohead (autohead)
import Network.Wai.Middleware.Jsonp (jsonp)
import Control.Monad (when)
import System.Environment (getEnvironment)
import Data.Maybe (fromMaybe)
import Safe (readMay)
import Control.Monad.Logger (Loc, LogSource, LogLevel (LevelError), liftLoc)
import System.Log.FastLogger (LogStr, toLogStr)
import Language.Haskell.TH.Syntax (qLocation)
#ifndef WINDOWS
import qualified System.Posix.Signals as Signal
import Control.Concurrent (forkIO, killThread)
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
#endif
-- | Run your app, taking environment and port settings from the
-- commandline.
--
-- @'fromArgs'@ helps parse a custom configuration
--
-- > main :: IO ()
-- > main = defaultMain (fromArgs parseExtra) makeApplication
--
defaultMain :: (Show env, Read env)
=> IO (AppConfig env extra)
-> (AppConfig env extra -> IO Application)
-> IO ()
defaultMain load getApp = do
config <- load
app <- getApp config
runSettings defaultSettings
{ settingsPort = appPort config
, settingsHost = appHost config
} app
type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO ()
-- | Same as @defaultMain@, but gets a logging function back as well as an
-- @Application@ to install Warp exception handlers.
--
-- Since 1.2.5
defaultMainLog :: (Show env, Read env)
=> IO (AppConfig env extra)
-> (AppConfig env extra -> IO (Application, LogFunc))
-> IO ()
defaultMainLog load getApp = do
config <- load
(app, logFunc) <- getApp config
runSettings defaultSettings
{ settingsPort = appPort config
, settingsHost = appHost config
, settingsOnException = const $ \e -> when (shouldLog' e) $ logFunc
$(qLocation >>= liftLoc)
"yesod"
LevelError
(toLogStr $ "Exception from Warp: " ++ show e)
} app
where
shouldLog' = Warp.defaultShouldDisplayException
-- | Run your application continously, listening for SIGINT and exiting
-- when received
--
-- > withYourSite :: AppConfig DefaultEnv -> Logger -> (Application -> IO a) -> IO ()
-- > withYourSite conf logger f = do
-- > Settings.withConnectionPool conf $ \p -> do
-- > runConnectionPool (runMigration yourMigration) p
-- > defaultRunner f $ YourSite conf logger p
defaultRunner :: (Application -> IO ()) -> Application -> IO ()
defaultRunner f app = do
-- clear the .static-cache so we don't have stale content
exists <- doesDirectoryExist staticCache
when exists $ removeDirectoryRecursive staticCache
#ifdef WINDOWS
f (middlewares app)
#else
tid <- forkIO $ f (middlewares app) >> return ()
flag <- newEmptyMVar
_ <- Signal.installHandler Signal.sigINT (Signal.CatchOnce $ do
putStrLn "Caught an interrupt"
killThread tid
putMVar flag ()) Nothing
takeMVar flag
#endif
where
middlewares = gzip gset . jsonp . autohead
gset = def { gzipFiles = GzipCacheFolder staticCache }
staticCache = ".static-cache"
-- | Run your development app using a custom environment type and loader
-- function
defaultDevelApp
:: (Show env, Read env)
=> IO (AppConfig env extra) -- ^ A means to load your development @'AppConfig'@
-> (AppConfig env extra -> IO Application) -- ^ Get your @Application@
-> IO (Int, Application)
defaultDevelApp load getApp = do
conf <- load
env <- getEnvironment
let p = fromMaybe (appPort conf) $ lookup "PORT" env >>= readMay
pdisplay = fromMaybe p $ lookup "DISPLAY_PORT" env >>= readMay
putStrLn $ "Devel application launched: http://localhost:" ++ show pdisplay
app <- getApp conf
return (p, app)
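-- A hypothetical pairing, mirroring the usual scaffolding (it assumes
-- @loadConfig@, @configSettings@ and @Development@ from "Yesod.Default.Config"
-- and a site-specific @parseExtra@):
--
-- > getApplicationDev :: IO (Int, Application)
-- > getApplicationDev = defaultDevelApp loader makeApplication
-- >   where loader = Yesod.Default.Config.loadConfig (configSettings Development)
-- >             { csParseExtra = parseExtra }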
|
wujf/yesod
|
yesod/Yesod/Default/Main.hs
|
mit
| 4,460 | 0 | 16 | 999 | 921 | 512 | 409 | 85 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
module Pump.CF.Type where
import Autolib.Size
import Autolib.Hash
import Autolib.Reader
import Autolib.ToDoc
import Data.Typeable
import GHC.Generics
data Zerlegung = Zerlegung
{ u :: String, v :: String
, x :: String, y :: String, z :: String }
deriving (Eq, Ord, Typeable, Generic)
$(derives [makeReader, makeToDoc] [''Zerlegung])
instance Show Zerlegung where show = render . toDoc
instance Hash Zerlegung
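-- An illustrative value (hypothetical, not part of the original module): one
-- uvxyz pumping-lemma split of the word "aabb".
--
-- beispiel :: Zerlegung
-- beispiel = Zerlegung { u = "a", v = "a", x = "", y = "b", z = "b" }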
|
marcellussiegburg/autotool
|
collection/src/Pump/CF/Type.hs
|
gpl-2.0
| 536 | 2 | 9 | 129 | 146 | 85 | 61 | 16 | 0 |
module Cfg where
import Data.Maybe
import Data.List
import System.Path
import System.Directory
import Language.Haskell.Interpreter
data Cfg = Cfg {files :: [String], views :: [(String,String)]} deriving (Show)
defaultCfg = Cfg {files = [],views=[(".pdf","gv"),(".ps","gv")]} -- (1)
parsedCfg = defaultCfg {files = ["theory.bib"]}
mkVal f n setFields = interpret (fromMaybe (show $ f defaultCfg) $ lookup n setFields)
fromEither a v =
case v of
Left _ -> a
Right b -> b
--ToDo:
-- refactor so adding Cfg fields is easier;
-- right now adding a field means touching the Cfg record, the defaults (1),
-- a mkVal line like (2)/(3), and the final record construction (4)
makeCfg :: FilePath -> Interpreter Cfg
makeCfg home = do
loadModules [fromMaybe "" (absNormPath home ".hsbib/config.hs")]
exports <- getModuleExports "UserConfig"
setTopLevelModules ["UserConfig"]
setImportsQ [("Prelude", Nothing)]
let setKeys = map name exports
setVals <- mapM eval setKeys
let setFields = zip setKeys setVals
fileVal <- mkVal files "files" setFields (as :: [String]) -- (2)
viewVal <- mkVal views "views" setFields (as :: [(String,String)]) -- (3)
return Cfg {files=fileVal,views=viewVal} -- (4)
getCfg :: IO Cfg
getCfg = do
home <- getHomeDirectory
r <- runInterpreter (makeCfg home)
return $ fromEither defaultCfg r
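-- A hypothetical ~/.hsbib/config.hs that makeCfg would accept (not part of
-- this module); bindings named files or views override the defaults in
-- defaultCfg:
--
-- module UserConfig where
-- files = ["theory.bib", "papers.bib"]
-- views = [(".pdf", "evince"), (".ps", "gv")]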
|
zaxtax/hsbib
|
Cfg.hs
|
gpl-2.0
| 1,230 | 0 | 11 | 228 | 444 | 236 | 208 | 31 | 2 |