code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Prompt.MyWindow
-- Copyright : Devin Mullins <[email protected]>
-- Andrea Rossato <[email protected]>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Devin Mullins <[email protected]>
-- Andrea Rossato <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- xprompt operations to bring windows to you, and bring you to windows.
--
-----------------------------------------------------------------------------
module XMonad.Prompt.MyWindow
(
-- * Usage
-- $usage
windowPromptGoto,
windowPromptBring,
windowPromptBringCopy,
WindowPrompt,
focusRecent,
) where
import qualified Data.Map as M
import qualified XMonad.StackSet as W
import XMonad
import XMonad.Prompt
import XMonad.Actions.CopyWindow
import XMonad.Actions.MyWindowBringer
import qualified XMonad.Hooks.MyWindowHistory as WH
-- $usage
-- WindowPrompt brings windows to you and you to windows.
-- That is to say, it pops up a prompt with window names, in case you forgot
-- where you left your XChat.
--
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Prompt
-- > import XMonad.Prompt.Window
--
-- and in the keys definition:
--
-- > , ((modm .|. shiftMask, xK_g ), windowPromptGoto def)
-- > , ((modm .|. shiftMask, xK_b ), windowPromptBring def)
--
-- The autoComplete option is a handy complement here:
--
-- > , ((modm .|. shiftMask, xK_g ), windowPromptGoto
-- > def { autoComplete = Just 500000 } )
--
-- The \'500000\' is the number of microseconds to pause before sending you to
-- your new window. This is useful so that you don't accidentally send some
-- keystrokes to the selected client.
--
-- For detailed instructions on editing the key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
data WindowPrompt = Goto | Bring | BringCopy
instance XPrompt WindowPrompt where
showXPrompt Goto = ""
showXPrompt Bring = "Bring window: "
showXPrompt BringCopy = "Bring a copy: "
commandToComplete _ c = c
nextCompletion _ = getNextCompletion
windowPromptGoto, windowPromptBring, windowPromptBringCopy :: XPConfig -> X ()
windowPromptGoto = doPrompt Goto
windowPromptBring = doPrompt Bring
windowPromptBringCopy = doPrompt BringCopy
-- | Pops open a prompt with window titles. Choose one, and you will be
-- taken to the corresponding workspace.
doPrompt :: WindowPrompt -> XPConfig -> X ()
doPrompt t c = do
a <- case t of
Goto -> fmap gotoAction windowMap
Bring -> fmap bringAction windowMap
BringCopy -> fmap bringCopyAction windowMap
wm <- windowMap
wmr <- windowMapRecent
mkXPrompt t c (compList wmr) a
where
winAction a m = flip whenJust (windows . a) . flip M.lookup m
gotoAction = winAction W.focusWindow
bringAction = winAction bringWindow
bringCopyAction = winAction bringCopyWindow
compList m s = return . flip myPopWordsFilter s . map fst $ m
-- | Brings a copy of the specified window into the current workspace.
bringCopyWindow :: Window -> WindowSet -> WindowSet
bringCopyWindow w ws = copyWindow w (W.currentTag ws) ws
focusRecent :: X ()
focusRecent = do
ws <- currentWorkspace
wh <- WH.windowHistory
if wins ws wh == []
then return ()
else windows $ W.focusWindow (head (wins ws wh))
where wins ws wh = recentWindows ws wh
| eb-gh-cr/XMonadContrib1 | XMonad/Prompt/MyWindow.hs | bsd-3-clause | 3,593 | 0 | 13 | 782 | 552 | 309 | 243 | 49 | 3 |
-- Copyright © 2012 Frank S. Thomas <[email protected]>
-- All rights reserved.
--
-- Use of this source code is governed by a BSD-style license that
-- can be found in the LICENSE file.
-- | Ohloh API Reference: <http://meta.ohloh.net/referencecontributor_fact/>
module Web.Ohloh.ContributorFact (
ContributorFact(..),
xpContributorFact
) where
import Text.XML.HXT.Arrow.Pickle
import Web.Ohloh.Common
import Web.Ohloh.ContributorLanguageFact
-- | 'ContributorFact' contains a selection of high-level statistics about a
-- person who committed source code to a 'Web.Ohloh.Project.Project'.
data ContributorFact = ContributorFact {
cfContributorId :: String,
cfAccountId :: Maybe String,
cfAccountName :: Maybe String,
cfAnalysisId :: String,
cfContributorName :: String,
cfPrimaryLanguageId :: String,
cfPrimaryLanguageNiceName :: String,
cfCommentRatio :: Double,
cfFirstCommitTime :: String,
cfLastCommitTime :: String,
cfManMonths :: Int,
cfCommits :: Int,
cfMedianCommits :: Int,
cfContributorLanguageFacts :: Maybe [ContributorLanguageFact]
} deriving (Eq, Read, Show)
instance XmlPickler ContributorFact where
xpickle = xpContributorFact
instance ReadXmlString ContributorFact
instance ShowXmlString ContributorFact
xpContributorFact :: PU ContributorFact
xpContributorFact =
xpElem "contributor_fact" $
xpWrap (uncurry14 ContributorFact,
\(ContributorFact ci aci acn ai cn pli plnn cr fct lct mm c mc clf) ->
(ci, aci, acn, ai, cn, pli, plnn, cr, fct, lct, mm, c, mc, clf)) $
xp14Tuple (xpElem "contributor_id" xpText0)
(xpOption (xpElem "account_id" xpText0))
(xpOption (xpElem "account_name" xpText0))
(xpElem "analysis_id" xpText0)
(xpElem "contributor_name" xpText0)
(xpElem "primary_language_id" xpText0)
(xpElem "primary_language_nice_name" xpText0)
(xpElem "comment_ratio" xpPrim)
(xpElem "first_commit_time" xpText0)
(xpElem "last_commit_time" xpText0)
(xpElem "man_months" xpInt)
(xpElem "commits" xpInt)
(xpElem "median_commits" xpInt)
(xpOption (xpElem "contributor_language_facts" xpickle))
| fthomas/ohloh-hs | Web/Ohloh/ContributorFact.hs | bsd-3-clause | 2,299 | 0 | 11 | 519 | 468 | 266 | 202 | 46 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
module Util where
import qualified Numeric
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Control.Monad.RWS.Class
import Data.List.Split (chunksOf)
import qualified Data.Binary as Bin
import qualified Data.Binary.Get as Bin
import Data.Int
import Data.Word
import Data.List ((!!))
import Data.Bits
import Data.Bits.Lens
import Linear
import Overture
import Prelude ()
class Display a where
display :: a -> String
default display :: (Show a) => a -> String
display = show
instance Display Word8
instance Display Word16
instance Display Word32
instance Display Word64
instance Display Int8
instance Display Int16
instance Display Int32
instance Display Int64
instance Display Int
instance Display Float
instance Display Double
instance Display Bool
instance Display Integer
instance Display Rational where
display n = show (realToFrac n :: Float)
instance Display Char where
display = return
instance Display String where
display = id
instance Display BS.ByteString where
display = intercalate "\n"
. ([""] ++)
. map ((replicate 8 ' ' ++) . unwords)
. chunksOf 0x10
. map formatByte
. BS.unpack
instance (Display a, Display b) => Display (a, b) where
display (a, b) = concat [ "(", display a, ", ", display b, ")"]
instance (Show a) => Display (V2 a)
instance (Show a) => Display (V3 a)
instance (Show a) => Display (V4 a)
instance Display [Word8]
instance Display [Word32]
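-- | Convert an 'Int' to a value of a bounded enumeration, returning 'Nothing'
-- when the value is outside the enum's range. (The list pattern in the where
-- clause only serves to unify the types of 'minBound', 'maxBound' and the
-- result.)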
convertEnum e = if enumIsBetween mn mx e then Just r else Nothing
where [mn, mx, _] = [minBound, maxBound, r]
r = toEnum e
enumIsBetween :: (Enum a) => a -> a -> Int -> Bool
enumIsBetween a z x = x >= fromEnum a && x <= fromEnum z
showHex n | n >= 0 = "0x" ++ Numeric.showHex n ""
| otherwise = "-" ++ showHex (-n)
showHexPadded n
| n >= 0 = "0x" ++ pad ++ s
| otherwise = "-" ++ showHexPadded (-n)
where s = Numeric.showHex n ""
pad = replicate (max 0 $ 8 - length s) '0'
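-- Worked examples (derived from the definitions above):
-- showHex 255 == "0xff"
-- showHexPadded 255 == "0x000000ff" -- padded to eight hex digits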
readBinary :: (Num a, Bits a) => String -> a
readBinary b = zeroBits & bits .@~ ((map (/= '0') (reverse b) ++ repeat False) !!)
showBinary :: (Num a, Bits a) => a -> String
showBinary b = (\i -> if i then '1' else '0') <$> toListOf bits b
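-- Note the asymmetry in bit order: 'readBinary' expects conventional
-- most-significant-bit-first notation, while 'showBinary' emits the bits
-- least-significant-first, padded to the full width of the type. E.g.
-- readBinary "101" == (5 :: Int), but showBinary (5 :: Word8) == "10100000".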
showAbsOffset (-1) = "(INVALID) "
showAbsOffset o = showHexPadded o
formatBytes :: Int -> Int -> BS.ByteString -> [String]
formatBytes o1 o2 _ | o2 < o1 = []
formatBytes o1 o2 bs
| BS.all (== 0) bs' = ["..."]
| otherwise = zipWith formatLine offsets byteLines
where l = 1 + o2 - o1
b' = formatByte <$> take l (drop o1 b)
prefixSz = 16 - o1 `mod` 16
firstLine = replicate (16 - prefixSz) " " ++ take prefixSz b'
otherLines = chunksOf 16 $ drop prefixSz b'
byteLines = firstLine : otherLines
firstOffset = o1 - o1 `mod` 16
offsets = [firstOffset, firstOffset + 0x10 .. o2]
b = BS.unpack bs
bs' = slice o1 l bs
formatLine :: Int -> [String] -> String
formatLine o l = concat [showHexPadded o, "| ", w1, " ", w2, " ", w3, " ", w4, " ", printedChars]
where l' = chunksOf 4 l ++ repeat (replicate 4 " ")
(w1:w2:w3:w4:_) = intercalate " " <$> l'
-- printedChars = chunksOf 4 $ showPrintableChar <$> unwords [w1, w2, w3, w4]
printedChars = showPrintableChar <$> concat (take 4 l')
formatVertIxs :: (Show a) => [a] -> String
formatVertIxs = unlines . map (intercalate " ") . chunksOf 3 . map show . chunksOf 3
showPrintableChar :: String -> Char
showPrintableChar "" = ' '
showPrintableChar " " = ' '
showPrintableChar " " = ' '
showPrintableChar s
| c >= 32 && c < 127 = toEnum c
| otherwise = '.'
where c = case Numeric.readHex s of
[(h, "")] -> h
_ -> 0
formatByte b
| b < 0x10 = "0" ++ Numeric.showHex b ""
| otherwise = Numeric.showHex b ""
slice :: (Integral a) => a -> a -> BS.ByteString -> BS.ByteString
slice o l = BS.take (fromIntegral l) . BS.drop (fromIntegral o)
breakNybbles :: Word8 -> [Word8]
breakNybbles w = [w^.nybbleAt 1, w^.nybbleAt 0]
pack16le :: [Word8] -> [Word16]
pack16le = map (Bin.runGet Bin.getWord16le . BSL.pack) . chunksOf 2
pack32le :: [Word8] -> [Word32]
pack32le = map (Bin.runGet Bin.getWord32le . BSL.pack) . chunksOf 4
btAt :: (Integral a, Num a, Bits a) => Int -> Int -> Lens' a Word8
btAt n = \p -> lens (btGet n p) (btSet n p)
btMask :: (Bits a) => Int -> a
btMask n = foldl' setBit zeroBits [0..n - 1]
btGet :: (Integral a, Num a, Bits a) => Int -> Int -> a -> Word8
btGet n p w = fromIntegral $ w `shiftR` p .&. btMask n
btSet :: (Integral a, Num a, Bits a) => Int -> Int -> a -> Word8 -> a
btSet n p w v = w .&. invMask .|. v'
where ix' = p
v' = fromIntegral (v .&. btMask n) `shiftL` ix'
invMask = complement $ btMask n `shiftL` ix'
nybbleAt :: (Integral a, Num a, Bits a) => Int -> Lens' a Word8
nybbleAt ix = btAt 4 (ix * 4)
silence :: (MonadWriter w m) => m a -> m a
silence = censor (const mempty)
| isomorphism/labelled-hexdump-parser | Util.hs | bsd-3-clause | 5,165 | 0 | 14 | 1,297 | 2,124 | 1,106 | 1,018 | 132 | 2 |
module Language.CFamily.C.Analysis.TypeConversions (
arithmeticConversion,
floatConversion,
intConversion
) where
import Language.CFamily.C.Analysis.SemRep
-- | For an arithmetic operator, if the arguments are of the given
-- types, return the type of the full expression.
arithmeticConversion :: TypeName -> TypeName -> Maybe TypeName
-- XXX: I'm assuming that double `op` complex float = complex
-- double. The standard seems somewhat unclear on whether this is
-- really the case.
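-- For example, per the clauses below:
--
-- > arithmeticConversion (TyIntegral TyInt) (TyFloating f) == Just (TyFloating f)
-- > arithmeticConversion (TyEnum e1) (TyEnum e2) == Just (TyIntegral TyInt)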
arithmeticConversion (TyComplex t1) (TyComplex t2) =
Just $ TyComplex $ floatConversion t1 t2
arithmeticConversion (TyComplex t1) (TyFloating t2) =
Just $ TyComplex $ floatConversion t1 t2
arithmeticConversion (TyFloating t1) (TyComplex t2) =
Just $ TyComplex $ floatConversion t1 t2
arithmeticConversion t1@(TyComplex _) (TyIntegral _) = Just t1
arithmeticConversion (TyIntegral _) t2@(TyComplex _) = Just t2
arithmeticConversion (TyFloating t1) (TyFloating t2) =
Just $ TyFloating $ floatConversion t1 t2
arithmeticConversion t1@(TyFloating _) (TyIntegral _) = Just t1
arithmeticConversion (TyIntegral _) t2@(TyFloating _) = Just t2
arithmeticConversion (TyIntegral t1) (TyIntegral t2) =
Just $ TyIntegral $ intConversion t1 t2
arithmeticConversion (TyEnum _) (TyEnum _) = Just $ TyIntegral TyInt
arithmeticConversion (TyEnum _) t2 = Just $ t2
arithmeticConversion t1 (TyEnum _) = Just $ t1
arithmeticConversion _ _ = Nothing
floatConversion :: FloatType -> FloatType -> FloatType
floatConversion = max
intConversion :: IntType -> IntType -> IntType
intConversion t1 t2 = max TyInt (max t1 t2)
| micknelso/language-c | src/Language/CFamily/C/Analysis/TypeConversions.hs | bsd-3-clause | 1,609 | 0 | 8 | 249 | 469 | 241 | 228 | 28 | 1 |
-- |
-- Module : Data.Pack.Endianness
-- License : BSD-style
-- Maintainer : capsjac <capsjac at gmail dot com>
-- Stability : Experimental
-- Portability : Portable
--
{-# LANGUAGE CPP #-}
module Data.Pack.Endianness
( swap16
, swap32
, swap64
, le16Host
, le32Host
, le64Host
, be16Host
, be32Host
, be64Host
) where
import Data.Bits
import Data.Word
#if MIN_VERSION_base(4,7,0)
-- | Swap endianness on a Word16.
swap16 :: Word16 -> Word16
swap16 = byteSwap16
-- | Swap endianness on a Word32.
swap32 :: Word32 -> Word32
swap32 = byteSwap32
-- | Swap endianness on a Word64.
swap64 :: Word64 -> Word64
swap64 = byteSwap64
#else
#if BITS_IS_OLD
shr :: Bits a => a -> Int -> a
shr = shiftR
shl :: Bits a => a -> Int -> a
shl = shiftL
#else
shr :: Bits a => a -> Int -> a
shr = unsafeShiftR
shl :: Bits a => a -> Int -> a
shl = unsafeShiftL
#endif
-- | Swap endianness on a Word64.
-- 56 48 40 32 24 16 8 0
-- a b c d e f g h
-- h g f e d c b a
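-- e.g. swap64 0x0102030405060708 == 0x0807060504030201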
swap64 :: Word64 -> Word64
swap64 w =
(w `shr` 56) .|. (w `shl` 56)
.|. ((w `shr` 40) .&. 0xff00) .|. ((w .&. 0xff00) `shl` 40)
.|. ((w `shr` 24) .&. 0xff0000) .|. ((w .&. 0xff0000) `shl` 24)
.|. ((w `shr` 8) .&. 0xff000000) .|. ((w .&. 0xff000000) `shl` 8)
-- | Swap endianness on a Word32.
swap32 :: Word32 -> Word32
swap32 w =
(w `shr` 24) .|. (w `shl` 24)
.|. ((w `shr` 8) .&. 0xff00) .|. ((w .&. 0xff00) `shl` 8)
-- | Swap endianness on a Word16.
swap16 :: Word16 -> Word16
swap16 w = (w `shr` 8) .|. (w `shl` 8)
#endif
#ifdef CPU_BIG_ENDIAN
#define FromBE(bits) id
#define FromLE(bits) swap/**/bits
#else
#define FromBE(bits) swap/**/bits
#define FromLE(bits) id
#endif
-- | 16 bit big endian to host endian.
be16Host :: Word16 -> Word16
be16Host = FromBE(16)
-- | 32 bit big endian to host endian.
be32Host :: Word32 -> Word32
be32Host = FromBE(32)
-- | 64 bit big endian to host endian.
be64Host :: Word64 -> Word64
be64Host = FromBE(64)
-- | 16 bit little endian to host endian.
le16Host :: Word16 -> Word16
le16Host = FromLE(16)
-- | 32 bit little endian to host endian.
le32Host :: Word32 -> Word32
le32Host = FromLE(32)
-- | 64 bit little endian to host endian.
le64Host :: Word64 -> Word64
le64Host = FromLE(64)
| capsjac/pack | Data/Pack/Endianness.hs | bsd-3-clause | 2,301 | 0 | 14 | 554 | 517 | 317 | 200 | 41 | 1 |
{-#LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies, TypeApplications #-}
{-# LANGUAGE OverloadedStrings, GADTs, FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
module DirectoryServer where
import Network hiding (accept, sClose)
import Network.Socket hiding (send, recv, sendTo, recvFrom, Broadcast)
import Network.Socket.ByteString
import Data.ByteString.Char8 (pack, unpack)
import System.Environment
import System.IO
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad (forever, when, join)
import Data.List.Split
import Data.Word
import Text.Printf (printf)
import System.Directory
import Data.Map (Map) -- from the `containers` library
import Data.Time
import System.Random
import qualified Data.Map as M
import LRUCache as C
import Data.Hashable
type Uuid = Int
type Address = String
type Port = String
type Filename = String
type Timestamp = IO String
--Server data type allows me to pass address and port details easily
data DirectoryServer = DirectoryServer
{ address :: String
, port :: String
, filemappings :: TVar (M.Map Filename Filemapping)
, fileservers :: TVar (M.Map Uuid Fileserver)
, fileservercount :: TVar Int
}
--Constructor
newDirectoryServer :: String -> String -> IO DirectoryServer
newDirectoryServer address port = atomically $ do DirectoryServer <$> return address <*> return port <*> newTVar M.empty <*> newTVar M.empty <*> newTVar 0
addFilemapping :: DirectoryServer -> Filename -> Uuid -> Address -> Port -> Timestamp -> STM ()
addFilemapping DirectoryServer{..} filename uuid fmaddress fmport timestamp = do
fm <- newFilemapping filename uuid fmaddress fmport timestamp
modifyTVar filemappings . M.insert filename $ fm
addFileserver :: DirectoryServer -> Uuid -> Address -> Port -> STM ()
addFileserver DirectoryServer{..} uuid fsaddress fsport = do
fs <- newFileserver uuid fsaddress fsport
modifyTVar fileservers . M.insert uuid $ fs
lookupFilemapping :: DirectoryServer -> Filename -> STM (Maybe Filemapping)
lookupFilemapping DirectoryServer{..} filename = M.lookup filename <$> readTVar filemappings
lookupFileserver :: DirectoryServer -> Uuid -> STM (Maybe Fileserver)
lookupFileserver DirectoryServer{..} uuid = M.lookup uuid <$> readTVar fileservers
data Filemapping = Filemapping
{ fmfilename :: Filename
, fmuuid :: Uuid
, fmaddress :: Address
, fmport :: Port
, fmtimestamp :: Timestamp
}
newFilemapping :: Filename -> Uuid -> Address -> Port -> Timestamp -> STM Filemapping
newFilemapping fmfilename fmuuid fmaddress fmport fmtimestamp = Filemapping <$> return fmfilename <*> return fmuuid <*> return fmaddress <*> return fmport <*> return fmtimestamp
getFilemappinguuid :: Filemapping -> Uuid
getFilemappinguuid Filemapping{..} = fmuuid
getFilemappingaddress :: Filemapping -> Address
getFilemappingaddress Filemapping{..} = fmaddress
getFilemappingport :: Filemapping -> Port
getFilemappingport Filemapping{..} = fmport
getFilemappingtimestamp :: Filemapping -> Timestamp
getFilemappingtimestamp Filemapping{..} = fmtimestamp
data Fileserver = Fileserver
{ fsuuid :: Uuid
, fsaddress :: HostName
, fsport :: Port
}
newFileserver :: Uuid -> Address -> Port -> STM Fileserver
newFileserver fsuuid fsaddress fsport = Fileserver <$> return fsuuid <*> return fsaddress <*> return fsport
getFileserveraddress :: Fileserver -> HostName
getFileserveraddress Fileserver{..} = fsaddress
getFileserverport :: Fileserver -> Port
getFileserverport Fileserver{..} = fsport
--4 is easy for testing the pooling
maxnumThreads = 4
serverport :: String
serverport = "7008"
serverhost :: String
serverhost = "localhost"
dirrun:: IO ()
dirrun = withSocketsDo $ do
--Command line arguments for port and address
--args <- getArgs
server <- newDirectoryServer serverhost serverport
--sock <- listenOn (PortNumber (fromIntegral serverport))
addrinfos <- getAddrInfo
(Just (defaultHints {addrFlags = [AI_PASSIVE]}))
Nothing (Just serverport)
let serveraddr = head addrinfos
sock <- socket (addrFamily serveraddr) Stream defaultProtocol
bindSocket sock (addrAddress serveraddr)
listen sock 5
_ <- printf "Listening on port %s\n" serverport
--Listen on port from command line argument
--New Abstract FIFO Channel
chan <- newChan
--New Cache
cache <- C.newHandle 5
--TVars are variables stored in memory, this way we can access numThreads from any function
numThreads <- atomically $ newTVar 0
--Spawns a new thread to handle the clientconnectHandler method, passes socket, channel, numThreads and server
forkIO $ clientconnectHandler sock chan numThreads server cache
--Calls the mainHandler which will monitor the FIFO channel
mainHandler sock chan
mainHandler :: Socket -> Chan String -> IO ()
mainHandler sock chan = do
--Read current message on the FIFO channel
chanMsg <- readChan chan
--If KILL_SERVICE, stop mainHandler running, If anything else, call mainHandler again, keeping the service running
case (chanMsg) of
("KILL_SERVICE") -> putStrLn "Terminating the Service!"
_ -> mainHandler sock chan
clientconnectHandler :: Socket -> Chan String -> TVar Int -> DirectoryServer -> (C.Handle String String) -> IO ()
clientconnectHandler sock chan numThreads server cache = do
--Accept the socket which returns a handle, host and port
--(handle, host, port) <- accept sock
(s,a) <- accept sock
--handle <- socketToHandle s ReadWriteMode
--Read numThreads from memory and print it on server console
count <- atomically $ readTVar numThreads
putStrLn $ "numThreads = " ++ show count
--If there are still threads remaining create new thread and increment (exception if thread is lost -> decrement), else tell user capacity has been reached
if (count < maxnumThreads) then do
forkFinally (clientHandler s chan server cache) (\_ -> atomically $ decrementTVar numThreads)
atomically $ incrementTVar numThreads
else do
send s (pack ("Maximum number of threads in use. try again soon"++"\n\n"))
sClose s
clientconnectHandler sock chan numThreads server cache
clientHandler :: Socket -> Chan String -> DirectoryServer -> (C.Handle String String) -> IO ()
clientHandler sock chan server@DirectoryServer{..} cache =
forever $ do
message <- recv sock 1024
let msg = unpack message
print $ msg ++ "!ENDLINE!"
let cmd = head $ words $ head $ splitOn ":" msg
print cmd
case cmd of
("HELO") -> heloCommand sock server $ (words msg) !! 1
("KILL_SERVICE") -> killCommand chan sock
("DOWNLOAD") -> downloadCommand sock server msg cache
("UPLOAD") -> uploadCommand sock server msg
("JOIN") -> joinCommand sock server msg
("UPDATE") -> updateCommand sock server msg
_ -> do send sock (pack ("Unknown Command - " ++ msg ++ "\n\n")) ; return ()
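-- The wire protocol used throughout this module separates fields with a
-- literal backslash-n sequence (written "\\n" in the string literals) and a
-- colon between label and value. Only the field positions matter to the
-- parsers in this module, so the labels shown here are illustrative:
--
-- DOWNLOAD:FILE\nFILENAME:<name>
-- UPLOAD:FILE\nFILENAME:<name>\nDATA:<contents>
-- JOIN:SERVER\nADDRESS:<host>\nPORT:<port>
-- UPDATE:FILE\nFILENAME:<name>\nDATA:<contents>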
--Function called when HELO text command received
heloCommand :: Socket -> DirectoryServer -> String -> IO ()
heloCommand sock DirectoryServer{..} msg = do
send sock $ pack $ "HELO " ++ msg ++ "\n" ++
"IP:" ++ "192.168.6.129" ++ "\n" ++
"Port:" ++ port ++ "\n" ++
"StudentID:12306421\n\n"
return ()
killCommand :: Chan String -> Socket -> IO ()
killCommand chan sock = do
send sock $ pack $ "Service is now terminating!"
writeChan chan "KILL_SERVICE"
returnb :: a -> IO a
returnb a = return a
downloadCommand :: Socket -> DirectoryServer -> String -> (C.Handle String String) -> IO ()
downloadCommand sock server@DirectoryServer{..} command cache = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 1) !! 1
-- let k = filename
let k = filename
incache <- C.iolookup cache k
case incache of
(Nothing) -> do
fm <- atomically $ lookupFilemapping server filename
case fm of
(Nothing) -> send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"STATUS: " ++ "File not found" ++ "\n\n"
(Just fm) -> do forkIO $ downloadmsg filename (getFilemappingaddress fm) (getFilemappingport fm) sock cache
send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"STATUS: " ++ "SUCCESSFUL" ++ "\n\n"
(Just v) -> do print "Cache hit"
ioinsert cache filename v
send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"DATA: " ++ show v ++ "\n\n"
return ()
downloadmsg :: String -> String -> String -> Socket -> (C.Handle String String) -> IO()
downloadmsg filename host port sock cache = do
addrInfo <- getAddrInfo (Just (defaultHints {addrFlags = [AI_PASSIVE]})) Nothing (Just "7007")
let serverAddr = head addrInfo
clsock <- socket (addrFamily serverAddr) Stream defaultProtocol
connect clsock (addrAddress serverAddr)
send clsock $ pack $ "DOWNLOAD:FILE" ++ "\\n" ++
"FILENAME:" ++ filename ++ "\\n\n"
resp <- recv clsock 1024
let msg = unpack resp
let clines = splitOn "\\n" msg
fdata = (splitOn ":" $ clines !! 1) !! 1
sClose clsock
send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"DATA: " ++ fdata ++ "\n\n"
ioinsert cache filename fdata
return ()
returndata :: String -> Socket -> String -> IO ()
returndata filename sock fdata = do
send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\\n" ++
"DATA: " ++ fdata ++ "\n\n"
return ()
uploadCommand :: Socket -> DirectoryServer ->String -> IO ()
uploadCommand sock server@DirectoryServer{..} command = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 1) !! 1
fdata = (splitOn ":" $ clines !! 2) !! 1
fm <- atomically $ lookupFilemapping server filename
case fm of
(Just fm) -> send sock $ pack $ "UPLOAD: " ++ filename ++ "\n" ++
"STATUS: " ++ "File Already Exists" ++ "\n\n"
(Nothing) -> do numfs <- atomically $ M.size <$> readTVar fileservers
rand <- randomRIO (0, (numfs-1))
fs <- atomically $ lookupFileserver server rand
case fs of
(Nothing) -> send sock $ pack $ "UPLOAD: " ++ filename ++ "\n"++
"FAILED: " ++ "No valid Fileserver found to host" ++ "\n\n"
(Just fs) -> do forkIO $ uploadmsg sock filename fdata fs rand server
fm <- atomically $ newFilemapping filename rand (getFileserveraddress fs) (getFileserverport fs) (fmap show getZonedTime)
atomically $ addFilemapping server filename rand (getFileserveraddress fs) (getFileserverport fs) (fmap show getZonedTime)
send sock $ pack $ "UPLOAD: " ++ filename ++ "\\n" ++
"STATUS: " ++ "Successfull" ++ "\n\n"
return ()
uploadmsg :: Socket -> String -> String -> Fileserver -> Int -> DirectoryServer -> IO ()
uploadmsg sock filename fdata fs rand server@DirectoryServer{..} = withSocketsDo $ do
addrInfo <- getAddrInfo (Just (defaultHints {addrFlags = [AI_PASSIVE]})) Nothing (Just (getFileserverport fs))
let serverAddr = head addrInfo
clsock <- socket (addrFamily serverAddr) Stream defaultProtocol
connect clsock (addrAddress serverAddr)
send clsock $ pack $ "UPLOAD:FILE" ++ "\\n" ++
"FILENAME:" ++ filename ++ "\\n" ++
"DATA:" ++ fdata ++ "\\n"
resp <- recv clsock 1024
sClose clsock
let msg = unpack resp
print $ msg ++ "!ENDLINE!"
let clines = splitOn "\\n" msg
status = (splitOn ":" $ clines !! 1) !! 1
return ()
joinCommand :: Socket -> DirectoryServer ->String -> IO ()
joinCommand sock server@DirectoryServer{..} command = do
let clines = splitOn "\\n" command
newaddress = (splitOn ":" $ clines !! 1) !! 1
newport = (splitOn ":" $ clines !! 2) !! 1
nodeID <- atomically $ readTVar fileservercount
fs <- atomically $ newFileserver nodeID newaddress newport
atomically $ addFileserver server nodeID newaddress newport
atomically $ incrementFileserverCount fileservercount
send sock $ pack $ "JOINED DISTRIBUTED FILE SERVICE as fileserver: " ++ (show nodeID) ++ "\n\n"
return ()
updateCommand :: Socket -> DirectoryServer ->String -> IO ()
updateCommand sock server@DirectoryServer{..} command = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 1) !! 1
fdata = (splitOn ":" $ clines !! 2) !! 1
fm <- atomically $ lookupFilemapping server filename
case fm of
(Nothing) -> send sock $ pack $ "UPDATE: " ++ filename ++ "\n" ++
"STATUS: " ++ "File Doesnt Exists" ++ "\n\n"
(Just fm) -> do fs <- atomically $ lookupFileserver server (getFilemappinguuid fm)
case fs of
(Nothing) -> send sock $ pack $ "UPDATE: " ++ filename ++ "\n"++
"FAILED: " ++ "No valid Fileserver found to host" ++ "\n\n"
(Just fs) -> do forkIO $ updatemsg sock filename fdata fs (getFilemappinguuid fm) server
fm <- atomically $ newFilemapping filename (getFilemappinguuid fm) (getFileserveraddress fs) (getFileserverport fs) (fmap show getZonedTime)
atomically $ addFilemapping server filename (getFilemappinguuid fm) (getFileserveraddress fs) (getFileserverport fs) (fmap show getZonedTime)
send sock $ pack $ "UPDATE: " ++ filename ++ "\\n" ++
"STATUS: " ++ "Successfull" ++ "\n\n"
return ()
updatemsg :: Socket -> String -> String -> Fileserver -> Int -> DirectoryServer -> IO ()
updatemsg sock filename fdata fs rand server@DirectoryServer{..} = withSocketsDo $ do
addrInfo <- getAddrInfo (Just (defaultHints {addrFlags = [AI_PASSIVE]})) Nothing (Just (getFileserverport fs))
let serverAddr = head addrInfo
clsock <- socket (addrFamily serverAddr) Stream defaultProtocol
connect clsock (addrAddress serverAddr)
send clsock $ pack $ "UPDATE:FILE" ++ "\\n" ++
"FILENAME:" ++ filename ++ "\\n" ++
"DATA:" ++ fdata ++ "\\n"
resp <- recv clsock 1024
sClose clsock
let msg = unpack resp
print $ msg ++ "!ENDLINE!"
let clines = splitOn "\\n" msg
status = (splitOn ":" $ clines !! 1) !! 1
return ()
--Increment Tvar stored in memory i.e. numThreads
incrementTVar :: TVar Int -> STM ()
incrementTVar tv = modifyTVar tv ((+) 1)
--Decrement Tvar stored in memory i.e. numThreads
decrementTVar :: TVar Int -> STM ()
decrementTVar tv = modifyTVar tv (subtract 1)
incrementFileserverCount :: TVar Int -> STM ()
incrementFileserverCount tv = modifyTVar tv ((+) 1)
| Garygunn94/DFS | .stack-work/intero/intero8753hOX.hs | bsd-3-clause | 15,452 | 456 | 15 | 4,001 | 4,189 | 2,160 | 2,029 | 276 | 7 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Mismi.Control (
A.AWS
, A.Error
, A.AccessKey
, A.SecretKey
, A.SessionToken
, A.Region (..)
, runAWS
, runAWST
, runAWSTWith
, runAWSTWithRegion
, rawRunAWS
, runAWSWithRegion
, newEnvFromCreds
, awsBracket
, awsBracket_
, renderError
, onStatus
, onStatus_
, handle404
, handle403
, handle301
, setServiceRetry
, setRetry
, configureRetries
, handleServiceError
, withRetries
, withRetriesOf
, throwOrRetry
, throwOrRetryOf
) where
import Control.Exception (IOException)
import Control.Lens ((.~), (^.), (^?), over)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Retry (RetryPolicyM, RetryStatus)
import Control.Retry (fullJitterBackoff, recovering, rsIterNumber, applyPolicy)
import qualified Data.ByteString.Lazy as BL
import Data.ByteString.Builder
import Data.Text as T
import Data.Text.Encoding as T
import Mismi.Environment
import Network.AWS hiding (runAWS)
import qualified Network.AWS as A
import Network.AWS.Data
import Network.AWS.Error
import Network.HTTP.Client (HttpException (..))
#if MIN_VERSION_http_client(0,5,0)
import Network.HTTP.Client (HttpExceptionContent (..), responseTimeoutMicro, responseStatus)
#endif
import Network.HTTP.Client.Internal (mResponseTimeout)
import Network.HTTP.Types.Status
import P
import System.IO
import X.Control.Monad.Trans.Either
runAWST :: Env -> (Error -> e) -> EitherT e AWS a -> EitherT e IO a
runAWST e err action =
runAWSTWith (runAWS e) err action
runAWSTWithRegion :: Region -> (Error -> e) -> EitherT e AWS a -> EitherT e IO a
runAWSTWithRegion r err action =
runAWSTWith (runAWSWithRegion r) err action
runAWSTWith :: (forall b. AWS b -> EitherT Error IO b) -> (Error -> e) -> EitherT e AWS a -> EitherT e IO a
runAWSTWith run err action =
joinErrors id err $ mapEitherT run action
runAWS :: (MonadIO m, MonadCatch m) => Env -> AWS a -> EitherT Error m a
runAWS e'' =
let
e' = over envManager (\m -> m { mResponseTimeout =
#if MIN_VERSION_http_client(0,5,0)
responseTimeoutMicro 60000000
#else
Just 60000000
#endif
}) e''
e = configureRetries 5 e'
in
EitherT . try . liftIO . rawRunAWS e
runAWSWithRegion :: (MonadIO m, MonadCatch m) => Region -> AWS a -> EitherT Error m a
runAWSWithRegion r a = do
e <- liftIO $ discoverAWSEnvWithRegion r
runAWS e a
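-- Example usage (a sketch; 'someRequest' stands in for any amazonka request
-- value and is not defined here):
--
-- > r <- runEitherT $ runAWSWithRegion Sydney (send someRequest)
-- > either (putStrLn . T.unpack . renderError) print r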
newEnvFromCreds :: (Applicative m, MonadIO m, MonadCatch m) => Region -> AccessKey -> SecretKey -> Maybe SessionToken -> m Env
newEnvFromCreds r ak sk st = do
#if MIN_VERSION_amazonka(1,4,4)
e <- newEnv $ case st of
Nothing ->
FromKeys ak sk
Just st' ->
FromSession ak sk st'
pure $ e & envRegion .~ r
#else
newEnv r $ case st of
Nothing ->
FromKeys ak sk
Just st' ->
FromSession ak sk st'
#endif
rawRunAWS :: Env -> AWS a -> IO a
rawRunAWS e =
runResourceT . A.runAWS e
awsBracket :: AWS a -> (a -> AWS c) -> (a -> AWS b) -> AWS b
awsBracket r f a = do
e <- ask
liftIO $ bracket (unsafeRunAWS e r) (unsafeRunAWS e . f) (unsafeRunAWS e . a)
awsBracket_ :: AWS a -> AWS c -> AWS b -> AWS b
awsBracket_ r f a =
awsBracket r (const f) (const a)
unsafeRunAWS :: Env -> AWS a -> IO a
unsafeRunAWS e a =
eitherT throwM pure $ runAWS e a
renderError :: Error -> Text
renderError =
decodeUtf8 . BL.toStrict . toLazyByteString . build
setServiceRetry :: Retry -> AWS a -> AWS a
setServiceRetry r =
local (override (serviceRetry .~ r))
setRetry :: Int -> AWS a -> AWS a
setRetry =
local . configureRetries
withRetries :: (MonadCatch m, MonadMask m, MonadIO m) => Int -> m a -> m a
withRetries =
withRetriesOf (fullJitterBackoff 500000)
withRetriesOf :: (MonadCatch m, MonadMask m, MonadIO m) => RetryPolicyM m -> Int -> m a -> m a
withRetriesOf policy n action = do
recovering policy [httpCondition n, ioCondition n] $ \_ ->
action
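-- For example, to re-run a flaky action up to three times when it throws a
-- transient-looking 'HttpException' or an 'IOException' (a sketch;
-- 'fetchThing' is not a real function):
--
-- > withRetries 3 (fetchThing url)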
httpCondition :: Applicative m => Int -> RetryStatus -> Handler m Bool
httpCondition n s =
Handler $ \(e :: HttpException) ->
pure $
if rsIterNumber s > n
then False
else checkException e False
ioCondition :: Applicative m => Int -> RetryStatus -> Handler m Bool
ioCondition n s =
Handler $ \(_ :: IOException) ->
pure $ rsIterNumber s < n
throwOrRetry ::
(MonadCatch m, MonadMask m, MonadIO m)
=> Int
-> SomeException
-> RetryStatus
-> m RetryStatus
throwOrRetry =
throwOrRetryOf (fullJitterBackoff 500000)
throwOrRetryOf ::
(MonadCatch m, MonadMask m, MonadIO m)
=> RetryPolicyM m
-> Int
-> SomeException
-> RetryStatus
-> m RetryStatus
throwOrRetryOf policy n ex0 s0 =
let
recover = \case
[] ->
throwM ex0
h0 : hs ->
case h0 s0 of
Handler h ->
case fromException ex0 of
Nothing ->
recover hs
Just ex -> do
ok <- h ex
if ok then do
ms <- applyPolicy policy s0
case ms of
Nothing ->
throwM ex
Just s ->
pure s
else
throwM ex
in
recover [httpCondition n, ioCondition n]
configureRetries :: Int -> Env -> Env
configureRetries i e = e & envRetryCheck .~ err
where
err c _ | c >= i = False
err c v =
checkException v $ (e ^. envRetryCheck) c v
checkException :: HttpException -> Bool -> Bool
checkException v f =
case v of
#if MIN_VERSION_http_client(0,5,0)
InvalidUrlException _ _ ->
False
HttpExceptionRequest _req content ->
case content of
NoResponseDataReceived ->
True
StatusCodeException resp _ ->
let status = responseStatus resp in
status == status500 || status == status503
ResponseTimeout ->
True
ConnectionTimeout ->
True
ConnectionFailure _ ->
True
ResponseBodyTooShort _ _ ->
True
InternalException _ ->
True
InvalidStatusLine _ ->
True
InvalidHeader _ ->
True
ProxyConnectException _ _ _ ->
True
WrongRequestBodyStreamSize _ _ ->
True
InvalidChunkHeaders ->
True
IncompleteHeaders ->
True
HttpZlibException _ ->
True
TooManyRedirects _ ->
False
OverlongHeaders ->
False
TlsNotSupported ->
False
InvalidDestinationHost _ ->
False
InvalidProxyEnvironmentVariable _ _ ->
False
_ ->
f
#else
NoResponseDataReceived ->
True
StatusCodeException status _ _ ->
status == status500 || status == status503
FailedConnectionException _ _ ->
True
FailedConnectionException2 _ _ _ _ ->
True
TlsException _ ->
True
InternalIOException _ ->
True
HandshakeFailed ->
True
ResponseTimeout ->
True
ResponseBodyTooShort _ _ ->
True
_ ->
f
#endif
handle404 :: AWS a -> AWS (Maybe a)
handle404 =
handleStatus status404
handle403 :: AWS a -> AWS (Maybe a)
handle403 =
handleStatus status403
handle301 :: AWS a -> AWS (Maybe a)
handle301 =
handleStatus status301
handleStatus :: Status -> AWS a -> AWS (Maybe a)
handleStatus s m =
fmap Just m `catch` \(e :: Error) ->
if e ^? httpStatus == Just s then return Nothing else throwM e
-- | Return a result code depending on the HTTP status
onStatus :: (Status -> Maybe r) -> AWS a -> AWS (Either r a)
onStatus f m =
fmap Right m `catch` \(e :: Error) ->
case e ^? httpStatus >>= f of
Just r1 ->
return (Left r1)
Nothing ->
throwM e
-- | Return a result code depending on the HTTP status
-- for an AWS action returning no value
onStatus_ :: r -> (Status -> Maybe r) -> AWS () -> AWS r
onStatus_ r f m =
fmap (const r) m `catch` \(e :: Error) ->
case e ^? httpStatus >>= f of
Just r1 ->
return r1
Nothing ->
throwM e
handleServiceError :: (ServiceError -> Bool) -> (ServiceError -> a) -> AWS a -> AWS a
handleServiceError f pass action =
action `catch` \(e :: Error) ->
case e of
ServiceError se ->
if f se
then pure $ pass se
else throwM e
SerializeError _ ->
throwM e
TransportError _ ->
throwM e
| ambiata/mismi | mismi-core/src/Mismi/Control.hs | bsd-3-clause | 8,864 | 3 | 26 | 2,697 | 2,577 | 1,325 | 1,252 | 232 | 10 |
module Rules.Program (buildProgram) where
import Hadrian.Haskell.Cabal
import Hadrian.Haskell.Cabal.PackageData as PD
import Base
import Context
import Expression hiding (stage, way)
import GHC
import Oracles.ModuleFiles
import Oracles.Flag (crossCompiling)
import Settings
import Settings.Packages.Rts
import Target
import Utilities
-- | TODO: Drop code duplication
buildProgram :: [(Resource, Int)] -> Rules ()
buildProgram rs = do
root <- buildRootRules
forM_ [Stage0 ..] $ \stage ->
[ root -/- stageString stage -/- "bin" -/- "*"
, root -/- stageString stage -/- "lib/bin" -/- "*" ] |%> \bin -> do
-- This is quite inefficient, but we can't access 'programName' from
-- 'Rules', because it is an 'Action' depending on an oracle.
sPackages <- filter isProgram <$> stagePackages stage
tPackages <- testsuitePackages
-- TODO: Shall we use Stage2 for testsuite packages instead?
let allPackages = sPackages
++ if stage == Stage1 then tPackages else []
nameToCtxList <- forM allPackages $ \pkg -> do
let ctx = vanillaContext stage pkg
name <- programName ctx
return (name <.> exe, ctx)
case lookup (takeFileName bin) nameToCtxList of
Nothing -> error $ "Unknown program " ++ show bin
Just (Context {..}) -> do
-- Custom dependencies: this should be modeled better in the
-- Cabal file somehow.
-- TODO: Is this still needed? See 'runtimeDependencies'.
when (package == hsc2hs) $ do
-- 'Hsc2hs' needs the @template-hsc.h@ file.
template <- templateHscPath stage
need [template]
when (package == ghc) $ do
-- GHC depends on @settings@, @platformConstants@,
-- @llvm-targets@, @ghc-usage.txt@, @ghci-usage.txt@,
-- @llvm-passes@.
need =<< ghcDeps stage
cross <- crossCompiling
-- For cross compiler, copy @stage0/bin/<pgm>@ to @stage1/bin/@.
case (cross, stage) of
(True, s) | s > Stage0 -> do
srcDir <- buildRoot <&> (-/- (stageString Stage0 -/- "bin"))
copyFile (srcDir -/- takeFileName bin) bin
_ -> buildBinary rs bin =<< programContext stage package
buildBinary :: [(Resource, Int)] -> FilePath -> Context -> Action ()
buildBinary rs bin context@Context {..} = do
binDeps <- if stage == Stage0 && package == ghcCabal
then hsSources context
else do
needLibrary =<< contextDependencies context
when (stage > Stage0) $ do
ways <- interpretInContext context (getLibraryWays <> getRtsWays)
needLibrary [ rtsContext { way = w } | w <- ways ]
cSrcs <- interpretInContext context (getPackageData PD.cSrcs)
cObjs <- mapM (objectPath context) cSrcs
hsObjs <- hsObjects context
return $ cObjs ++ hsObjs
need binDeps
buildWithResources rs $ target context (Ghc LinkHs stage) binDeps [bin]
synopsis <- pkgSynopsis context
putSuccess $ renderProgram
(quote (pkgName package) ++ " (" ++ show stage ++ ").") bin synopsis
| bgamari/shaking-up-ghc | src/Rules/Program.hs | bsd-3-clause | 3,525 | 0 | 29 | 1,226 | 821 | 413 | 408 | -1 | -1 |
--
-- Circuit compiler for the Faerieplay hardware-assisted secure
-- computation project at Dartmouth College.
--
-- Copyright (C) 2003-2007, Alexander Iliev <[email protected]> and
-- Sean W. Smith <[email protected]>
--
-- All rights reserved.
--
-- This code is released under a BSD license.
-- Please see LICENSE.txt for the full license and disclaimers.
--
-- a tree class
-- awaiting understanding of universal quantification for types...
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module Faerieplay.Tree where
-- needed functions:
-- children (t) -- list of children
-- nil (t) -- sub-leaf node?
-- node (t) -- the actual node of this tree
-- use multi-parameter type classes
class LabTree t a where
children :: t a -> [t a]
-- nil :: t a -> Bool
nodeExtr :: t a -> a
nodeCons :: a -> [t a] -> t a
-- nil t = False -- default
class MyTree t where
recons :: t -> [t] -> t
kids :: t -> [t]
-- isLeaf :: t -> Bool
mapTree f t = let cs = map f (kids t)
t_new = f t
in recons t_new cs
data Exp = EPlus Exp Exp
| EInt Int
data Stm = SAss Exp Exp
| SIf Exp Stm
data SynTree = TExp Exp
| TStm Stm
instance MyTree SynTree where
recons (TExp e) ts = case (e,ts) of
( (EPlus _ _) , [(TExp e1),(TExp e2)]) -> (TExp $ EPlus e1 e2)
recons (TStm s) ts = case (s,ts) of
( (SAss _ _) , [(TExp e1),(TExp e2)] ) -> (TStm $ SAss e1 e2)
( (SIf _ _) , [(TExp e1),(TStm s1)] ) -> (TStm $ SIf e1 s1)
kids (TExp (EPlus e1 e2)) = [TExp e1, TExp e2]
kids (TExp (EInt i)) = []
kids (TStm (SAss e1 e2)) = [TExp e1, TExp e2]
kids (TStm (SIf e1 s1)) = [TExp e1, TStm s1]
-- a small example syntax tree
egTree = TStm (SIf (EInt 1) (SAss (EInt 2) (EPlus (EInt 3) (EInt 4))))
data BinTree a = Node a (BinTree a) (BinTree a)
| Leaf a
deriving (Show)
instance LabTree BinTree a where
children (Node _ b1 b2) = [b1,b2]
children (Leaf _) = []
nodeExtr (Node x _ _) = x
nodeExtr (Leaf x) = x
nodeCons x [] = Leaf x
nodeCons x [c1,c2] = Node x c1 c2
------
-- and some tree functions
------
-- fnode fcons base tree
redtree :: (LabTree t a) => (a -> b -> c) -> (c -> b -> b) -> b -> t a -> c
redtree fnode fcons base t = fnode (nodeExtr t) (redtree' fnode fcons base (children t))
-- redtree' takes (fnode, fcons, base) and a list of trees, and
-- returns the right-fold (with fcons, starting from base) of the reduced subtrees
where redtree' :: (LabTree t a) => (a -> b -> c) -> (c -> b -> b) -> b -> [t a] -> b
-- redtree' fnode fcons base (t:ts)
-- | nil(t) = fcons base (redtree' fnode fcons base ts)
redtree' _ _ base [] = base
redtree' fnode fcons base (t:ts) = fcons (redtree fnode fcons base t) (redtree' fnode fcons base ts)
-- example
labels :: (LabTree t a) => t a -> [a]
labels = redtree (:) (++) []
maptree :: (LabTree t a) => (a -> a) -> t a -> t a
maptree f = redtree (nodeCons . f) (:) []
testBinTree = Node 2 (Leaf 3) (Node 5 (Leaf 3) (Leaf 4))
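-- e.g. labels testBinTree == [2,3,5,3,4] (each node's label before its subtrees)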
main = print $ labels testBinTree
| ailiev/faerieplay-compiler | Faerieplay/Tree.hs | bsd-3-clause | 3,329 | 2 | 13 | 1,222 | 1,125 | 598 | 527 | -1 | -1 |
module CS.JsonDotNet.Internal.Types where
import Data.Aeson
import Data.Text
data FieldType = FInteger
| FNumber
| FString
| FBool
| FDay
| FUTCTime
| FEnum Text [Value]
| FObject Text [(Text, FieldType)]
| FList FieldType
| FNullable FieldType
| FRefObject Text
| FRefEnum Text
| FRefPrim Text FieldType
deriving Show
isFEnum :: FieldType -> Bool
isFEnum (FEnum _ _) = True
isFEnum _ = False
isFPrim :: FieldType -> Bool
isFPrim FString = True
isFPrim FInteger = True
isFPrim FNumber = True
isFPrim FBool = True
isFPrim FDay = True
isFPrim FUTCTime = True
isFPrim _ = False
isFObj :: FieldType -> Bool
isFObj (FObject _ _) = True
isFObj _ = False
data CCate = CVal | CRef | CSt deriving Show
nullable :: FieldType -> CCate
nullable FInteger = CVal
nullable FNumber = CVal
nullable FString = CRef
nullable FBool = CVal
nullable FDay = CVal
nullable FUTCTime = CVal
nullable (FEnum _ _) = CSt
nullable (FObject _ _) = CSt
nullable (FList _) = CRef
nullable (FNullable _) = CRef
nullable (FRefObject _) = CVal
nullable (FRefEnum _) = CVal
nullable (FRefPrim _ t) = nullable t
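-- For example, nullable FString is CRef, nullable (FEnum "Color" []) is CSt,
-- and nullable (FRefPrim "Age" FInteger) is CVal, since FRefPrim defers to the
-- wrapped primitive type.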
data ConverterType = NoConv
| DayConv
| EnumConv
| ItemConv ConverterType
deriving Show
| cutsea110/servant-csharp | src/CS/JsonDotNet/Internal/Types.hs | bsd-3-clause | 1,446 | 0 | 8 | 486 | 428 | 232 | 196 | 51 | 1 |
module REPL where
import Parser
import Syntax
import PrettyPrinter
import Compiler
import Evaluator
import System.Console.Haskeline
import Control.Monad.Trans
import System.IO
processProgram :: String -> IO ()
processProgram input = do
case parseTopLevelProgram_P input of
Left err -> print err
Right scDefns -> pprint scDefns
readInFile :: String -> IO ()
readInFile path = do
handle <- openFile path ReadMode
contents <- hGetContents handle
processProgram contents
putStrLn "\n"
hClose handle
readInProgram :: String -> IO ()
readInProgram path = do
handle <- openFile path ReadMode
contents <- hGetContents handle
case parseTopLevelProgram_P contents of
Left err -> print err
Right sc_defs -> (showResults . fst . eval . compile) sc_defs
putStrLn "\n"
hClose handle
putAST :: String -> IO ()
putAST path = do
handle <- openFile path ReadMode
contents <- hGetContents handle
case parseTopLevelProgram_P contents of
Left err -> print err
Right scDefns -> mapM_ print scDefns
hClose handle
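-- Example GHCi session (a sketch; "examples/pair.core" is a made-up path):
--
-- > putAST "examples/pair.core" -- print the parsed supercombinator ASTs
-- > readInFile "examples/pair.core" -- parse and pretty-print the program
-- > readInProgram "examples/pair.core" -- parse, compile and show the evaluation states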
| MarkX95/TinyHask | REPL.hs | bsd-3-clause | 1,109 | 0 | 14 | 266 | 345 | 158 | 187 | 38 | 2 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RebindableSyntax #-}
module KnockoutOption where
import RebindableEDSL
-- knock-out barrier option
--
knockout :: Exp R -> Exp R -> Exp R -> Int -> Contr
knockout barrier strike notional maturity =
if rObs (FX EUR DKK) 0 <= barrier `within` maturity
then zero
else if bObs (Decision X "exercise") 0
then notional # (transfer Y X EUR &
(strike # transfer X Y DKK))
else zero
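-- Informally (assuming the usual semantics of @`within`@ in this EDSL): the
-- option is knocked out, i.e. becomes worthless, as soon as the EUR/DKK rate
-- falls to or below @barrier@ within @maturity@ days; otherwise, at maturity,
-- party X may exercise, receiving @notional@ EUR from Y against paying
-- @notional * strike@ DKK.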
| douglas-larocca/contracts | Coq/Extraction/Examples/KnockoutOption.hs | mit | 465 | 0 | 12 | 123 | 139 | 74 | 65 | 12 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeNetworkInterfaceAttribute
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes a network interface attribute. You can specify only one attribute
-- at a time.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeNetworkInterfaceAttribute.html>
module Network.AWS.EC2.DescribeNetworkInterfaceAttribute
(
-- * Request
DescribeNetworkInterfaceAttribute
-- ** Request constructor
, describeNetworkInterfaceAttribute
-- ** Request lenses
, dniaAttribute
, dniaDryRun
, dniaNetworkInterfaceId
-- * Response
, DescribeNetworkInterfaceAttributeResponse
-- ** Response constructor
, describeNetworkInterfaceAttributeResponse
-- ** Response lenses
, dniarAttachment
, dniarDescription
, dniarGroups
, dniarNetworkInterfaceId
, dniarSourceDestCheck
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data DescribeNetworkInterfaceAttribute = DescribeNetworkInterfaceAttribute
{ _dniaAttribute :: Maybe NetworkInterfaceAttribute
, _dniaDryRun :: Maybe Bool
, _dniaNetworkInterfaceId :: Text
} deriving (Eq, Read, Show)
-- | 'DescribeNetworkInterfaceAttribute' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dniaAttribute' @::@ 'Maybe' 'NetworkInterfaceAttribute'
--
-- * 'dniaDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'dniaNetworkInterfaceId' @::@ 'Text'
--
describeNetworkInterfaceAttribute :: Text -- ^ 'dniaNetworkInterfaceId'
-> DescribeNetworkInterfaceAttribute
describeNetworkInterfaceAttribute p1 = DescribeNetworkInterfaceAttribute
{ _dniaNetworkInterfaceId = p1
, _dniaDryRun = Nothing
, _dniaAttribute = Nothing
}
-- | The attribute of the network interface.
dniaAttribute :: Lens' DescribeNetworkInterfaceAttribute (Maybe NetworkInterfaceAttribute)
dniaAttribute = lens _dniaAttribute (\s a -> s { _dniaAttribute = a })
dniaDryRun :: Lens' DescribeNetworkInterfaceAttribute (Maybe Bool)
dniaDryRun = lens _dniaDryRun (\s a -> s { _dniaDryRun = a })
-- | The ID of the network interface.
dniaNetworkInterfaceId :: Lens' DescribeNetworkInterfaceAttribute Text
dniaNetworkInterfaceId =
lens _dniaNetworkInterfaceId (\s a -> s { _dniaNetworkInterfaceId = a })
data DescribeNetworkInterfaceAttributeResponse = DescribeNetworkInterfaceAttributeResponse
{ _dniarAttachment :: Maybe NetworkInterfaceAttachment
, _dniarDescription :: Maybe AttributeValue
, _dniarGroups :: List "item" GroupIdentifier
, _dniarNetworkInterfaceId :: Maybe Text
, _dniarSourceDestCheck :: Maybe AttributeBooleanValue
} deriving (Eq, Read, Show)
-- | 'DescribeNetworkInterfaceAttributeResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dniarAttachment' @::@ 'Maybe' 'NetworkInterfaceAttachment'
--
-- * 'dniarDescription' @::@ 'Maybe' 'AttributeValue'
--
-- * 'dniarGroups' @::@ ['GroupIdentifier']
--
-- * 'dniarNetworkInterfaceId' @::@ 'Maybe' 'Text'
--
-- * 'dniarSourceDestCheck' @::@ 'Maybe' 'AttributeBooleanValue'
--
describeNetworkInterfaceAttributeResponse :: DescribeNetworkInterfaceAttributeResponse
describeNetworkInterfaceAttributeResponse = DescribeNetworkInterfaceAttributeResponse
{ _dniarNetworkInterfaceId = Nothing
, _dniarDescription = Nothing
, _dniarSourceDestCheck = Nothing
, _dniarGroups = mempty
, _dniarAttachment = Nothing
}
-- | The attachment (if any) of the network interface.
dniarAttachment :: Lens' DescribeNetworkInterfaceAttributeResponse (Maybe NetworkInterfaceAttachment)
dniarAttachment = lens _dniarAttachment (\s a -> s { _dniarAttachment = a })
-- | The description of the network interface.
dniarDescription :: Lens' DescribeNetworkInterfaceAttributeResponse (Maybe AttributeValue)
dniarDescription = lens _dniarDescription (\s a -> s { _dniarDescription = a })
-- | The security groups associated with the network interface.
dniarGroups :: Lens' DescribeNetworkInterfaceAttributeResponse [GroupIdentifier]
dniarGroups = lens _dniarGroups (\s a -> s { _dniarGroups = a }) . _List
-- | The ID of the network interface.
dniarNetworkInterfaceId :: Lens' DescribeNetworkInterfaceAttributeResponse (Maybe Text)
dniarNetworkInterfaceId =
lens _dniarNetworkInterfaceId (\s a -> s { _dniarNetworkInterfaceId = a })
-- | Indicates whether source/destination checking is enabled.
dniarSourceDestCheck :: Lens' DescribeNetworkInterfaceAttributeResponse (Maybe AttributeBooleanValue)
dniarSourceDestCheck =
lens _dniarSourceDestCheck (\s a -> s { _dniarSourceDestCheck = a })
instance ToPath DescribeNetworkInterfaceAttribute where
toPath = const "/"
instance ToQuery DescribeNetworkInterfaceAttribute where
toQuery DescribeNetworkInterfaceAttribute{..} = mconcat
[ "Attribute" =? _dniaAttribute
, "DryRun" =? _dniaDryRun
, "NetworkInterfaceId" =? _dniaNetworkInterfaceId
]
instance ToHeaders DescribeNetworkInterfaceAttribute
instance AWSRequest DescribeNetworkInterfaceAttribute where
type Sv DescribeNetworkInterfaceAttribute = EC2
type Rs DescribeNetworkInterfaceAttribute = DescribeNetworkInterfaceAttributeResponse
request = post "DescribeNetworkInterfaceAttribute"
response = xmlResponse
instance FromXML DescribeNetworkInterfaceAttributeResponse where
parseXML x = DescribeNetworkInterfaceAttributeResponse
<$> x .@? "attachment"
<*> x .@? "description"
<*> x .@? "groupSet" .!@ mempty
<*> x .@? "networkInterfaceId"
<*> x .@? "sourceDestCheck"
| kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeNetworkInterfaceAttribute.hs | mpl-2.0 | 6,771 | 0 | 16 | 1,304 | 848 | 503 | 345 | 92 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -Wall #-}
{- |
Module : Core
Description : Abstract syntax and pretty printer for Core.
Copyright : (c) 2014—2015 The F2J Project Developers (given in AUTHORS.txt)
License : BSD3
Maintainer : Zhiyuan Shi <[email protected]>, Haoyuan Zhang <[email protected]>
Stability : experimental
Portability : portable
-}
module Core
( Type(..)
, Expr(..)
, Alt(..)
, TypeContext
, ValueContext
, Index
, Constructor(..)
, DataBind(..)
, alphaEq
, mapTVar
, mapVar
, fsubstTT
, fsubstTE
, fsubstEE
, joinType
, tVar
, Core.forall
, var
, lam
, fix
, bLam
, prettyType
, prettyExpr
, javaInt
) where
import qualified Src
import JavaUtils
import PrettyUtils
import Text.PrettyPrint.ANSI.Leijen
import qualified Language.Java.Pretty (prettyPrint)
import Data.List (intersperse)
import qualified Data.Map as Map
import qualified Data.Set as Set
data Type t
= TVar Src.ReaderId t -- a
| JClass ClassName -- C
| Fun (Type t) (Type t) -- t1 -> t2
| Forall Src.ReaderId (t -> Type t) -- forall a. t
| Product [Type t] -- (t1, ..., tn)
| Unit
| ListOf (Type t)
| Datatype Src.ReaderId [Type t] [Src.ReaderId]
data Expr t e
= Var Src.ReaderId e
| Lit Src.Lit
-- Binders we have: λ, fix, letrec, and Λ
| Lam Src.ReaderId (Type t) (e -> Expr t e)
| Fix Src.ReaderId Src.ReaderId
(e -> e -> Expr t e)
(Type t) -- t1
(Type t) -- t
-- fix x (x1 : t1) : t. e Syntax in the tal-toplas paper
-- fix (x : t1 -> t). \x1. e Alternative syntax, which is arguably clearer
-- <name>: Fix funcName paraName func paraType returnType
| Let Src.ReaderId (Expr t e) (e -> Expr t e)
| LetRec [Src.ReaderId] -- Names
[Type t] -- Signatures
([e] -> [Expr t e]) -- Bindings
([e] -> Expr t e) -- Body
| BLam Src.ReaderId (t -> Expr t e)
| App (Expr t e) (Expr t e)
| TApp (Expr t e) (Type t)
| If (Expr t e) (Expr t e) (Expr t e)
| PrimOp (Expr t e) Src.Operator (Expr t e)
-- SystemF extension from:
-- https://www.cs.princeton.edu/~dpw/papers/tal-toplas.pdf
-- (no int restriction)
| Tuple [Expr t e] -- Tuple introduction
| Proj Int (Expr t e) -- Tuple elimination
-- Java
| JNew ClassName [Expr t e]
| JMethod (Src.JCallee (Expr t e)) MethodName [Expr t e] ClassName
| JField (Src.JCallee (Expr t e)) FieldName ClassName
| PolyList [Expr t e] (Type t)
| JProxyCall (Expr t e) (Type t)
| Seq [Expr t e]
| Data Src.RecFlag [DataBind t] (Expr t e)
| Constr (Constructor t) [Expr t e]
| Case (Expr t e) [Alt t e]
data DataBind t = DataBind Src.ReaderId [Src.ReaderId] ([t] -> [Constructor t])
data Alt t e = ConstrAlt (Constructor t) [Src.ReaderId] ([e] -> Expr t e)
-- | Default (Expr t e)
data Constructor t = Constructor {constrName :: Src.ReaderId, constrParams :: [Type t]}
type TypeContext t = Set.Set t
type ValueContext t e = Map.Map e (Type t)
type Index = Int
alphaEq :: Int -> Type Index -> Type Index -> Bool
alphaEq _ (TVar _ a) (TVar _ b) = a == b
alphaEq _ (JClass c) (JClass d) = c == d
alphaEq i (Fun s1 s2) (Fun t1 t2) = alphaEq i s1 t1 && alphaEq i s2 t2
alphaEq i (Forall _ f) (Forall _ g) = alphaEq (succ i) (f i) (g i)
alphaEq i (Product ss) (Product ts) = length ss == length ts && uncurry (alphaEq i) `all` zip ss ts
alphaEq _ Unit Unit = True
alphaEq i (ListOf t1) (ListOf t2) = alphaEq i t1 t2
alphaEq _ _ _ = False
mapTVar :: (Src.ReaderId -> t -> Type t) -> Type t -> Type t
mapTVar g (TVar n a) = g n a
mapTVar _ (JClass c) = JClass c
mapTVar g (Fun t1 t2) = Fun (mapTVar g t1) (mapTVar g t2)
mapTVar g (Forall n f) = Forall n (mapTVar g . f)
mapTVar g (Product ts) = Product (map (mapTVar g) ts)
mapTVar _ Unit = Unit
mapTVar g (Datatype n ts ns) = Datatype n (map (mapTVar g) ts) ns
mapTVar g (ListOf t) = ListOf (mapTVar g t)
mapVar :: (Src.ReaderId -> e -> Expr t e) -> (Type t -> Type t) -> Expr t e -> Expr t e
mapVar g _ (Var n a) = g n a
mapVar _ _ (Lit n) = Lit n
mapVar g h (Lam n t f) = Lam n (h t) (mapVar g h . f)
mapVar g h (BLam n f) = BLam n (mapVar g h . f)
mapVar g h (Fix n1 n2 f t1 t) = Fix n1 n2 (\x x1 -> mapVar g h (f x x1)) (h t1) (h t)
mapVar g h (Let n b e) = Let n (mapVar g h b) (mapVar g h . e)
mapVar g h (LetRec ns ts bs e) = LetRec ns (map h ts) (map (mapVar g h) . bs) (mapVar g h . e)
mapVar g h (Data rec databinds e) = Data rec (map mapDatabind databinds) (mapVar g h e)
where mapDatabind (DataBind name params ctrs) = DataBind name params (map mapCtr. ctrs)
mapCtr (Constructor n ts) = Constructor n (map h ts)
mapVar g h (Constr (Constructor n ts) es) = Constr c' (map (mapVar g h) es)
where c' = Constructor n (map h ts)
mapVar g h (Case e alts) = Case (mapVar g h e) (map mapAlt alts)
where mapAlt (ConstrAlt (Constructor n ts) ns f) = ConstrAlt (Constructor n (map h ts)) ns ((mapVar g h) . f)
mapVar g h (App f e) = App (mapVar g h f) (mapVar g h e)
mapVar g h (TApp f t) = TApp (mapVar g h f) (h t)
mapVar g h (If p b1 b2) = If (mapVar g h p) (mapVar g h b1) (mapVar g h b2)
mapVar g h (PrimOp e1 op e2) = PrimOp (mapVar g h e1) op (mapVar g h e2)
mapVar g h (Tuple es) = Tuple (map (mapVar g h) es)
mapVar g h (Proj i e) = Proj i (mapVar g h e)
mapVar g h (JNew c args) = JNew c (map (mapVar g h) args)
mapVar g h (JMethod callee m args c) = JMethod (fmap (mapVar g h) callee) m (map (mapVar g h) args) c
mapVar g h (JField callee f c) = JField (fmap (mapVar g h) callee) f c
mapVar g h (Seq es) = Seq (map (mapVar g h) es)
mapVar g h (PolyList es t) = PolyList (map (mapVar g h) es) (h t)
mapVar g h (JProxyCall jmethod t) = JProxyCall (mapVar g h jmethod) (h t)
fsubstTT :: Eq a => a -> Type a -> Type a -> Type a
fsubstTT x r = mapTVar (\n a -> if a == x then r else TVar n a)
fsubstTE :: Eq t => t -> Type t -> Expr t e -> Expr t e
fsubstTE x r = mapVar Var (fsubstTT x r)
fsubstEE :: Eq a => a -> Expr t a -> Expr t a -> Expr t a
fsubstEE x r = mapVar (\n a -> if a == x then r else Var n a) id
joinType :: Type (Type t) -> Type t
joinType (TVar _ a) = a
joinType (JClass c) = JClass c
joinType (Fun t1 t2) = Fun (joinType t1) (joinType t2)
joinType (Forall n g) = Forall n (joinType . g . TVar "_") -- Right?
joinType (Product ts) = Product (map joinType ts)
joinType Unit = Unit
joinType (Datatype n ts ns) = Datatype n (map joinType ts) ns
joinType (ListOf t) = ListOf (joinType t)
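-- The smart constructors below (tVar, forall, var, lam, fix, bLam) simply
-- fill in the placeholder name "_" where no reader-friendly identifier is
-- available.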
tVar :: t -> Type t
tVar = TVar "_"
forall :: (t -> Type t) -> Type t
forall f = Forall "_" f
var :: e -> Expr t e
var = Var "_"
lam :: Type t -> (e -> Expr t e) -> Expr t e
lam = Lam "_"
fix :: (e -> e -> Expr t e) -> Type t -> Type t -> Expr t e
fix = Fix "_" "_"
bLam :: (t -> Expr t e) -> Expr t e
bLam = BLam "_"
-- instance Show (Type Index) where
-- show = show . pretty
-- instance Pretty (Type Index) where
-- pretty = prettyType
prettyType :: Type Index -> Doc
prettyType = prettyType' basePrec 0
prettyType' :: Prec -> Index -> Type Index -> Doc
prettyType' _ _ (TVar n _) = text n
prettyType' p i (Datatype n tvars _) = hsep $ text n : map (prettyType' p i) tvars
prettyType' p i (Fun t1 t2) =
parensIf p 2
(prettyType' (2,PrecPlus) i t1 <+> arrow <+> prettyType' (2,PrecMinus) i t2)
prettyType' p i (Forall n f) =
parensIf p 1
(PrettyUtils.forall <+> text n <> dot <+>
prettyType' (1,PrecMinus) (succ i) (f i))
prettyType' _ i (Product ts) = parens $ hcat (intersperse comma (map (prettyType' basePrec i) ts))
prettyType' _ _ Unit = text "Unit"
prettyType' _ _ (JClass "java.lang.Integer") = text "Int"
prettyType' _ _ (JClass "java.lang.String") = text "String"
prettyType' _ _ (JClass "java.lang.Boolean") = text "Bool"
prettyType' _ _ (JClass "java.lang.Character") = text "Char"
prettyType' _ _ (JClass c) = text c
prettyType' p i (ListOf t) = text "List" <+> prettyType' p i t
-- instance Show (Expr Index Index) where
-- show = show . pretty
-- instance Pretty (Expr Index Index) where
-- pretty = prettyExpr
prettyExpr :: Expr Index Index -> Doc
prettyExpr = prettyExpr' basePrec (0, 0)
prettyExpr' :: Prec -> (Index, Index) -> Expr Index Index -> Doc
prettyExpr' _ _ (Var n _) = text n
prettyExpr' p (i,j) (Lam n t f)
= parensIf p 2 $ group $ hang 2 $
lambda <+> parens (text n <+> colon <+> prettyType' basePrec i t) <+> text "->" <$>
prettyExpr' (2,PrecMinus) (i, j + 1) (f j)
prettyExpr' p (i,j) (App e1 e2)
= parensIf p 4 $
group $ hang 2 $ prettyExpr' (4,PrecMinus) (i,j) e1 <$> prettyExpr' (4,PrecPlus) (i,j) e2
prettyExpr' p (i,j) (BLam n f) =
parensIf p 2
(biglambda <+> text n <+> text "->" <+>
prettyExpr' (2,PrecMinus) (succ i, j) (f i))
prettyExpr' p (i,j) (TApp e t) =
parensIf p 4
(group $ hang 2 $ prettyExpr' (4,PrecMinus) (i,j) e <$> brackets (prettyType' basePrec i t))
prettyExpr' _ _ (Lit (Src.Int n)) = integer n
prettyExpr' _ _ (Lit (Src.String s)) = dquotes (string s)
prettyExpr' _ _ (Lit (Src.Bool b)) = bool b
prettyExpr' _ _ (Lit (Src.Char c)) = char c
prettyExpr' _ _ (Lit Src.UnitLit) = unit
prettyExpr' p (i,j) (If e1 e2 e3)
= parensIf p prec
(hang 3 (text "if" <+> prettyExpr' (prec,PrecMinus) (i,j) e1 <+>
text "then" <+> prettyExpr' (prec,PrecMinus) (i,j) e2 <+>
text "else" <+> prettyExpr' (prec,PrecMinus) (i,j) e3))
where prec = 3
prettyExpr' p (i,j) (PrimOp e1 op e2)
= parens (prettyExpr' p (i,j) e1 <+> pretty_op <+> prettyExpr' p (i,j) e2)
where
pretty_op = text (Language.Java.Pretty.prettyPrint java_op)
java_op = case op of
Src.Arith op' -> op'
Src.Compare op' -> op'
Src.Logic op' -> op'
prettyExpr' _ (i,j) (Tuple es) = tupled (map (prettyExpr' basePrec (i,j)) es)
prettyExpr' p i (Proj n e) =
parensIf p 5
(prettyExpr' (5,PrecMinus) i e <> dot <> char '_' <> int n)
prettyExpr' _ (i,j) (JNew c args) =
parens (text "new" <+> text c <> tupled (map (prettyExpr' basePrec (i,j)) args))
prettyExpr' _ i (JMethod name m args _) = methodStr name <> dot <> text m <> tupled (map (prettyExpr' basePrec i) args)
where
methodStr (Src.Static x) = text x
methodStr (Src.NonStatic x) = prettyExpr' (6,PrecMinus) i x
prettyExpr' _ i (JField name f _) = fieldStr name <> dot <> text f
where
fieldStr (Src.Static x) = text x
fieldStr (Src.NonStatic x) = prettyExpr' (6,PrecMinus) i x
prettyExpr' p i (PolyList es t) = brackets . hcat . intersperse comma . map (prettyExpr' p i) $ es
prettyExpr' p i (JProxyCall jmethod t) = prettyExpr' p i jmethod
prettyExpr' p (i,j) (Seq es) = semiBraces (map (prettyExpr' p (i,j)) es)
prettyExpr' p (i,j) (Fix n1 n2 f t1 t)
= parens $ group $ hang 2 $
text "fix" <+> text n1 <+>
parens (text n2 <+> colon <+> prettyType' p i t1) <+>
colon <+> prettyType' p i t <> dot <$>
prettyExpr' p (i, j + 2) (f j (j + 1))
prettyExpr' _ (i,j) (Let n b e) =
text "let" <+> text n <+> equals <+> prettyExpr' basePrec (i, j + 1) b <$> text "in" <$>
prettyExpr' basePrec (i, j + 1) (e j)
prettyExpr' p (i,j) (LetRec names sigs binds body)
= text "let" <+> text "rec" <$>
vcat (intersperse (text "and") (map (indent 2) pretty_binds)) <$>
text "in" <$>
pretty_body
where
n = length sigs
ids = [i..(i+n-1)]
pretty_ids = map text names
pretty_sigs = map (prettyType' p i) sigs
pretty_defs = map (prettyExpr' p (i, j + n)) (binds ids)
pretty_binds = zipWith3 (\pretty_id pretty_sig pretty_def ->
pretty_id <+> colon <+> pretty_sig <$> indent 2 (equals <+> pretty_def))
pretty_ids pretty_sigs pretty_defs
pretty_body = prettyExpr' p (i, j + n) (body ids)
prettyExpr' p (i,j) (Data recflag databinds e) =
text "data" <+> (pretty recflag) <+> (align .vsep) (map prettyDatabind databinds) <$> prettyExpr' p (i,j) e
where prettyCtr i' (Constructor ctrName ctrParams) = (text ctrName) <+> (hsep. map (prettyType' p i') $ ctrParams)
prettyDatabind (DataBind n tvars cons) = hsep (map text $ n:tvars) <+> align
(equals <+> intersperseBar (map (prettyCtr (i+ (length tvars)))$ cons [i..(i-1+(length tvars))]) <$$> semi)
prettyExpr' p (i,j) (Constr c es) = parens $ hsep $ text (constrName c) : map (prettyExpr' p (i,j)) es
prettyExpr' p (i,j) (Case e alts) =
hang 2 $ text "case" <+> prettyExpr' p (i,j) e <+> text "of" <$> text " " <+> Src.intersperseBar (map pretty_alt alts)
where pretty_alt (ConstrAlt c ns es) =
let n = length ns
ids = [j..j+n-1]
in hsep (text (constrName c) : (map prettyVar ids)) <+> arrow <+> prettyExpr' p (i, j+n) (es ids)
javaInt :: Type t
javaInt = JClass "java.lang.Integer"
| wxzh/fcore | lib/Core.hs | bsd-2-clause | 13,701 | 1 | 21 | 4,052 | 6,203 | 3,158 | 3,045 | 259 | 5 |
{-# LANGUAGE RecordWildCards #-}
module Main (main) where
import Data.Traversable
import Control.Monad.Except
import Options.Applicative
import qualified Kalium
data Options = Options
{ filenameIn :: String
, filenameOut :: String
, logSwitch :: Bool
, patSigSwitch :: Bool
}
pOptions :: Parser Options
pOptions = Options
<$> strArgument (metavar "FILENAME.PAS" <> help "Source Pascal program")
<*> strArgument (metavar "FILENAME.HS" <> help "Target Haskell program")
<*> switch (long "log" <> help "Print intermediate results")
<*> switch (long "pat" <> help "Generate pattern signatures")
handleOptions :: ParserInfo Options
handleOptions = info (helper <*> pOptions) (header "Kalium")
main :: IO ()
main = execParser handleOptions >>= processFiles
processFiles :: Options -> IO ()
processFiles Options{..} = do
content <- readFile filenameIn
case runExcept (Kalium.translate patSigSwitch content) of
Left e -> print e
Right (log, r) -> do
when logSwitch $ do
void $ for log $ \repr -> do
putStrLn repr
putStr "\n\n"
writeFile filenameOut r
| rscprof/kalium | kalium-cli/Main.hs | bsd-3-clause | 1,193 | 0 | 20 | 302 | 344 | 171 | 173 | 32 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
#ifndef MIN_VERSION_profunctors
#define MIN_VERSION_profunctors(x,y,z) 1
#endif
#if __GLASGOW_HASKELL__ < 708 || !(MIN_VERSION_profunctors(4,4,0))
{-# LANGUAGE Trustworthy #-}
#endif
#if __GLASGOW_HASKELL__ && __GLASGOW_HASKELL__ >= 704
{-# LANGUAGE NoPolyKinds #-}
{-# LANGUAGE NoDataKinds #-}
#endif
-------------------------------------------------------------------------------
-- |
-- Module : Control.Lens.Getter
-- Copyright : (C) 2012-15 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : Rank2Types
--
--
-- A @'Getter' s a@ is just any function @(s -> a)@, which we've flipped
-- into continuation passing style, @(a -> r) -> s -> r@ and decorated
-- with 'Const' to obtain:
--
-- @type 'Getting' r s a = (a -> 'Const' r a) -> s -> 'Const' r s@
--
-- If we restrict access to knowledge about the type 'r', we could get:
--
-- @type 'Getter' s a = forall r. 'Getting' r s a@
--
-- However, for 'Getter' (but not for 'Getting') we actually permit any
-- functor @f@ which is an instance of both 'Functor' and 'Contravariant':
--
-- @type 'Getter' s a = forall f. ('Contravariant' f, 'Functor' f) => (a -> f a) -> s -> f s@
--
-- Everything you can do with a function, you can do with a 'Getter', but
-- note that because of the continuation passing style ('.') composes them
-- in the opposite order.
--
-- Since it is only a function, every 'Getter' obviously only retrieves a
-- single value for a given input.
--
-------------------------------------------------------------------------------
module Control.Lens.Getter
(
-- * Getters
Getter, IndexedGetter
, Getting, IndexedGetting
, Accessing
-- * Building Getters
, to
, ito
, like
, ilike
-- * Combinators for Getters and Folds
, (^.)
, view, views
, use, uses
, listening, listenings
-- * Indexed Getters
-- ** Indexed Getter Combinators
, (^@.)
, iview, iviews
, iuse, iuses
, ilistening, ilistenings
-- * Implementation Details
, Contravariant(..)
, coerce, coerced
, Const(..)
) where
import Control.Applicative
import Control.Lens.Internal.Getter
import Control.Lens.Internal.Indexed
import Control.Lens.Type
import Control.Monad.Reader.Class as Reader
import Control.Monad.State as State
import Control.Monad.Writer as Writer
import Data.Functor.Contravariant
import Data.Profunctor
import Data.Profunctor.Unsafe
-- $setup
-- >>> :set -XNoOverloadedStrings
-- >>> import Control.Lens
-- >>> import Data.List.Lens
-- >>> import Debug.SimpleReflect.Expr
-- >>> import Debug.SimpleReflect.Vars as Vars hiding (f,g)
-- >>> let f :: Expr -> Expr; f = Debug.SimpleReflect.Vars.f
-- >>> let g :: Expr -> Expr; g = Debug.SimpleReflect.Vars.g
infixl 8 ^., ^@.
-------------------------------------------------------------------------------
-- Getters
-------------------------------------------------------------------------------
-- | Build an (index-preserving) 'Getter' from an arbitrary Haskell function.
--
-- @
-- 'to' f '.' 'to' g ≡ 'to' (g '.' f)
-- @
--
-- @
-- a '^.' 'to' f ≡ f a
-- @
--
-- >>> a ^.to f
-- f a
--
-- >>> ("hello","world")^.to snd
-- "world"
--
-- >>> 5^.to succ
-- 6
--
-- >>> (0, -5)^._2.to abs
-- 5
--
-- @
-- 'to' :: (s -> a) -> 'IndexPreservingGetter' s a
-- @
to :: (Profunctor p, Contravariant f) => (s -> a) -> Optical' p p f s a
to k = dimap k (contramap k)
{-# INLINE to #-}
-- |
-- @
-- 'ito' :: (s -> (i, a)) -> 'IndexedGetter' i s a
-- @
ito :: (Indexable i p, Contravariant f) => (s -> (i, a)) -> Optical' p (->) f s a
ito k = dimap k (contramap (snd . k)) . uncurry . indexed
{-# INLINE ito #-}
-- | Build a constant-valued (index-preserving) 'Getter' from an arbitrary Haskell value.
--
-- @
-- 'like' a '.' 'like' b ≡ 'like' b
-- a '^.' 'like' b ≡ b
-- a '^.' 'like' b ≡ a '^.' 'to' ('const' b)
-- @
--
-- This can be useful as a second case 'failing' a 'Fold'
-- e.g. @foo `failing` 'like' 0@
--
-- @
-- 'like' :: a -> 'IndexPreservingGetter' s a
-- @
like :: (Profunctor p, Contravariant f) => a -> Optical' p p f s a
like a = to (const a)
{-# INLINE like #-}
-- |
-- @
-- 'ilike' :: i -> a -> 'IndexedGetter' i s a
-- @
ilike :: (Indexable i p, Contravariant f) => i -> a -> Optical' p (->) f s a
ilike i a = ito (const (i, a))
{-# INLINE ilike #-}
-- | When you see this in a type signature it indicates that you can
-- pass the function a 'Lens', 'Getter',
-- 'Control.Lens.Traversal.Traversal', 'Control.Lens.Fold.Fold',
-- 'Control.Lens.Prism.Prism', 'Control.Lens.Iso.Iso', or one of
-- the indexed variants, and it will just \"do the right thing\".
--
-- Most 'Getter' combinators are able to be used with both a 'Getter' or a
-- 'Control.Lens.Fold.Fold' in limited situations, to do so, they need to be
-- monomorphic in what we are going to extract with 'Control.Applicative.Const'. To be compatible
-- with 'Lens', 'Control.Lens.Traversal.Traversal' and
-- 'Control.Lens.Iso.Iso' we also restricted choices of the irrelevant @t@ and
-- @b@ parameters.
--
-- If a function accepts a @'Getting' r s a@, then when @r@ is a 'Data.Monoid.Monoid', then
-- you can pass a 'Control.Lens.Fold.Fold' (or
-- 'Control.Lens.Traversal.Traversal'), otherwise you can only pass this a
-- 'Getter' or 'Lens'.
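--
-- For instance (an illustrative note, not from the original docs), 'view'
-- consumes a @'Getting' a s a@, instantiating @r@ at the result type itself,
-- whereas fold-like consumers instantiate @r@ at some 'Data.Monoid.Monoid'
-- so that multiple targets can be combined.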
type Getting r s a = (a -> Const r a) -> s -> Const r s
-- | Used to consume an 'Control.Lens.Fold.IndexedFold'.
type IndexedGetting i m s a = Indexed i a (Const m a) -> s -> Const m s
-- | This is a convenient alias used when consuming (indexed) getters and (indexed) folds
-- in a highly general fashion.
type Accessing p m s a = p a (Const m a) -> s -> Const m s
-------------------------------------------------------------------------------
-- Getting Values
-------------------------------------------------------------------------------
-- | View the value pointed to by a 'Getter', 'Control.Lens.Iso.Iso' or
-- 'Lens' or the result of folding over all the results of a
-- 'Control.Lens.Fold.Fold' or 'Control.Lens.Traversal.Traversal' that points
-- at a monoidal value.
--
-- @
-- 'view' '.' 'to' ≡ 'id'
-- @
--
-- >>> view (to f) a
-- f a
--
-- >>> view _2 (1,"hello")
-- "hello"
--
-- >>> view (to succ) 5
-- 6
--
-- >>> view (_2._1) ("hello",("world","!!!"))
-- "world"
--
--
-- As 'view' is commonly used to access the target of a 'Getter' or obtain a monoidal summary of the targets of a 'Fold',
-- it may be useful to think of it as having one of these more restricted signatures:
--
-- @
-- 'view' :: 'Getter' s a -> s -> a
-- 'view' :: 'Data.Monoid.Monoid' m => 'Control.Lens.Fold.Fold' s m -> s -> m
-- 'view' :: 'Control.Lens.Iso.Iso'' s a -> s -> a
-- 'view' :: 'Lens'' s a -> s -> a
-- 'view' :: 'Data.Monoid.Monoid' m => 'Control.Lens.Traversal.Traversal'' s m -> s -> m
-- @
--
-- In a more general setting, such as when working with a 'Monad' transformer stack you can use:
--
-- @
-- 'view' :: 'MonadReader' s m => 'Getter' s a -> m a
-- 'view' :: ('MonadReader' s m, 'Data.Monoid.Monoid' a) => 'Control.Lens.Fold.Fold' s a -> m a
-- 'view' :: 'MonadReader' s m => 'Control.Lens.Iso.Iso'' s a -> m a
-- 'view' :: 'MonadReader' s m => 'Lens'' s a -> m a
-- 'view' :: ('MonadReader' s m, 'Data.Monoid.Monoid' a) => 'Control.Lens.Traversal.Traversal'' s a -> m a
-- @
view :: MonadReader s m => Getting a s a -> m a
view l = Reader.asks (getConst #. l Const)
{-# INLINE view #-}
-- | View a function of the value pointed to by a 'Getter' or 'Lens' or the result of
-- folding over the result of mapping the targets of a 'Control.Lens.Fold.Fold' or
-- 'Control.Lens.Traversal.Traversal'.
--
-- @
-- 'views' l f ≡ 'view' (l '.' 'to' f)
-- @
--
-- >>> views (to f) g a
-- g (f a)
--
-- >>> views _2 length (1,"hello")
-- 5
--
-- As 'views' is commonly used to access the target of a 'Getter' or obtain a monoidal summary of the targets of a 'Fold',
-- it may be useful to think of it as having one of these more restricted signatures:
--
-- @
-- 'views' :: 'Getter' s a -> (a -> r) -> s -> r
-- 'views' :: 'Data.Monoid.Monoid' m => 'Control.Lens.Fold.Fold' s a -> (a -> m) -> s -> m
-- 'views' :: 'Control.Lens.Iso.Iso'' s a -> (a -> r) -> s -> r
-- 'views' :: 'Lens'' s a -> (a -> r) -> s -> r
-- 'views' :: 'Data.Monoid.Monoid' m => 'Control.Lens.Traversal.Traversal'' s a -> (a -> m) -> s -> m
-- @
--
-- In a more general setting, such as when working with a 'Monad' transformer stack you can use:
--
-- @
-- 'views' :: 'MonadReader' s m => 'Getter' s a -> (a -> r) -> m r
-- 'views' :: ('MonadReader' s m, 'Data.Monoid.Monoid' r) => 'Control.Lens.Fold.Fold' s a -> (a -> r) -> m r
-- 'views' :: 'MonadReader' s m => 'Control.Lens.Iso.Iso'' s a -> (a -> r) -> m r
-- 'views' :: 'MonadReader' s m => 'Lens'' s a -> (a -> r) -> m r
-- 'views' :: ('MonadReader' s m, 'Data.Monoid.Monoid' r) => 'Control.Lens.Traversal.Traversal'' s a -> (a -> r) -> m r
-- @
--
-- @
-- 'views' :: 'MonadReader' s m => 'Getting' r s a -> (a -> r) -> m r
-- @
views :: (Profunctor p, MonadReader s m) => Optical p (->) (Const r) s s a a -> p a r -> m r
views l f = Reader.asks (getConst #. l (Const #. f))
{-# INLINE views #-}
-- | View the value pointed to by a 'Getter' or 'Lens' or the
-- result of folding over all the results of a 'Control.Lens.Fold.Fold' or
-- 'Control.Lens.Traversal.Traversal' that points at a monoidal value.
--
-- This is the same operation as 'view' with the arguments flipped.
--
-- The fixity and semantics are such that subsequent field accesses can be
-- performed with ('Prelude..').
--
-- >>> (a,b)^._2
-- b
--
-- >>> ("hello","world")^._2
-- "world"
--
-- >>> import Data.Complex
-- >>> ((0, 1 :+ 2), 3)^._1._2.to magnitude
-- 2.23606797749979
--
-- @
-- ('^.') :: s -> 'Getter' s a -> a
-- ('^.') :: 'Data.Monoid.Monoid' m => s -> 'Control.Lens.Fold.Fold' s m -> m
-- ('^.') :: s -> 'Control.Lens.Iso.Iso'' s a -> a
-- ('^.') :: s -> 'Lens'' s a -> a
-- ('^.') :: 'Data.Monoid.Monoid' m => s -> 'Control.Lens.Traversal.Traversal'' s m -> m
-- @
(^.) :: s -> Getting a s a -> a
s ^. l = getConst (l Const s)
{-# INLINE (^.) #-}
-------------------------------------------------------------------------------
-- MonadState
-------------------------------------------------------------------------------
-- | Use the target of a 'Lens', 'Control.Lens.Iso.Iso', or
-- 'Getter' in the current state, or use a summary of a
-- 'Control.Lens.Fold.Fold' or 'Control.Lens.Traversal.Traversal' that points
-- to a monoidal value.
--
-- >>> evalState (use _1) (a,b)
-- a
--
-- >>> evalState (use _1) ("hello","world")
-- "hello"
--
-- @
-- 'use' :: 'MonadState' s m => 'Getter' s a -> m a
-- 'use' :: ('MonadState' s m, 'Data.Monoid.Monoid' r) => 'Control.Lens.Fold.Fold' s r -> m r
-- 'use' :: 'MonadState' s m => 'Control.Lens.Iso.Iso'' s a -> m a
-- 'use' :: 'MonadState' s m => 'Lens'' s a -> m a
-- 'use' :: ('MonadState' s m, 'Data.Monoid.Monoid' r) => 'Control.Lens.Traversal.Traversal'' s r -> m r
-- @
use :: MonadState s m => Getting a s a -> m a
use l = State.gets (view l)
{-# INLINE use #-}
-- | Use the target of a 'Lens', 'Control.Lens.Iso.Iso' or
-- 'Getter' in the current state, or use a summary of a
-- 'Control.Lens.Fold.Fold' or 'Control.Lens.Traversal.Traversal' that
-- points to a monoidal value.
--
-- >>> evalState (uses _1 length) ("hello","world")
-- 5
--
-- @
-- 'uses' :: 'MonadState' s m => 'Getter' s a -> (a -> r) -> m r
-- 'uses' :: ('MonadState' s m, 'Data.Monoid.Monoid' r) => 'Control.Lens.Fold.Fold' s a -> (a -> r) -> m r
-- 'uses' :: 'MonadState' s m => 'Lens'' s a -> (a -> r) -> m r
-- 'uses' :: 'MonadState' s m => 'Control.Lens.Iso.Iso'' s a -> (a -> r) -> m r
-- 'uses' :: ('MonadState' s m, 'Data.Monoid.Monoid' r) => 'Control.Lens.Traversal.Traversal'' s a -> (a -> r) -> m r
-- @
--
-- @
-- 'uses' :: 'MonadState' s m => 'Getting' r s t a b -> (a -> r) -> m r
-- @
uses :: (Profunctor p, MonadState s m) => Optical p (->) (Const r) s s a a -> p a r -> m r
uses l f = State.gets (views l f)
{-# INLINE uses #-}
-- | This is a generalized form of 'listen' that only extracts the portion of
-- the log that is focused on by a 'Getter'. If given a 'Fold' or a 'Traversal'
-- then a monoidal summary of the parts of the log that are visited will be
-- returned.
--
-- @
-- 'listening' :: 'MonadWriter' w m => 'Getter' w u -> m a -> m (a, u)
-- 'listening' :: 'MonadWriter' w m => 'Lens'' w u -> m a -> m (a, u)
-- 'listening' :: 'MonadWriter' w m => 'Iso'' w u -> m a -> m (a, u)
-- 'listening' :: ('MonadWriter' w m, 'Monoid' u) => 'Fold' w u -> m a -> m (a, u)
-- 'listening' :: ('MonadWriter' w m, 'Monoid' u) => 'Traversal'' w u -> m a -> m (a, u)
-- 'listening' :: ('MonadWriter' w m, 'Monoid' u) => 'Prism'' w u -> m a -> m (a, u)
-- @
listening :: MonadWriter w m => Getting u w u -> m a -> m (a, u)
listening l m = do
(a, w) <- listen m
return (a, view l w)
{-# INLINE listening #-}
-- | This is a generalized form of 'listen' that only extracts the portion of
-- the log that is focused on by a 'Getter'. If given a 'Fold' or a 'Traversal'
-- then a monoidal summary of the parts of the log that are visited will be
-- returned.
--
-- @
-- 'ilistening' :: 'MonadWriter' w m => 'IndexedGetter' i w u -> m a -> m (a, (i, u))
-- 'ilistening' :: 'MonadWriter' w m => 'IndexedLens'' i w u -> m a -> m (a, (i, u))
-- 'ilistening' :: ('MonadWriter' w m, 'Monoid' u) => 'IndexedFold' i w u -> m a -> m (a, (i, u))
-- 'ilistening' :: ('MonadWriter' w m, 'Monoid' u) => 'IndexedTraversal'' i w u -> m a -> m (a, (i, u))
-- @
ilistening :: MonadWriter w m => IndexedGetting i (i, u) w u -> m a -> m (a, (i, u))
ilistening l m = do
(a, w) <- listen m
return (a, iview l w)
{-# INLINE ilistening #-}
-- | This is a generalized form of 'listen' that only extracts the portion of
-- the log that is focused on by a 'Getter'. If given a 'Fold' or a 'Traversal'
-- then a monoidal summary of the parts of the log that are visited will be
-- returned.
--
-- @
-- 'listenings' :: 'MonadWriter' w m => 'Getter' w u -> (u -> v) -> m a -> m (a, v)
-- 'listenings' :: 'MonadWriter' w m => 'Lens'' w u -> (u -> v) -> m a -> m (a, v)
-- 'listenings' :: 'MonadWriter' w m => 'Iso'' w u -> (u -> v) -> m a -> m (a, v)
-- 'listenings' :: ('MonadWriter' w m, 'Monoid' v) => 'Fold' w u -> (u -> v) -> m a -> m (a, v)
-- 'listenings' :: ('MonadWriter' w m, 'Monoid' v) => 'Traversal'' w u -> (u -> v) -> m a -> m (a, v)
-- 'listenings' :: ('MonadWriter' w m, 'Monoid' v) => 'Prism'' w u -> (u -> v) -> m a -> m (a, v)
-- @
listenings :: MonadWriter w m => Getting v w u -> (u -> v) -> m a -> m (a, v)
listenings l uv m = do
(a, w) <- listen m
return (a, views l uv w)
{-# INLINE listenings #-}
-- | This is a generalized form of 'listen' that only extracts the portion of
-- the log that is focused on by a 'Getter'. If given a 'Fold' or a 'Traversal'
-- then a monoidal summary of the parts of the log that are visited will be
-- returned.
--
-- @
-- 'ilistenings' :: 'MonadWriter' w m => 'IndexedGetter' i w u -> (i -> u -> v) -> m a -> m (a, v)
-- 'ilistenings' :: 'MonadWriter' w m => 'IndexedLens'' i w u -> (i -> u -> v) -> m a -> m (a, v)
-- 'ilistenings' :: ('MonadWriter' w m, 'Monoid' v) => 'IndexedFold' i w u -> (i -> u -> v) -> m a -> m (a, v)
-- 'ilistenings' :: ('MonadWriter' w m, 'Monoid' v) => 'IndexedTraversal'' i w u -> (i -> u -> v) -> m a -> m (a, v)
-- @
ilistenings :: MonadWriter w m => IndexedGetting i v w u -> (i -> u -> v) -> m a -> m (a, v)
ilistenings l iuv m = do
(a, w) <- listen m
return (a, iviews l iuv w)
{-# INLINE ilistenings #-}
------------------------------------------------------------------------------
-- Indexed Getters
------------------------------------------------------------------------------
-- | View the index and value of an 'IndexedGetter' into the current environment as a pair.
--
-- When applied to an 'IndexedFold' the result will most likely be a nonsensical monoidal summary of
-- the indices tupled with a monoidal summary of the values and probably not whatever it is you wanted.
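--
-- An illustrative example (not in the original documentation):
--
-- > iview (ilike 7 "seven") ()  -- yields (7, "seven")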
iview :: MonadReader s m => IndexedGetting i (i,a) s a -> m (i,a)
iview l = asks (getConst #. l (Indexed $ \i -> Const #. (,) i))
{-# INLINE iview #-}
-- | View a function of the index and value of an 'IndexedGetter' into the current environment.
--
-- When applied to an 'IndexedFold' the result will be a monoidal summary instead of a single answer.
--
-- @
-- 'iviews' ≡ 'Control.Lens.Fold.ifoldMapOf'
-- @
iviews :: MonadReader s m => IndexedGetting i r s a -> (i -> a -> r) -> m r
iviews l = views l .# Indexed
{-# INLINE iviews #-}
-- | Use the index and value of an 'IndexedGetter' into the current state as a pair.
--
-- When applied to an 'IndexedFold' the result will most likely be a nonsensical monoidal summary of
-- the indices tupled with a monoidal summary of the values and probably not whatever it is you wanted.
iuse :: MonadState s m => IndexedGetting i (i,a) s a -> m (i,a)
iuse l = gets (getConst #. l (Indexed $ \i -> Const #. (,) i))
{-# INLINE iuse #-}
-- | Use a function of the index and value of an 'IndexedGetter' into the current state.
--
-- When applied to an 'IndexedFold' the result will be a monoidal summary instead of a single answer.
iuses :: MonadState s m => IndexedGetting i r s a -> (i -> a -> r) -> m r
iuses l = uses l .# Indexed
{-# INLINE iuses #-}
-- | View the index and value of an 'IndexedGetter' or 'IndexedLens'.
--
-- This is the same operation as 'iview' with the arguments flipped.
--
-- The fixity and semantics are such that subsequent field accesses can be
-- performed with ('Prelude..').
--
-- @
-- ('^@.') :: s -> 'IndexedGetter' i s a -> (i, a)
-- ('^@.') :: s -> 'IndexedLens'' i s a -> (i, a)
-- @
--
-- The result probably doesn't have much meaning when applied to an 'IndexedFold'.
(^@.) :: s -> IndexedGetting i (i, a) s a -> (i, a)
s ^@. l = getConst $ l (Indexed $ \i -> Const #. (,) i) s
{-# INLINE (^@.) #-}
-- | Coerce a 'Getter'-compatible 'LensLike' to a 'LensLike''. This
-- is useful when using a 'Traversal' that is not simple as a 'Getter' or a
-- 'Fold'.
coerced :: (Functor f, Contravariant f) => LensLike f s t a b -> LensLike' f s a
coerced l f = coerce . l (coerce . f)
| rpglover64/lens | src/Control/Lens/Getter.hs | bsd-3-clause | 18,862 | 0 | 14 | 4,184 | 2,078 | 1,274 | 804 | 103 | 1 |
{- |
This is the main module of FunGEN (Functional Game Engine), which re-exports the rest.
-}
{-
FunGEN - Functional Game Engine
http://joyful.com/fungen
Copyright (C)
2002 Andre Furtado <[email protected]>
2008, 2011-2013 Simon Michael <[email protected]>
This code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-}
module Graphics.UI.Fungen (
-- * Execution
-- | Starting and stopping a game.
module Graphics.UI.Fungen.Init,
-- * Types
-- | Some basic types.
module Graphics.UI.Fungen.Types,
-- * Images
-- | Loading BMP image files.
module Graphics.UI.Fungen.Loader,
-- * Text
-- | Printing text on the screen.
module Graphics.UI.Fungen.Text,
-- * Maps/backgrounds
-- | Game backgrounds, tile maps.
module Graphics.UI.Fungen.Map,
-- * Objects
-- | Game objects (sprites).
module Graphics.UI.Fungen.Objects,
-- * Input
-- | User input from mouse and keyboard.
module Graphics.UI.Fungen.Input,
-- * Timing
-- | Timing control.
module Graphics.UI.Fungen.Timer,
-- * Game
-- | Game management and various game utilities.
module Graphics.UI.Fungen.Game,
-- * Display
-- | Rendering the game window.
module Graphics.UI.Fungen.Display,
-- * Util
-- | Miscellaneous utilities.
module Graphics.UI.Fungen.Util
) where
import Graphics.UI.Fungen.Types
import Graphics.UI.Fungen.Util
import Graphics.UI.Fungen.Loader
import Graphics.UI.Fungen.Objects
import Graphics.UI.Fungen.Map
import Graphics.UI.Fungen.Game
import Graphics.UI.Fungen.Display
import Graphics.UI.Fungen.Input
import Graphics.UI.Fungen.Timer
import Graphics.UI.Fungen.Text
import Graphics.UI.Fungen.Init
| fffej/fungen | Graphics/UI/Fungen.hs | bsd-3-clause | 1,848 | 0 | 5 | 380 | 198 | 151 | 47 | 23 | 0 |
f = a b (\x -> c x d) | mpickering/hlint-refactor | tests/examples/Lambda25.hs | bsd-3-clause | 22 | 0 | 8 | 9 | 24 | 12 | 12 | 1 | 1 |
{-# LANGUAGE PatternGuards #-}
module Idris.Elab.Clause where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.DSL
import Idris.Error
import Idris.Delaborate
import Idris.Imports
import Idris.Elab.Term
import Idris.Coverage
import Idris.DataOpts
import Idris.Providers
import Idris.Primitives
import Idris.Inliner
import Idris.PartialEval
import Idris.Transforms
import Idris.DeepSeq
import Idris.Output (iputStrLn, pshow, iWarn, iRenderResult, sendHighlighting)
import IRTS.Lang
import Idris.Elab.AsPat
import Idris.Elab.Type
import Idris.Elab.Transform
import Idris.Elab.Utils
import Idris.Core.TT
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.Evaluate
import Idris.Core.Execute
import Idris.Core.Typecheck
import Idris.Core.CaseTree
import Idris.Docstrings hiding (Unchecked)
import Util.Pretty hiding ((<$>))
import Prelude hiding (id, (.))
import Control.Category
import Control.Applicative hiding (Const)
import Control.DeepSeq
import Control.Monad
import Control.Monad.State.Strict as State
import qualified Control.Monad.State.Lazy as LState
import Data.List
import Data.Maybe
import Debug.Trace
import qualified Data.Map as Map
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Char(isLetter, toLower)
import Data.List.Split (splitOn)
import Util.Pretty(pretty, text)
import Numeric
-- | Elaborate a collection of left-hand and right-hand pairs - that is, a
-- top-level definition.
elabClauses :: ElabInfo -> FC -> FnOpts -> Name -> [PClause] -> Idris ()
elabClauses info' fc opts n_in cs =
do let n = liftname info n_in
info = info' { elabFC = Just fc }
ctxt <- getContext
ist <- getIState
optimise <- getOptimise
let petrans = PETransform `elem` optimise
inacc <- map fst <$> fgetState (opt_inaccessible . ist_optimisation n)
-- Check n actually exists, with no definition yet
let tys = lookupTy n ctxt
let reflect = Reflection `elem` opts
checkUndefined n ctxt
unless (length tys > 1) $ do
fty <- case tys of
[] -> -- TODO: turn into a CAF if there's no arguments
-- question: CAFs in where blocks?
tclift $ tfail $ At fc (NoTypeDecl n)
[ty] -> return ty
let atys = map snd (getArgTys fty)
cs_elab <- mapM (elabClause info opts)
(zip [0..] cs)
-- pats_raw is the version we'll work with at compile time:
-- no simplification or PE
let (pats_in, cs_full) = unzip cs_elab
let pats_raw = map (simple_lhs (tt_ctxt ist)) pats_in
logLvl 3 $ "Elaborated patterns:\n" ++ show pats_raw
solveDeferred n
-- just ensure that the structure exists
fmodifyState (ist_optimisation n) id
addIBC (IBCOpt n)
ist <- getIState
-- Don't apply rules if this is a partial evaluation definition,
-- or we'll make something that just runs itself!
let tpats = case specNames opts of
Nothing -> transformPats ist pats_in
_ -> pats_in
-- If the definition is specialisable, this reduces the
-- RHS
pe_tm <- doPartialEval ist tpats
let pats_pe = if petrans
then map (simple_lhs (tt_ctxt ist)) pe_tm
else pats_raw
let tcase = opt_typecase (idris_options ist)
-- Look for 'static' names and generate new specialised
-- definitions for them, as well as generating rewrite rules
-- for partially evaluated definitions
newrules <- if petrans
then mapM (\ e -> case e of
Left _ -> return []
Right (l, r) -> elabPE info fc n r) pats_pe
else return []
-- Redo transforms with the newly generated transformations, so
-- that the specialised application we've just made gets
-- used in place of the general one
ist <- getIState
let pats_transformed = if petrans
then transformPats ist pats_pe
else pats_pe
-- Summary of what's about to happen: Definitions go:
--
-- pats_in -> pats -> pdef -> pdef'
-- addCaseDef builds case trees from <pdef> and <pdef'>
-- pdef is the compile-time pattern definition.
-- This will get further inlined to help with totality checking.
let pdef = map debind pats_raw
-- pdef_pe is the one which will get further optimised
-- for run-time, and, partially evaluated
let pdef_pe = map debind pats_transformed
logLvl 5 $ "Initial typechecked patterns:\n" ++ show pats_raw
logLvl 5 $ "Initial typechecked pattern def:\n" ++ show pdef
-- NOTE: Need to store original definition so that proofs which
-- rely on its structure aren't affected by any changes to the
-- inliner. Just use the inlined version to generate pdef' and to
-- help with later inlinings.
ist <- getIState
let pdef_inl = inlineDef ist pdef
numArgs <- tclift $ sameLength pdef
case specNames opts of
Just _ ->
do logLvl 3 $ "Partially evaluated:\n" ++ show pats_pe
_ -> return ()
logLvl 3 $ "Transformed:\n" ++ show pats_transformed
erInfo <- getErasureInfo <$> getIState
tree@(CaseDef scargs sc _) <- tclift $
simpleCase tcase (UnmatchedCase "Error") reflect CompileTime fc inacc atys pdef erInfo
cov <- coverage
pmissing <-
if cov && not (hasDefault cs)
then do missing <- genClauses fc n (map getLHS pdef) cs_full
-- missing <- genMissing n scargs sc
missing' <- filterM (checkPossible info fc True n) missing
let clhs = map getLHS pdef
logLvl 2 $ "Must be unreachable:\n" ++
showSep "\n" (map showTmImpls missing') ++
"\nAgainst: " ++
showSep "\n" (map (\t -> showTmImpls (delab ist t)) (map getLHS pdef))
-- filter out anything in missing' which is
-- matched by any of clhs. This might happen since
-- unification may force a variable to take a
-- particular form, rather than force a case
-- to be impossible.
return (filter (noMatch ist clhs) missing')
else return []
let pcover = null pmissing
-- pdef' is the version that gets compiled for run-time,
-- so we start from the partially evaluated version
pdef_in' <- applyOpts pdef_pe
let pdef' = map (simple_rt (tt_ctxt ist)) pdef_in'
logLvl 5 $ "After data structure transformations:\n" ++ show pdef'
ist <- getIState
-- let wf = wellFounded ist n sc
let tot | pcover || AssertTotal `elem` opts = Unchecked -- finish later
| PEGenerated `elem` opts = Generated
| otherwise = Partial NotCovering -- already know it's not total
-- case lookupCtxt (namespace info) n (idris_flags ist) of
-- [fs] -> if TotalFn `elem` fs
-- then case tot of
-- Total _ -> return ()
-- t -> tclift $ tfail (At fc (Msg (show n ++ " is " ++ show t)))
-- _ -> return ()
case tree of
CaseDef _ _ [] -> return ()
CaseDef _ _ xs -> mapM_ (\x ->
iputStrLn $ show fc ++
":warning - Unreachable case: " ++
show (delab ist x)) xs
let knowncovering = (pcover && cov) || AssertTotal `elem` opts
let defaultcase = if knowncovering
then STerm Erased
else UnmatchedCase $ "*** " ++
show fc ++
":unmatched case in " ++ show n ++
" ***"
tree' <- tclift $ simpleCase tcase defaultcase reflect
RunTime fc inacc atys pdef' erInfo
logLvl 3 $ "Unoptimised " ++ show n ++ ": " ++ show tree
logLvl 3 $ "Optimised: " ++ show tree'
ctxt <- getContext
ist <- getIState
let opt = idris_optimisation ist
putIState (ist { idris_patdefs = addDef n (force pdef_pe, force pmissing)
(idris_patdefs ist) })
let caseInfo = CaseInfo (inlinable opts) (inlinable opts) (dictionary opts)
case lookupTy n ctxt of
[ty] -> do ctxt' <- do ctxt <- getContext
tclift $
addCasedef n erInfo caseInfo
tcase defaultcase
reflect
(AssertTotal `elem` opts)
atys
inacc
pats_pe
pdef
pdef -- compile time
pdef_inl -- inlined
pdef' ty
ctxt
setContext ctxt'
addIBC (IBCDef n)
setTotality n tot
when (not reflect && PEGenerated `notElem` opts) $
do totcheck (fc, n)
defer_totcheck (fc, n)
when (tot /= Unchecked) $ addIBC (IBCTotal n tot)
i <- getIState
case lookupDef n (tt_ctxt i) of
(CaseOp _ _ _ _ _ cd : _) ->
let (scargs, sc) = cases_compiletime cd
(scargs', sc') = cases_runtime cd in
do let calls = findCalls sc' scargs'
let used = findUsedArgs sc' scargs'
-- let scg = buildSCG i sc scargs
-- add SCG later, when checking totality
let cg = CGInfo scargs' calls [] used [] -- TODO: remove this, not needed anymore
logLvl 2 $ "Called names: " ++ show cg
addToCG n cg
addToCalledG n (nub (map fst calls)) -- plus names in type!
addIBC (IBCCG n)
_ -> return ()
return ()
-- addIBC (IBCTotal n tot)
[] -> return ()
-- Check it's covering, if 'covering' option is used. Chase
-- all called functions, and fail if any of them are also
-- 'Partial NotCovering'
when (CoveringFn `elem` opts) $ checkAllCovering fc [] n n
where
noMatch i cs tm = all (\x -> case trim_matchClause i (delab' i x True True) tm of
Right _ -> False
Left miss -> True) cs
where
trim_matchClause i (PApp fcl fl ls) (PApp fcr fr rs)
= let args = min (length ls) (length rs) in
matchClause i (PApp fcl fl (take args ls))
(PApp fcr fr (take args rs))
checkUndefined n ctxt = case lookupDef n ctxt of
[] -> return ()
[TyDecl _ _] -> return ()
_ -> tclift $ tfail (At fc (AlreadyDefined n))
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
depat acc (Bind n (PVar t) sc) = depat (n : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
hasDefault cs | (PClause _ _ last _ _ _ :_) <- reverse cs
, (PApp fn s args) <- last = all ((==Placeholder) . getTm) args
hasDefault _ = False
getLHS (_, l, _) = l
simple_lhs ctxt (Right (x, y)) = Right (normalise ctxt [] x, y)
simple_lhs ctxt t = t
simple_rt ctxt (p, x, y) = (p, x, force (uniqueBinders p
(rt_simplify ctxt [] y)))
specNames [] = Nothing
specNames (Specialise ns : _) = Just ns
specNames (_ : xs) = specNames xs
sameLength ((_, x, _) : xs)
= do l <- sameLength xs
let (f, as) = unApply x
if (null xs || l == length as) then return (length as)
else tfail (At fc (Msg "Clauses have differing numbers of arguments "))
sameLength [] = return 0
-- Partially evaluate, if the definition is marked as specialisable
doPartialEval ist pats =
case specNames opts of
Nothing -> return pats
Just ns -> case partial_eval (tt_ctxt ist) ns pats of
Just t -> return t
Nothing -> ierror (At fc (Msg "No specialisation achieved"))
-- | Find 'static' applications in a term and partially evaluate them.
-- Return any new transformation rules
elabPE :: ElabInfo -> FC -> Name -> Term -> Idris [(Term, Term)]
elabPE info fc caller r =
do ist <- getIState
let sa = filter (\ap -> fst ap /= caller) $ getSpecApps ist [] r
rules <- mapM mkSpecialised sa
return $ concat rules
where
-- Make a specialised version of the application, and
-- add a PTerm level transformation rule, which is basically the
-- new definition in reverse (before specialising it).
-- RHS => LHS where implicit arguments are left blank in the
-- transformation.
-- Transformation rules are applied after every PClause elaboration
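    -- For illustration (a note added here, not in the original comment):
    -- when the static arguments of an application are concrete, this
    -- produces a fresh top-level name of the form PE_<function>_<hash>
    -- (see getSpecClause below) together with a transformation rule that
    -- rewrites the general application to use the specialised definition.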
mkSpecialised :: (Name, [(PEArgType, Term)]) -> Idris [(Term, Term)]
mkSpecialised specapp_in = do
ist <- getIState
let (specTy, specapp) = getSpecTy ist specapp_in
let (n, newnm, specdecl) = getSpecClause ist specapp
let lhs = pe_app specdecl
let rhs = pe_def specdecl
let undef = case lookupDefExact newnm (tt_ctxt ist) of
Nothing -> True
_ -> False
logLvl 5 $ show (newnm, undef, map (concreteArg ist) (snd specapp))
idrisCatch
(if (undef && all (concreteArg ist) (snd specapp)) then do
cgns <- getAllNames n
-- on the RHS of the new definition, we should reduce
-- everything that's not itself static (because we'll
-- want to be a PE version of those next)
let cgns' = filter (\x -> x /= n &&
notStatic ist x) cgns
-- set small reduction limit on partial/productive things
let maxred = case lookupTotal n (tt_ctxt ist) of
[Total _] -> 65536
[Productive] -> 16
_ -> 1
let opts = [Specialise ((if pe_simple specdecl
then map (\x -> (x, Nothing)) cgns'
else []) ++
(n, Just maxred) :
mapMaybe (specName (pe_simple specdecl))
(snd specapp))]
logLvl 3 $ "Specialising application: " ++ show specapp
++ " in " ++ show caller ++
" with " ++ show opts
logLvl 3 $ "New name: " ++ show newnm
logLvl 3 $ "PE definition type : " ++ (show specTy)
++ "\n" ++ show opts
logLvl 3 $ "PE definition " ++ show newnm ++ ":\n" ++
showSep "\n"
(map (\ (lhs, rhs) ->
(showTmImpls lhs ++ " = " ++
showTmImpls rhs)) (pe_clauses specdecl))
logLvl 2 $ show n ++ " transformation rule: " ++
showTmImpls rhs ++ " ==> " ++ showTmImpls lhs
elabType info defaultSyntax emptyDocstring [] fc opts newnm NoFC specTy
let def = map (\(lhs, rhs) ->
let lhs' = mapPT hiddenToPH $ stripUnmatchable ist lhs in
PClause fc newnm lhs' [] rhs [])
(pe_clauses specdecl)
trans <- elabTransform info fc False rhs lhs
elabClauses info fc (PEGenerated:opts) newnm def
return [trans]
else return [])
-- if it doesn't work, just don't specialise. Could happen for lots
-- of valid reasons (e.g. local variables in scope which can't be
-- lifted out).
(\e -> do logLvl 3 $ "Couldn't specialise: " ++ (pshow ist e)
return [])
hiddenToPH (PHidden _) = Placeholder
hiddenToPH x = x
specName simpl (ImplicitS, tm)
| (P Ref n _, _) <- unApply tm = Just (n, Just (if simpl then 1 else 0))
specName simpl (ExplicitS, tm)
| (P Ref n _, _) <- unApply tm = Just (n, Just (if simpl then 1 else 0))
specName simpl _ = Nothing
notStatic ist n = case lookupCtxtExact n (idris_statics ist) of
Just s -> not (or s)
_ -> True
concreteArg ist (ImplicitS, tm) = concreteTm ist tm
concreteArg ist (ExplicitS, tm) = concreteTm ist tm
concreteArg ist _ = True
concreteTm ist tm | (P _ n _, _) <- unApply tm =
case lookupTy n (tt_ctxt ist) of
[] -> False
_ -> True
concreteTm ist (Constant _) = True
concreteTm ist (Bind n (Lam _) sc) = True
concreteTm ist (Bind n (Pi _ _ _) sc) = True
concreteTm ist (Bind n (Let _ _) sc) = concreteTm ist sc
concreteTm ist _ = False
-- get the type of a specialised application
getSpecTy ist (n, args)
= case lookupTy n (tt_ctxt ist) of
[ty] -> let (specty_in, args') = specType args (explicitNames ty)
specty = normalise (tt_ctxt ist) [] (finalise specty_in)
t = mkPE_TyDecl ist args' (explicitNames specty) in
(t, (n, args'))
-- (normalise (tt_ctxt ist) [] (specType args ty))
_ -> error "Can't happen (getSpecTy)"
-- get the clause of a specialised application
getSpecClause ist (n, args)
= let newnm = sUN ("PE_" ++ show (nsroot n) ++ "_" ++
qhash 5381 (showSep "_" (map showArg args))) in
-- UN (show n ++ show (map snd args)) in
(n, newnm, mkPE_TermDecl ist newnm n args)
where showArg (ExplicitS, n) = qshow n
showArg (ImplicitS, n) = qshow n
showArg _ = ""
qshow (Bind _ _ _) = "fn"
qshow (App _ f a) = qshow f ++ qshow a
qshow (P _ n _) = show n
qshow (Constant c) = show c
qshow _ = ""
-- Simple but effective string hashing...
-- Keep it to 32 bits for readability/debuggability
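        -- (A note added for clarity: this is the classic djb2 scheme, i.e.
        -- seed 5381 and hash * 33 + character code at each step, with the
        -- final value reduced modulo 0xffffffff before rendering as hex.)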
qhash :: Int -> String -> String
qhash hash [] = showHex (abs hash `mod` 0xffffffff) ""
qhash hash (x:xs) = qhash (hash * 33 + fromEnum x) xs
-- | Checks if the clause is a possible left hand side.
checkPossible :: ElabInfo -> FC -> Bool -> Name -> PTerm -> Idris Bool
checkPossible info fc tcgen fname lhs_in
= do ctxt <- getContext
i <- getIState
let lhs = addImplPat i lhs_in
-- if the LHS type checks, it is possible
case elaborate ctxt (idris_datatypes i) (sMN 0 "patLHS") infP initEState
(erun fc (buildTC i info ELHS [] fname (infTerm lhs))) of
OK (ElabResult lhs' _ _ ctxt' newDecls highlights, _) ->
do setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
let lhs_tm = orderPats (getInferTerm lhs')
case recheck ctxt [] (forget lhs_tm) lhs_tm of
OK _ -> return True
err -> return False
-- if it's a recoverable error, the case may become possible
Error err -> if tcgen then return (recoverableCoverage ctxt err)
else return (validCoverageCase ctxt err ||
recoverableCoverage ctxt err)
propagateParams :: IState -> [Name] -> Type -> PTerm -> PTerm
propagateParams i ps t tm@(PApp _ (PRef fc hls n) args)
= PApp fc (PRef fc hls n) (addP t args)
where addP (Bind n _ sc) (t : ts)
| Placeholder <- getTm t,
n `elem` ps,
not (n `elem` allNamesIn tm)
= t { getTm = PRef NoFC [] n } : addP sc ts
addP (Bind n _ sc) (t : ts) = t : addP sc ts
addP _ ts = ts
propagateParams i ps t (PRef fc hls n)
= case lookupCtxt n (idris_implicits i) of
[is] -> let ps' = filter (isImplicit is) ps in
PApp fc (PRef fc hls n) (map (\x -> pimp x (PRef fc [] x) True) ps')
_ -> PRef fc hls n
where isImplicit [] n = False
isImplicit (PImp _ _ _ x _ : is) n | x == n = True
isImplicit (_ : is) n = isImplicit is n
propagateParams i ps t x = x
findUnique :: Context -> Env -> Term -> [Name]
findUnique ctxt env (Bind n b sc)
= let rawTy = forgetEnv (map fst env) (binderTy b)
uniq = case check ctxt env rawTy of
OK (_, UType UniqueType) -> True
OK (_, UType NullType) -> True
OK (_, UType AllTypes) -> True
_ -> False in
if uniq then n : findUnique ctxt ((n, b) : env) sc
else findUnique ctxt ((n, b) : env) sc
findUnique _ _ _ = []
-- Return the elaborated LHS/RHS, and the original LHS with implicits added
elabClause :: ElabInfo -> FnOpts -> (Int, PClause) ->
Idris (Either Term (Term, Term), PTerm)
elabClause info opts (_, PClause fc fname lhs_in [] PImpossible [])
= do let tcgen = Dictionary `elem` opts
i <- get
let lhs = addImpl [] i lhs_in
b <- checkPossible info fc tcgen fname lhs_in
case b of
True -> tclift $ tfail (At fc
(Msg $ show lhs_in ++ " is a valid case"))
False -> do ptm <- mkPatTm lhs_in
return (Left ptm, lhs)
elabClause info opts (cnum, PClause fc fname lhs_in_as withs rhs_in_as whereblock)
= do let tcgen = Dictionary `elem` opts
push_estack fname False
ctxt <- getContext
let (lhs_in, rhs_in) = desugarAs lhs_in_as rhs_in_as
-- Build the LHS as an "Infer", and pull out its type and
-- pattern bindings
i <- getIState
inf <- isTyInferred fname
-- get the parameters first, to pass through to any where block
let fn_ty = case lookupTy fname (tt_ctxt i) of
[t] -> t
_ -> error "Can't happen (elabClause function type)"
let fn_is = case lookupCtxt fname (idris_implicits i) of
[t] -> t
_ -> []
let params = getParamsInType i [] fn_is (normalise ctxt [] fn_ty)
let lhs = mkLHSapp $ stripLinear i $ stripUnmatchable i $
propagateParams i params fn_ty (addImplPat i lhs_in)
-- let lhs = mkLHSapp $
-- propagateParams i params fn_ty (addImplPat i lhs_in)
logLvl 5 ("LHS: " ++ show fc ++ " " ++ showTmImpls lhs)
logLvl 4 ("Fixed parameters: " ++ show params ++ " from " ++ show lhs_in ++
"\n" ++ show (fn_ty, fn_is))
((ElabResult lhs' dlhs [] ctxt' newDecls highlights, probs, inj), _) <-
tclift $ elaborate ctxt (idris_datatypes i) (sMN 0 "patLHS") infP initEState
(do res <- errAt "left hand side of " fname
(erun fc (buildTC i info ELHS opts fname (infTerm lhs)))
probs <- get_probs
inj <- get_inj
return (res, probs, inj))
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
when inf $ addTyInfConstraints fc (map (\(x,y,_,_,_,_,_) -> (x,y)) probs)
let lhs_tm = orderPats (getInferTerm lhs')
let lhs_ty = getInferType lhs'
let static_names = getStaticNames i lhs_tm
logLvl 3 ("Elaborated: " ++ show lhs_tm)
logLvl 3 ("Elaborated type: " ++ show lhs_ty)
logLvl 5 ("Injective: " ++ show fname ++ " " ++ show inj)
-- If we're inferring metavariables in the type, don't recheck,
-- because we're only doing this to try to work out those metavariables
(clhs_c, clhsty) <- if not inf
then recheckC fc id [] lhs_tm
else return (lhs_tm, lhs_ty)
let clhs = normalise ctxt [] clhs_c
let borrowed = borrowedNames [] clhs
-- These are the names we're not allowed to use on the RHS, because
-- they're UniqueTypes and borrowed from another function.
-- FIXME: There is surely a nicer way than this...
-- Issue #1615 on the Issue Tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1615
when (not (null borrowed)) $
logLvl 5 ("Borrowed names on LHS: " ++ show borrowed)
logLvl 3 ("Normalised LHS: " ++ showTmImpls (delabMV i clhs))
rep <- useREPL
when rep $ do
addInternalApp (fc_fname fc) (fst . fc_start $ fc) (delabMV i clhs) -- TODO: Should use span instead of line and filename?
addIBC (IBCLineApp (fc_fname fc) (fst . fc_start $ fc) (delabMV i clhs))
logLvl 5 ("Checked " ++ show clhs ++ "\n" ++ show clhsty)
-- Elaborate where block
ist <- getIState
windex <- getName
let decls = nub (concatMap declared whereblock)
let defs = nub (decls ++ concatMap defined whereblock)
let newargs_all = pvars ist lhs_tm
-- Unique arguments must be passed to the where block explicitly
-- (since we can't control "usage" easlily otherwise). Remove them
-- from newargs here
let uniqargs = findUnique (tt_ctxt ist) [] lhs_tm
let newargs = filter (\(n,_) -> n `notElem` uniqargs) newargs_all
let winfo = (pinfo info newargs defs windex) { elabFC = Just fc }
let wb = map (mkStatic static_names) $
map (expandParamsD False ist decorate newargs defs) whereblock
-- Split the where block into declarations with a type, and those
-- without
-- Elaborate those with a type *before* RHS, those without *after*
let (wbefore, wafter) = sepBlocks wb
logLvl 2 $ "Where block:\n " ++ show wbefore ++ "\n" ++ show wafter
mapM_ (rec_elabDecl info EAll winfo) wbefore
-- Now build the RHS, using the type of the LHS as the goal.
i <- getIState -- new implicits from where block
logLvl 5 (showTmImpls (expandParams decorate newargs defs (defs \\ decls) rhs_in))
let rhs = addImplBoundInf i (map fst newargs_all) (defs \\ decls)
(expandParams decorate newargs defs (defs \\ decls) rhs_in)
logLvl 2 $ "RHS: " ++ show (map fst newargs_all) ++ " " ++ showTmImpls rhs
ctxt <- getContext -- new context with where block added
logLvl 5 "STARTING CHECK"
((rhs', defer, is, probs, ctxt', newDecls, highlights), _) <-
tclift $ elaborate ctxt (idris_datatypes i) (sMN 0 "patRHS") clhsty initEState
(do pbinds ist lhs_tm
-- proof search can use explicitly written names
mapM_ addPSname (allNamesIn lhs_in)
mapM_ setinj (nub (params ++ inj))
setNextName
(ElabResult _ _ is ctxt' newDecls highlights) <-
errAt "right hand side of " fname
(erun fc (build i winfo ERHS opts fname rhs))
errAt "right hand side of " fname
(erun fc $ psolve lhs_tm)
hs <- get_holes
mapM_ (elabCaseHole is) hs
tt <- get_term
aux <- getAux
let (tm, ds) = runState (collectDeferred (Just fname)
(map fst $ case_decls aux) ctxt tt) []
probs <- get_probs
return (tm, ds, is, probs, ctxt', newDecls, highlights))
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
when inf $ addTyInfConstraints fc (map (\(x,y,_,_,_,_,_) -> (x,y)) probs)
logLvl 5 "DONE CHECK"
logLvl 4 $ "---> " ++ show rhs'
when (not (null defer)) $ logLvl 1 $ "DEFERRED " ++
show (map (\ (n, (_,_,t,_)) -> (n, t)) defer)
def' <- checkDef fc (Elaborating "deferred type of ") defer
let def'' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, False))) def'
addDeferred def''
mapM_ (\(n, _) -> addIBC (IBCDef n)) def''
when (not (null def')) $ do
mapM_ defer_totcheck (map (\x -> (fc, fst x)) def'')
-- Now the remaining deferred (i.e. no type declarations) clauses
-- from the where block
mapM_ (rec_elabDecl info EAll winfo) wafter
mapM_ (elabCaseBlock winfo opts) is
ctxt <- getContext
logLvl 5 $ "Rechecking"
logLvl 6 $ " ==> " ++ show (forget rhs')
(crhs, crhsty) <- if not inf
then recheckC_borrowing True borrowed fc id [] rhs'
else return (rhs', clhsty)
logLvl 6 $ " ==> " ++ showEnvDbg [] crhsty ++ " against " ++ showEnvDbg [] clhsty
ctxt <- getContext
let constv = next_tvar ctxt
case LState.runStateT (convertsC ctxt [] crhsty clhsty) (constv, []) of
OK (_, cs) -> do addConstraints fc cs
logLvl 6 $ "CONSTRAINTS ADDED: " ++ show cs
return ()
Error e -> ierror (At fc (CantUnify False (clhsty, Nothing) (crhsty, Nothing) e [] 0))
i <- getIState
checkInferred fc (delab' i crhs True True) rhs
-- if the function is declared '%error_reverse', or its type,
-- then we'll try running it in reverse to improve error messages
let (ret_fam, _) = unApply (getRetTy crhsty)
rev <- case ret_fam of
P _ rfamn _ ->
case lookupCtxt rfamn (idris_datatypes i) of
[TI _ _ dopts _ _] ->
return (DataErrRev `elem` dopts)
_ -> return False
_ -> return False
when (rev || ErrorReverse `elem` opts) $ do
addIBC (IBCErrRev (crhs, clhs))
addErrRev (crhs, clhs)
pop_estack
return $ (Right (clhs, crhs), lhs)
where
pinfo :: ElabInfo -> [(Name, PTerm)] -> [Name] -> Int -> ElabInfo
pinfo info ns ds i
= let newps = params info ++ ns
dsParams = map (\n -> (n, map fst newps)) ds
newb = addAlist dsParams (inblock info)
l = liftname info in
info { params = newps,
inblock = newb,
liftname = id -- (\n -> case lookupCtxt n newb of
-- Nothing -> n
-- _ -> MN i (show n)) . l
}
    -- Find the variable names which appear under an 'Ownership.Read' so that
-- we know they can't be used on the RHS
borrowedNames :: [Name] -> Term -> [Name]
borrowedNames env (App _ (App _ (P _ (NS (UN lend) [owner]) _) _) arg)
| owner == txt "Ownership" &&
(lend == txt "lend" || lend == txt "Read") = getVs arg
where
getVs (V i) = [env!!i]
getVs (App _ f a) = nub $ getVs f ++ getVs a
getVs _ = []
borrowedNames env (App _ f a) = nub $ borrowedNames env f ++ borrowedNames env a
borrowedNames env (Bind n b sc) = nub $ borrowedB b ++ borrowedNames (n:env) sc
where borrowedB (Let t v) = nub $ borrowedNames env t ++ borrowedNames env v
borrowedB b = borrowedNames env (binderTy b)
borrowedNames _ _ = []
mkLHSapp t@(PRef _ _ _) = PApp fc t []
mkLHSapp t = t
decorate (NS x ns)
= NS (SN (WhereN cnum fname x)) ns -- ++ [show cnum])
-- = NS (UN ('#':show x)) (ns ++ [show cnum, show fname])
decorate x
= SN (WhereN cnum fname x)
-- = NS (SN (WhereN cnum fname x)) [show cnum]
-- = NS (UN ('#':show x)) [show cnum, show fname]
sepBlocks bs = sepBlocks' [] bs where
sepBlocks' ns (d@(PTy _ _ _ _ _ n _ t) : bs)
= let (bf, af) = sepBlocks' (n : ns) bs in
(d : bf, af)
sepBlocks' ns (d@(PClauses _ _ n _) : bs)
| not (n `elem` ns) = let (bf, af) = sepBlocks' ns bs in
(bf, d : af)
sepBlocks' ns (b : bs) = let (bf, af) = sepBlocks' ns bs in
(b : bf, af)
sepBlocks' ns [] = ([], [])
-- if a hole is just an argument/result of a case block, treat it as
-- the unit type. Hack to help elaborate case in do blocks.
elabCaseHole aux h = do
focus h
g <- goal
case g of
TType _ -> when (any (isArg h) aux) $ do apply (Var unitTy) []; solve
_ -> return ()
-- Is the name a pattern argument in the declaration
isArg :: Name -> PDecl -> Bool
isArg n (PClauses _ _ _ cs) = any isArg' cs
where
isArg' (PClause _ _ (PApp _ _ args) _ _ _)
= any (\x -> case x of
PRef _ _ n' -> n == n'
_ -> False) (map getTm args)
isArg' _ = False
isArg _ _ = False
elabClause info opts (_, PWith fc fname lhs_in withs wval_in pn_in withblock)
= do let tcgen = Dictionary `elem` opts
ctxt <- getContext
-- Build the LHS as an "Infer", and pull out its type and
-- pattern bindings
i <- getIState
-- get the parameters first, to pass through to any where block
let fn_ty = case lookupTy fname (tt_ctxt i) of
[t] -> t
_ -> error "Can't happen (elabClause function type)"
let fn_is = case lookupCtxt fname (idris_implicits i) of
[t] -> t
_ -> []
let params = getParamsInType i [] fn_is (normalise ctxt [] fn_ty)
let lhs = stripLinear i $ stripUnmatchable i $ propagateParams i params fn_ty (addImplPat i lhs_in)
logLvl 2 ("LHS: " ++ show lhs)
(ElabResult lhs' dlhs [] ctxt' newDecls highlights, _) <-
tclift $ elaborate ctxt (idris_datatypes i) (sMN 0 "patLHS") infP initEState
(errAt "left hand side of with in " fname
(erun fc (buildTC i info ELHS opts fname (infTerm lhs))) )
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
let lhs_tm = orderPats (getInferTerm lhs')
let lhs_ty = getInferType lhs'
let ret_ty = getRetTy (explicitNames (normalise ctxt [] lhs_ty))
let static_names = getStaticNames i lhs_tm
logLvl 5 (show lhs_tm ++ "\n" ++ show static_names)
(clhs, clhsty) <- recheckC fc id [] lhs_tm
logLvl 5 ("Checked " ++ show clhs)
let bargs = getPBtys (explicitNames (normalise ctxt [] lhs_tm))
let wval = addImplBound i (map fst bargs) wval_in
logLvl 5 ("Checking " ++ showTmImpls wval)
-- Elaborate wval in this context
((wval', defer, is, ctxt', newDecls, highlights), _) <-
tclift $ elaborate ctxt (idris_datatypes i) (sMN 0 "withRHS")
(bindTyArgs PVTy bargs infP) initEState
(do pbinds i lhs_tm
-- proof search can use explicitly written names
mapM_ addPSname (allNamesIn lhs_in)
setNextName
             -- TODO: may want where here - see winfo above
(ElabResult _ d is ctxt' newDecls highlights) <- errAt "with value in " fname
(erun fc (build i info ERHS opts fname (infTerm wval)))
erun fc $ psolve lhs_tm
tt <- get_term
return (tt, d, is, ctxt', newDecls, highlights))
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
def' <- checkDef fc iderr defer
let def'' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, False))) def'
addDeferred def''
mapM_ (elabCaseBlock info opts) is
logLvl 5 ("Checked wval " ++ show wval')
(cwval, cwvalty) <- recheckC fc id [] (getInferTerm wval')
let cwvaltyN = explicitNames (normalise ctxt [] cwvalty)
let cwvalN = explicitNames (normalise ctxt [] cwval)
logLvl 3 ("With type " ++ show cwvalty ++ "\nRet type " ++ show ret_ty)
-- We're going to assume the with type is not a function shortly,
-- so report an error if it is (you can't match on a function anyway
-- so this doesn't lose anything)
case getArgTys cwvaltyN of
[] -> return ()
(_:_) -> ierror $ At fc (WithFnType cwvalty)
let pvars = map fst (getPBtys cwvalty)
-- we need the unelaborated term to get the names it depends on
-- rather than a de Bruijn index.
let pdeps = usedNamesIn pvars i (delab i cwvalty)
let (bargs_pre, bargs_post) = split pdeps bargs []
let mpn = case pn_in of
Nothing -> Nothing
Just (n, nfc) -> Just (uniqueName n (map fst bargs))
-- Highlight explicit proofs
sendHighlighting $ [(fc, AnnBoundName n False) | (n, fc) <- maybeToList pn_in]
logLvl 10 ("With type " ++ show (getRetTy cwvaltyN) ++
" depends on " ++ show pdeps ++ " from " ++ show pvars)
logLvl 10 ("Pre " ++ show bargs_pre ++ "\nPost " ++ show bargs_post)
windex <- getName
-- build a type declaration for the new function:
-- (ps : Xs) -> (withval : cwvalty) -> (ps' : Xs') -> ret_ty
let wargval = getRetTy cwvalN
let wargtype = getRetTy cwvaltyN
let wargname = sMN windex "warg"
logLvl 5 ("Abstract over " ++ show wargval ++ " in " ++ show wargtype)
let wtype = bindTyArgs (flip (Pi Nothing) (TType (UVar 0))) (bargs_pre ++
(wargname, wargtype) :
map (abstract wargname wargval wargtype) bargs_post ++
case mpn of
Just pn -> [(pn, mkApp (P Ref eqTy Erased)
[wargtype, wargtype,
P Bound wargname Erased, wargval])]
Nothing -> [])
(substTerm wargval (P Bound wargname wargtype) ret_ty)
logLvl 3 ("New function type " ++ show wtype)
let wname = SN (WithN windex fname)
let imps = getImps wtype -- add to implicits context
putIState (i { idris_implicits = addDef wname imps (idris_implicits i) })
let statics = getStatics static_names wtype
logLvl 5 ("Static positions " ++ show statics)
i <- getIState
putIState (i { idris_statics = addDef wname statics (idris_statics i) })
addIBC (IBCDef wname)
addIBC (IBCImp wname)
addIBC (IBCStatic wname)
def' <- checkDef fc iderr [(wname, (-1, Nothing, wtype, []))]
let def'' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, False))) def'
addDeferred def''
-- in the subdecls, lhs becomes:
-- fname pats | wpat [rest]
-- ==> fname' ps wpat [rest], match pats against toplevel for ps
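       -- For orientation, a hedged surface-level sketch (illustrative names only)
       -- of what this with-elaboration produces. A source clause such as
       --
       --   filter p (x :: xs) with (p x)
       --     filter p (x :: xs) | True  = x :: filter p xs
       --     filter p (x :: xs) | False = filter p xs
       --
       -- is compiled to an auxiliary function that takes the with-value as an
       -- extra argument, roughly:
       --
       --   filter' p x xs True  = x :: filter p xs
       --   filter' p x xs False = filter p xs
       --   filter  p (x :: xs)  = filter' p x xs (p x)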
wb <- mapM (mkAuxC mpn wname lhs (map fst bargs_pre) (map fst bargs_post))
withblock
logLvl 3 ("with block " ++ show wb)
-- propagate totality assertion to the new definitions
when (AssertTotal `elem` opts) $ setFlags wname [AssertTotal]
mapM_ (rec_elabDecl info EAll info) wb
-- rhs becomes: fname' ps_pre wval ps_post Refl
let rhs = PApp fc (PRef fc [] wname)
(map (pexp . (PRef fc []) . fst) bargs_pre ++
pexp wval :
(map (pexp . (PRef fc []) . fst) bargs_post) ++
case mpn of
Nothing -> []
Just _ -> [pexp (PApp NoFC (PRef NoFC [] eqCon)
[ pimp (sUN "A") Placeholder False
, pimp (sUN "x") Placeholder False
])])
logLvl 5 ("New RHS " ++ showTmImpls rhs)
ctxt <- getContext -- New context with block added
i <- getIState
((rhs', defer, is, ctxt', newDecls, highlights), _) <-
tclift $ elaborate ctxt (idris_datatypes i) (sMN 0 "wpatRHS") clhsty initEState
(do pbinds i lhs_tm
setNextName
(ElabResult _ d is ctxt' newDecls highlights) <-
erun fc (build i info ERHS opts fname rhs)
psolve lhs_tm
tt <- get_term
return (tt, d, is, ctxt', newDecls, highlights))
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
def' <- checkDef fc iderr defer
let def'' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, False))) def'
addDeferred def''
mapM_ (elabCaseBlock info opts) is
logLvl 5 ("Checked RHS " ++ show rhs')
(crhs, crhsty) <- recheckC fc id [] rhs'
return $ (Right (clhs, crhs), lhs)
where
getImps (Bind n (Pi _ _ _) t) = pexp Placeholder : getImps t
getImps _ = []
mkAuxC pn wname lhs ns ns' (PClauses fc o n cs)
| True = do cs' <- mapM (mkAux pn wname lhs ns ns') cs
return $ PClauses fc o wname cs'
| otherwise = ifail $ show fc ++ "with clause uses wrong function name " ++ show n
mkAuxC pn wname lhs ns ns' d = return $ d
mkAux pn wname toplhs ns ns' (PClause fc n tm_in (w:ws) rhs wheres)
= do i <- getIState
let tm = addImplPat i tm_in
logLvl 2 ("Matching " ++ showTmImpls tm ++ " against " ++
showTmImpls toplhs)
case matchClause i toplhs tm of
Left (a,b) -> ifail $ show fc ++ ":with clause does not match top level"
Right mvars ->
do logLvl 3 ("Match vars : " ++ show mvars)
lhs <- updateLHS n pn wname mvars ns ns' (fullApp tm) w
return $ PClause fc wname lhs ws rhs wheres
mkAux pn wname toplhs ns ns' (PWith fc n tm_in (w:ws) wval pn' withs)
= do i <- getIState
let tm = addImplPat i tm_in
logLvl 2 ("Matching " ++ showTmImpls tm ++ " against " ++
showTmImpls toplhs)
withs' <- mapM (mkAuxC pn wname toplhs ns ns') withs
case matchClause i toplhs tm of
Left (a,b) -> trace ("matchClause: " ++ show a ++ " =/= " ++ show b) (ifail $ show fc ++ "with clause does not match top level")
Right mvars ->
do lhs <- updateLHS n pn wname mvars ns ns' (fullApp tm) w
return $ PWith fc wname lhs ws wval pn' withs'
mkAux pn wname toplhs ns ns' c
= ifail $ show fc ++ ":badly formed with clause"
addArg (PApp fc f args) w = PApp fc f (args ++ [pexp w])
addArg (PRef fc hls f) w = PApp fc (PRef fc hls f) [pexp w]
updateLHS n pn wname mvars ns_in ns_in' (PApp fc (PRef fc' hls' n') args) w
= let ns = map (keepMvar (map fst mvars) fc') ns_in
ns' = map (keepMvar (map fst mvars) fc') ns_in' in
return $ substMatches mvars $
PApp fc (PRef fc' [] wname)
(map pexp ns ++ pexp w : (map pexp ns') ++
case pn of
Nothing -> []
Just pnm -> [pexp (PRef fc [] pnm)])
updateLHS n pn wname mvars ns_in ns_in' tm w
= updateLHS n pn wname mvars ns_in ns_in' (PApp fc tm []) w
keepMvar mvs fc v | v `elem` mvs = PRef fc [] v
| otherwise = Placeholder
fullApp (PApp _ (PApp fc f args) xs) = fullApp (PApp fc f (args ++ xs))
fullApp x = x
split [] rest pre = (reverse pre, rest)
split deps ((n, ty) : rest) pre
| n `elem` deps = split (deps \\ [n]) rest ((n, ty) : pre)
| otherwise = split deps rest ((n, ty) : pre)
split deps [] pre = (reverse pre, [])
abstract wn wv wty (n, argty) = (n, substTerm wv (P Bound wn wty) argty)
| uwap/Idris-dev | src/Idris/Elab/Clause.hs | bsd-3-clause | 47,429 | 10 | 28 | 19,125 | 13,652 | 6,761 | 6,891 | 757 | 42 |
module HAD.Y2114.M03.D21.Solution where
import Control.Applicative
import Control.Arrow
import Data.Maybe
-- $setup
-- >>> import Test.QuickCheck
-- >>> import Data.Maybe (fromJust)
-- | minmax
-- get a pair of the min and max elements of a list (in one pass)
-- returns Nothing on empty list
--
-- Point-free: checked
--
-- The function signature follows the idea of the methods in the System.Random
-- module: given a standard generator, it returns the modified list and the
-- generator in an altered state.
--
-- >>> minmax [0..10]
-- Just (0,10)
--
-- >>> minmax []
-- Nothing
--
-- prop> \(NonEmpty(xs)) -> minimum xs == (fst . fromJust . minmax) xs
-- prop> \(NonEmpty(xs)) -> maximum xs == (snd . fromJust . minmax) xs
--
minmax :: Ord a => [a] -> Maybe (a,a)
minmax =
listToMaybe
. scanr1 (flip (liftA2 (&&&) (min . fst) (max . snd)) . fst)
. (zip <*> id)
minmaxNoPF :: Ord a => [a] -> Maybe (a,a)
minmaxNoPF [] = Nothing
minmaxNoPF (x:xs) =
listToMaybe
. scanr (flip (liftA2 (&&&) (min . fst) (max . snd))) (x,x)
$ xs
| 1HaskellADay/1HAD | exercises/HAD/Y2014/M03/D21/Solution.hs | mit | 1,049 | 0 | 14 | 206 | 237 | 141 | 96 | 15 | 1 |
{-# LANGUAGE BangPatterns,CPP #-}
import System.Directory
import System.FilePath
import Control.Concurrent.Async
import System.Environment
import Data.List hiding (find)
import Control.Exception (finally)
import Data.Maybe (isJust)
import Control.Concurrent.MVar
import Control.Concurrent.STM
import Data.IORef
import GHC.Conc (getNumCapabilities)
import CasIORef
-- <<main
main = do
[s,d] <- getArgs
n <- getNumCapabilities
sem <- newNBSem (if n == 1 then 0 else n * 4)
find sem s d >>= print
-- >>
-- <<find
find :: NBSem -> String -> FilePath -> IO (Maybe FilePath)
find sem s d = do
fs <- getDirectoryContents d
let fs' = sort $ filter (`notElem` [".",".."]) fs
if any (== s) fs'
then return (Just (d </> s))
else do
let ps = map (d </>) fs' -- <1>
foldr (subfind sem s) dowait ps [] -- <2>
where
dowait as = loop (reverse as) -- <3>
loop [] = return Nothing
loop (a:as) = do -- <4>
r <- wait a -- <5>
case r of
Nothing -> loop as -- <6>
Just a -> return (Just a) -- <7>
-- >>
-- <<subfind
subfind :: NBSem -> String -> FilePath
-> ([Async (Maybe FilePath)] -> IO (Maybe FilePath))
-> [Async (Maybe FilePath)] -> IO (Maybe FilePath)
subfind sem s p inner asyncs = do
isdir <- doesDirectoryExist p
if isdir
then do
q <- tryWaitNBSem sem
if q
then withAsync (find sem s p `finally` signalNBSem sem) $ \a ->
inner (a:asyncs)
else do r <- find sem s p
if isJust r then return r else inner asyncs
else inner asyncs
-- >>
-- <<NBSem
newtype NBSem = NBSem (IORef Int)
newNBSem :: Int -> IO NBSem
newNBSem i = do
m <- newIORef i
return (NBSem m)
tryWaitNBSem :: NBSem -> IO Bool
tryWaitNBSem (NBSem m) = do
atomicModifyIORef m $ \i ->
if i == 0
then (i, False)
else let !z = i-1 in (z, True)
signalNBSem :: NBSem -> IO ()
signalNBSem (NBSem m) =
atomicModifyIORef m $ \i ->
let !z = i+1 in (z, ())
-- >>
| prt2121/haskell-practice | parconc/findpar3.hs | apache-2.0 | 2,096 | 1 | 15 | 632 | 843 | 427 | 416 | 63 | 4 |
{-# LANGUAGE BangPatterns, MagicHash, UnboxedTuples, DefaultSignatures, TypeOperators, FlexibleContexts #-}
module Parallel
(NFData, parMap, rdeepseq) where
import Control.Monad
import GHC.Exts
import Control.DeepSeq
infixl 0 `using`
type Strategy a = a -> Eval a
newtype Eval a = Eval (State# RealWorld -> (# State# RealWorld, a #))
instance Functor Eval where
fmap = liftM
instance Applicative Eval where
pure x = Eval $ \s -> (# s, x #)
(<*>) = ap
instance Monad Eval where
return = pure
Eval x >>= k = Eval $ \s -> case x s of
(# s', a #) -> case k a of
Eval f -> f s'
rpar :: Strategy a
rpar x = Eval $ \s -> spark# x s
rparWith :: Strategy a -> Strategy a
rparWith s a = do l <- rpar r; return (case l of Lift x -> x)
where r = case s a of
Eval f -> case f realWorld# of
(# _, a' #) -> Lift a'
data Lift a = Lift a
using :: a -> Strategy a -> a
x `using` strat = runEval (strat x)
rdeepseq :: NFData a => Strategy a
rdeepseq x = do rseq (rnf x); return x
parList :: Strategy a -> Strategy [a]
parList strat = traverse (rparWith strat)
parMap :: Strategy b -> (a -> b) -> [a] -> [b]
parMap strat f = (`using` parList strat) . map f
runEval :: Eval a -> a
runEval (Eval x) = case x realWorld# of (# _, a #) -> a
rseq :: Strategy a
rseq x = Eval $ \s -> seq# x s
| ezyang/ghc | testsuite/tests/concurrent/T13615/Parallel.hs | bsd-3-clause | 1,432 | 0 | 14 | 435 | 592 | 304 | 288 | 39 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGPoint
(matrixTransform, matrixTransform_, setX, getX, setY, getY,
SVGPoint(..), gTypeSVGPoint)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPoint.matrixTransform Mozilla SVGPoint.matrixTransform documentation>
matrixTransform ::
(MonadDOM m) => SVGPoint -> SVGMatrix -> m SVGPoint
matrixTransform self matrix
= liftDOM
((self ^. jsf "matrixTransform" [toJSVal matrix]) >>=
fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPoint.matrixTransform Mozilla SVGPoint.matrixTransform documentation>
matrixTransform_ :: (MonadDOM m) => SVGPoint -> SVGMatrix -> m ()
matrixTransform_ self matrix
= liftDOM (void (self ^. jsf "matrixTransform" [toJSVal matrix]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPoint.x Mozilla SVGPoint.x documentation>
setX :: (MonadDOM m) => SVGPoint -> Float -> m ()
setX self val = liftDOM (self ^. jss "x" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPoint.x Mozilla SVGPoint.x documentation>
getX :: (MonadDOM m) => SVGPoint -> m Float
getX self
= liftDOM (realToFrac <$> ((self ^. js "x") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPoint.y Mozilla SVGPoint.y documentation>
setY :: (MonadDOM m) => SVGPoint -> Float -> m ()
setY self val = liftDOM (self ^. jss "y" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPoint.y Mozilla SVGPoint.y documentation>
getY :: (MonadDOM m) => SVGPoint -> m Float
getY self
= liftDOM (realToFrac <$> ((self ^. js "y") >>= valToNumber))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SVGPoint.hs | mit | 2,558 | 0 | 12 | 349 | 671 | 394 | 277 | 38 | 1 |
--
-- (C)opyright 2007 Ricardo Martins <ricardo at scarybox dot net>
-- Licensed under the MIT/X11 License.
-- See LICENSE file for license details.
--
module Main where
import System (getArgs)
import Text.Regex.Posix
import StrictIO
import Test.QuickCheck
-- strip the email message from those stupid signatures
mangle :: [String] -> [String]
mangle [] = []
mangle (x:xs) =
-- when/if a signature is found
if (x =~ "^> (--|__)" :: Bool)
-- filter quoted lines (that is, lines starting with ">")
then filter (\a -> not (a =~ "^>" :: Bool)) xs
else
x : mangle xs
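-- A hedged example of the intended behaviour (illustrative input only):
--   mangle ["> hi", "> --", "> old sig", "", "reply"]
--     == ["> hi", "", "reply"]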
-- removes duplicate adjacent elements
uniq_adj :: (Eq a) => [a] -> [a]
uniq_adj [] = []
uniq_adj [x] = [x]
uniq_adj (x:y:z) =
if (x == y) then uniq_adj (y:z)
else x : uniq_adj (y : z)
prop_uniq_adj xs = length (uniq_adj xs) <= length xs
-- removes trailing quotes after removing signatures
rmt :: [String] -> [String]
rmt [] = []
rmt [x] = [x]
rmt (x:y:z) =
if (x =~ "^>( )?$" :: Bool) && (y == "") then y:z
else x : rmt (y:z)
prop_rmt xs = length (rmt xs) <= length xs
main :: IO ()
main = do
[file] <- getArgs
email <- readFileStrict file
writeFile file $ unlines $ rmt $ uniq_adj $ mangle $ lines email
| meqif/tools | hashishin/hashishin.hs | mit | 1,249 | 0 | 12 | 301 | 456 | 248 | 208 | 30 | 2 |
module DevelMain where
import Prelude
import Application (getApplicationRepl, shutdownApp)
import Control.Exception (finally)
import Control.Monad ((>=>))
import Control.Concurrent
import Data.IORef
import Foreign.Store
import Network.Wai.Handler.Warp
import GHC.Word
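-- A hedged usage note, assuming the usual Yesod DevelMain workflow:
--   ghci> :load DevelMain
--   ghci> DevelMain.update    -- start (or restart) the Warp server in place
--   ghci> DevelMain.shutdown  -- stop it
-- The values kept in Foreign.Store survive a GHCi :reload, which is what lets
-- 'update' swap in the recompiled app without leaving the session.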
update :: IO ()
update = do
mtidStore <- lookupStore tidStoreNum
case mtidStore of
-- no server running
Nothing -> do
done <- storeAction doneStore newEmptyMVar
tid <- start done
_ <- storeAction (Store tidStoreNum) (newIORef tid)
return ()
-- server is already running
Just tidStore -> restartAppInNewThread tidStore
where
doneStore :: Store (MVar ())
doneStore = Store 0
restartAppInNewThread :: Store (IORef ThreadId) -> IO ()
restartAppInNewThread tidStore = modifyStoredIORef tidStore $ \tid -> do
killThread tid
withStore doneStore takeMVar
readStore doneStore >>= start
start :: MVar () -> IO ThreadId
start done = do
(port, site, app) <- getApplicationRepl
forkIO (finally (runSettings (setPort port defaultSettings) app)
-- Note that this implies concurrency
-- between shutdownApp and the next app that is starting.
-- Normally this should be fine
(putMVar done () >> shutdownApp site))
shutdown :: IO ()
shutdown = do
mtidStore <- lookupStore tidStoreNum
case mtidStore of
Nothing -> putStrLn "no Yesod app running"
Just tidStore -> do
withStore tidStore $ readIORef >=> killThread
putStrLn "Yesod app is shutdown"
tidStoreNum :: Word32
tidStoreNum = 1
modifyStoredIORef :: Store (IORef a) -> (a -> IO a) -> IO ()
modifyStoredIORef store f = withStore store $ \ref -> do
v <- readIORef ref
f v >>= writeIORef ref
| pbrisbin/tee-io | app/DevelMain.hs | mit | 1,883 | 0 | 15 | 526 | 519 | 255 | 264 | 46 | 2 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DataKinds #-}
module Data.HighJson
( -- * A json specification for any type
HighSpec(..), SpecType(..)
-- * Construct specifications for records
, recSpec, RecordTypeSpec, reqField, (.=), optField, (.=?)
-- * Construct specifications for sum types
, sumSpec, SumTypeSpec, sumOpt, (.->)
-- * Construct specifications for enum types
, enumSpec, EnumTypeSpec, enumOpt, (@->)
-- * Shared between specifications for simplicity
, IsDataSpec(..), (:&)(..)
-- * Generate json serializers/encoders and parsers from specs
, jsonSerializer, jsonEncoder, jsonParser
-- * Specification structures
, BodySpec(..)
, RecordField(..), RecordSpec(..), RecordFields(..)
, SumOption(..), SumSpec(..), SumOptions(..)
, EnumOption(..), EnumSpec(..)
-- * Aeson reexports
, ToJSON(..), FromJSON(..)
-- * Implementation detail structures
, PhantomEnumContainer(..), CombinableContainer(..)
)
where
import Data.HighJson.Types
import Control.Lens hiding ((.=))
import Data.Aeson ((.:), (.:?), FromJSON(..), ToJSON(..))
import Data.Typeable
import qualified Data.HVect as HV
import qualified Data.Text as T
-- | Combination of two local specifications. For records, these are fields, for sum types and enums
-- these are the options.
data a :& b
= a :& b
deriving (Typeable, Eq, Show, Functor, Traversable, Foldable, Bounded)
infixr 8 :&
instance (Semigroup a, Semigroup b) => Semigroup (a :& b) where
(a :& b) <> (a' :& b') = (a <> a') :& (b <> b')
instance (Monoid a, Monoid b) => Monoid (a :& b) where
mempty = mempty :& mempty
-- | A monoidal type class that respects type level lists associated to the bodies
class CombinableContainer t where
combineContainer :: t a (as :: [*]) -> t a (bs :: [*]) -> t a (HV.Append as bs)
instance CombinableContainer RecordFields where
combineContainer = recAppend
instance CombinableContainer SumOptions where
combineContainer = sumAppend
instance CombinableContainer PhantomEnumContainer where
combineContainer (PhantomEnumContainer x) (PhantomEnumContainer y) =
PhantomEnumContainer $ x ++ y
-- | A type class that allows a unified notation for records and sum types. Build specifications
-- using '(:&)' and '(.=)', '(.=?)', '(.->)' or '(@->)'
class IsDataSpec t where
type DFields t :: [*]
type DType t
type DContainer t :: * -> [*] -> *
compileRec :: t -> (DContainer t) (DType t) (DFields t)
instance IsDataSpec (RecordField t f) where
type DFields (RecordField t f) = (f ': '[])
type DType (RecordField t f) = t
type DContainer (RecordField t f) = RecordFields
compileRec x = x :+: RFEmpty
instance IsDataSpec (SumOption t f) where
type DFields (SumOption t f) = (f ': '[])
type DType (SumOption t f) = t
type DContainer (SumOption t f) = SumOptions
compileRec x = x :|: SOEmpty
newtype PhantomEnumContainer t (ts :: [*])
= PhantomEnumContainer { unPhantomEnumContainer :: [EnumOption t] }
instance IsDataSpec (EnumOption t) where
type DFields (EnumOption t) = (() ': '[])
type DType (EnumOption t) = t
type DContainer (EnumOption t) = PhantomEnumContainer
compileRec x = PhantomEnumContainer [x]
instance (IsDataSpec x, IsDataSpec y, DType x ~ DType y, DContainer x ~ DContainer y, CombinableContainer (DContainer x)) => IsDataSpec (x :& y) where
type DFields (x :& y) = HV.Append (DFields x) (DFields y)
type DType (x :& y) = DType x
type DContainer (x :& y) = DContainer x
compileRec (x :& y) = combineContainer (compileRec x) (compileRec y)
recAppend :: RecordFields t as -> RecordFields t bs -> RecordFields t (HV.Append as bs)
recAppend RFEmpty bs = bs
recAppend (a :+: as) bs = a :+: (as `recAppend` bs)
sumAppend :: SumOptions t as -> SumOptions t bs -> SumOptions t (HV.Append as bs)
sumAppend SOEmpty bs = bs
sumAppend (a :|: as) bs = a :|: (as `sumAppend` bs)
-- | A required json field. The key must be present in the json.
reqField :: FromJSON f => T.Text -> (t -> f) -> RecordField t f
reqField jsonKey g =
RecordField
{ rf_jsonKey = jsonKey
, rf_optional = False
, rf_jsonLoader = (.:)
, rf_get = g
}
-- | Alias for 'reqField'
(.=) :: FromJSON f => T.Text -> (t -> f) -> RecordField t f
jsonKey .= reader = reqField jsonKey reader
-- | An optional json field.
optField :: FromJSON f => T.Text -> (t -> Maybe f) -> RecordField t (Maybe f)
optField jsonKey g =
RecordField
{ rf_jsonKey = jsonKey
, rf_optional = True
, rf_jsonLoader = (.:?)
, rf_get = g
}
-- | Alias for 'optField'
(.=?) :: FromJSON f => T.Text -> (t -> Maybe f) -> RecordField t (Maybe f)
name .=? reader = optField name reader
-- | An option of a sum type
sumOpt :: T.Text -> Prism' t o -> SumOption t o
sumOpt jsonKey p =
SumOption
{ so_jsonKey = jsonKey
, so_prism = p
}
-- | Alias for 'sumOpt'
(.->) :: T.Text -> Prism' t o -> SumOption t o
jsonKey .-> p = sumOpt jsonKey p
-- | An option of a classic enum
enumOpt :: T.Text -> Prism' t () -> EnumOption t
enumOpt jsonKey p =
EnumOption
{ eo_jsonKey = jsonKey
, eo_prism = p
}
-- | Alias for 'enumOpt'
(@->) :: T.Text -> Prism' t () -> EnumOption t
jsonKey @-> p = enumOpt jsonKey p
-- | A specification for a record
type RecordTypeSpec t flds = HighSpec t 'SpecRecord flds
-- | The specification for a record. Contains a name, an optional description,
-- the constructor and a description of how to parse and serialize fields respecting
-- a given json key.
recSpec ::
(IsDataSpec q, DContainer q ~ RecordFields)
=> T.Text -> Maybe T.Text -> HV.HVectElim (DFields q) (DType q)
-> q
-> RecordTypeSpec (DType q) (DFields q)
recSpec name mDesc mk fields =
HighSpec
{ hs_name = name
, hs_description = mDesc
, hs_bodySpec = BodySpecRecord $ RecordSpec (HV.uncurry mk) (compileRec fields)
}
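-- A minimal usage sketch, not part of this module: the record type and field
-- names below are hypothetical, but they show how '.=', '.=?' and ':&' combine
-- into a spec that 'jsonSerializer' and 'jsonParser' can consume. (With
-- OverloadedStrings the 'T.pack' calls could be dropped.)
data ExamplePerson
   = ExamplePerson
   { ep_name :: T.Text
   , ep_age :: Maybe Int
   }

examplePersonSpec :: RecordTypeSpec ExamplePerson '[T.Text, Maybe Int]
examplePersonSpec =
    recSpec (T.pack "Person") Nothing ExamplePerson $
    T.pack "name" .= ep_name
    :& T.pack "age" .=? ep_age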
-- | A specification for an arbitrary sum type
type SumTypeSpec t flds = HighSpec t 'SpecSum flds
-- | The specification for a sum type. Contains a name, an optional description
-- and a mapping from all constructors (prisms) to their respective json fields
sumSpec ::
(IsDataSpec q, DContainer q ~ SumOptions)
=> T.Text -> Maybe T.Text -> q -> SumTypeSpec (DType q) (DFields q)
sumSpec name mDesc opts =
HighSpec
{ hs_name = name
, hs_description = mDesc
, hs_bodySpec = BodySpecSum $ SumSpec (compileRec opts)
}
-- | A specification for a classic enum
type EnumTypeSpec t flds = HighSpec t 'SpecEnum flds
-- | The specification for a classic enum type. Contains a name, an optional description
-- and a mapping from all constructors to their counterpart json string names.
enumSpec ::
(IsDataSpec q, DContainer q ~ PhantomEnumContainer)
=> T.Text -> Maybe T.Text -> q -> EnumTypeSpec (DType q) (DFields q)
enumSpec name mDesc opts =
HighSpec
{ hs_name = name
, hs_description = mDesc
, hs_bodySpec = BodySpecEnum $ EnumSpec (unPhantomEnumContainer $ compileRec opts)
}
| agrafix/highjson | highjson/src/Data/HighJson.hs | mit | 7,271 | 0 | 12 | 1,580 | 2,153 | 1,199 | 954 | -1 | -1 |
------------------------------------------------------------------------------------------------------------------------------
-- ROSE TREES, FUNCTORS, MONOIDS, FOLDABLES
------------------------------------------------------------------------------------------------------------------------------
data Rose a = a :> [Rose a] deriving Show
-- ===================================
-- Ex. 0-2
-- ===================================
root :: Rose a -> a
root (x :> y) = x
children :: Rose a -> [Rose a]
children (x :> y) = y
xs = 0 :> [1 :> [2 :> [3 :> [4 :> [], 5 :> []]]], 6 :> [], 7 :> [8 :> [9 :> [10 :> []], 11 :> []], 12 :> [13 :> []]]]
ex2 = root . head . children . head . children . head . drop 2 $ children xs
-- ===================================
-- Ex. 3-7
-- ===================================
size :: Rose a -> Int
size (x :> []) = 1
size (x :> y) = (sum $ map size y) + 1
leaves :: Rose a -> Int
leaves (x :> []) = 1
leaves (x :> y) = (sum $ map leaves y)
ex7 = (*) (leaves . head . children . head . children $ xs) (product . map size . children . head . drop 2 . children $ xs)
-- ===================================
-- Ex. 8-10
-- ===================================
instance Functor Rose where
fmap f (x :> y) = f x :> (map (fmap f) y)
ex10 = round . root . head . children . fmap (\x -> if x > 0.5 then x else 0) $ fmap (\x -> sin(fromIntegral x)) xs
-- ===================================
-- Ex. 11-13
-- ===================================
class Monoid m where
mempty :: m
mappend :: m -> m -> m
newtype Sum a = Sum a
newtype Product a = Product a
instance Num a => Monoid (Sum a) where
mempty = Sum 0
mappend x y = Sum $ (unSum x) + (unSum y)
instance Num a => Monoid (Product a) where
mempty = Product 1
mappend x y = Product $ (unProduct x) * (unProduct y)
unSum :: Sum a -> a
unSum (Sum a) = a
unProduct :: Product a -> a
unProduct (Product a) = a
num1 = mappend (mappend (Sum 2) (mappend (mappend mempty (Sum 1)) mempty)) (mappend (Sum 2) (Sum 1))
num2 = mappend (Sum 3) (mappend mempty (mappend (mappend (mappend (Sum 2) mempty) (Sum (-1))) (Sum 3)))
ex13 = unSum (mappend (Sum 5) (Sum (unProduct (mappend (Product (unSum num2)) (mappend (Product (unSum num1)) (mappend mempty (mappend (Product 2) (Product 3))))))))
-- ===================================
-- Ex. 14-15
-- ===================================
class Functor f => Foldable f where
fold :: Monoid m => f m -> m
foldMap :: Monoid m => (a -> m) -> (f a -> m)
foldMap g h = fold $ fmap g h
instance Foldable Rose where
fold (x :> []) = x `mappend` mempty
fold (x :> y) = x `mappend` (foldr mappend mempty $ map fold y)
sumxs = Sum 0 :> [Sum 13 :> [Sum 26 :> [Sum (-31) :> [Sum (-45) :> [], Sum 23 :> []]]], Sum 27 :> [], Sum 9 :> [Sum 15 :> [Sum 3 :> [Sum (-113) :> []], Sum 1 :> []], Sum 71 :> [Sum 55 :> []]]]
ex15 = unSum (mappend (mappend (fold sumxs) (mappend (fold . head . drop 2 . children $ sumxs) (Sum 30))) (fold . head . children $ sumxs))
-- ===================================
-- Ex. 16-18
-- ===================================
ex17 = unSum (mappend (mappend (foldMap (\x -> Sum x) xs) (mappend (foldMap (\x -> Sum x) . head . drop 2 . children $ xs) (Sum 30))) (foldMap (\x -> Sum x) . head . children $ xs))
ex18 = unSum (mappend (mappend (foldMap (\x -> Sum x) xs) (Sum (unProduct (mappend (foldMap (\x -> Product x) . head . drop 2 . children $ xs) (Product 3))))) (foldMap (\x -> Sum x) . head . children $ xs))
-- ===================================
-- Ex. 19-21
-- ===================================
fproduct, fsum :: (Foldable f, Num a) => f a -> a
fsum = unSum . foldMap Sum
fproduct = unProduct . foldMap Product
ex21 = ((fsum . head . drop 1 . children $ xs) + (fproduct . head . children . head . children . head . drop 2 . children $ xs)) - (fsum . head . children . head . children $ xs)
| mitochon/hexercise | src/mooc/fp101/Lab6RoseTemplate.hs | mit | 3,871 | 17 | 24 | 775 | 1,894 | 954 | 940 | 50 | 2 |
-- Generated by protobuf-simple. DO NOT EDIT!
module Types.StringMsg where
import Control.Applicative ((<$>))
import Prelude ()
import qualified Data.ProtoBufInt as PB
newtype StringMsg = StringMsg
{ value :: PB.Text
} deriving (PB.Show, PB.Eq, PB.Ord)
instance PB.Default StringMsg where
defaultVal = StringMsg
{ value = PB.defaultVal
}
instance PB.Mergeable StringMsg where
merge a b = StringMsg
{ value = PB.merge (value a) (value b)
}
instance PB.Required StringMsg where
reqTags _ = PB.fromList [PB.WireTag 1 PB.LenDelim]
instance PB.WireMessage StringMsg where
fieldToValue (PB.WireTag 1 PB.LenDelim) self = (\v -> self{value = PB.merge (value self) v}) <$> PB.getString
fieldToValue tag self = PB.getUnknown tag self
messageToFields self = do
PB.putString (PB.WireTag 1 PB.LenDelim) (value self)
| sru-systems/protobuf-simple | test/Types/StringMsg.hs | mit | 848 | 0 | 13 | 157 | 296 | 159 | 137 | 20 | 0 |
import Control.Monad.Reader
data GameState = NotOver | FirstPlayerWin | SecondPlayerWin | Tie
data Game position
= Game {
getNext :: position -> [position],
getState :: position -> GameState
}
getNext' :: position -> Reader (Game position) [position]
getNext' position
= do game <- ask
return $ getNext game position
getState' :: position -> Reader (Game position) GameState
getState' position
= do game <- ask
return $ getState game position
negamax :: Double -> position -> Reader (Game position) Double
negamax color position
= do state <- getState' position
case state of
FirstPlayerWin -> return color
SecondPlayerWin -> return $ negate color
Tie -> return 0
NotOver -> do possible <- getNext' position
values <- mapM ((liftM negate) . negamax (negate color)) possible
return $ maximum values
| Muzietto/geiesmonads | es6/Megamax.hs | mit | 990 | 32 | 10 | 318 | 278 | 146 | 132 | -1 | -1 |
module TicTacToe.ResultSpec where
import Test.Hspec
import qualified TicTacToe.Fixtures as Fixtures
import qualified TicTacToe.Result as Result
spec :: Spec
spec =
describe "TicTacToe.Result" $ do
context "for in play board" $ do
let board = Fixtures.inPlayBoard
let result = Result.fromBoard board
describe "show" $
it "returns a string" $
show result `shouldBe` "TerminalGame is in play"
describe "getBoard" $
it "returns the board" $
Result.board result `shouldBe` board
context "for cross won board" $ do
let board = Fixtures.xWonBoard
let result = Result.fromBoard board
describe "show" $
it "returns a string" $
show result `shouldBe` "Crosses win"
describe "getBoard" $
it "returns the board" $
Result.board result `shouldBe` board
context "for naught won board" $ do
let board = Fixtures.oWonBoard
let result = Result.fromBoard board
describe "show" $
it "returns a string" $
show result `shouldBe` "Naughts win"
describe "getBoard" $
it "returns the board" $
Result.board result `shouldBe` board
context "for drawn oard" $ do
let board = Fixtures.drawnBoard
let result = Result.fromBoard board
describe "show" $
it "returns a string" $
show result `shouldBe` "Draw"
describe "getBoard" $
it "returns the board" $
Result.board result `shouldBe` board
-- context "fromBoard" $ do
-- it "is in play if there are empty cells and no winning lines" $ do
-- let board = Board [ Naught, Cross, Empty
-- , Naught, Cross, Cross
-- , Cross, Naught, Naught
-- ]
-- GameLogic.getGameState board `shouldBe` InPlay
-- it "is a draw if all cells are taken and there are no winning lines" $ do
-- let board = Board [ Naught, Cross, Naught
-- , Naught, Cross, Cross
-- , Cross, Naught, Naught
-- ]
-- GameLogic.getGameState board `shouldBe` Draw
-- it "has been won be crosses if the there is a winning line of crosses" $ do
-- let board = Board [ Naught, Cross, Naught
-- , Cross, Cross, Cross
-- , Empty, Naught, Naught
-- ]
-- GameLogic.getGameState board `shouldBe` Winner Crosses
-- it "has been won be naughts if the there is a winning line of naughts" $ do
-- let board = Board [ Naught, Cross, Naught
-- , Cross, Naught, Cross
-- , Empty, Naught, Naught
-- ]
-- GameLogic.getGameState board `shouldBe` Winner Naughts
| tomphp/haskell-tictactoe | test/TicTacToe/ResultSpec.hs | mit | 2,912 | 51 | 6 | 1,065 | 353 | 201 | 152 | 43 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
module Reddit.Types where
import Control.Applicative
import Control.Monad
import Data.Aeson (FromJSON (..), ToJSON, (.:))
import Data.Text (Text)
newtype DisplayName = DisplayName { unDisplayName :: Text }
deriving (Show,Read,FromJSON,ToJSON)
newtype Title = Title { unTitle :: Text }
deriving (Show,Read,FromJSON,ToJSON)
newtype Fullname = Fullname { unFullname :: Text }
deriving (Show,Read,FromJSON,ToJSON)
data Subreddit = Subreddit
{ displayName :: DisplayName
, fullname :: Fullname
}
deriving (Show,Read)
data Thing = Thing
{ kind :: Text
, thingData :: Subreddit
}
deriving (Show,Read)
---------------------------------------------
-- Instances
---------------------------------------------
instance FromJSON Subreddit where
parseJSON = parseJSON >=> go
where
go o = Subreddit
<$> (DisplayName <$> (o .: "display_name"))
<*> (Fullname <$> (o .: "name"))
instance FromJSON Thing where
parseJSON = parseJSON >=> go
where
go o = Thing
<$> o .: "kind"
<*> o .: "data"
| sifisifi/subscribe-jpsubreddits | src/Reddit/Types.hs | mit | 1,201 | 0 | 13 | 301 | 320 | 186 | 134 | 30 | 0 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- http://stackoverflow.com/questions/27591266/telling-cabal-where-the-main-module-is
module Betfair.StreamingAPI.API.StreamingState
( StreamingState(..)
, defaultStreamingState
, SessionToken
) where
import Data.Aeson.TH (Options (omitNothingFields),
defaultOptions,
deriveJSON)
import qualified Data.IntMap.Strict as IntMap
import Data.Time
import Protolude
import Text.PrettyPrint.GenericPretty
import Betfair.APING
import Betfair.StreamingAPI.API.Request
type SessionToken = Token
data StreamingState = StreamingState
{ ssRequests :: IntMap.IntMap Request
, ssIdCounter :: Int
, ssSessionToken :: SessionToken
, ssAppKey :: AppKey
, ssNeedHumanHelp :: Bool
, ssLastMarketSubscriptionMessageSentAt :: UTCTime
} deriving (Eq, Read, Show, Generic, Pretty)
defaultStreamingState :: UTCTime -> StreamingState
defaultStreamingState time = StreamingState IntMap.empty 1 "" "" False time
$(deriveJSON defaultOptions {omitNothingFields = True} ''StreamingState)
| joe9/streaming-betfair-api | src/Betfair/StreamingAPI/API/StreamingState.hs | mit | 1,457 | 0 | 10 | 448 | 218 | 134 | 84 | 30 | 1 |
import System.Random
import System.IO.Unsafe
import Data.List
import Data.Char
--Problem 1: Retrieve the last element from a list
myLast :: [a] -> a
myLast [x] = x
myLast (_:xs) = myLast xs
--
--Problem 2: Retrieve the second to last element from a list
myButLast :: [a] -> a
myButLast xs = myLast (init xs)
--
--Problem 3: Find the kth element of a list
-- if k > length, returns the last element
elementAt :: [a] -> Int -> a
elementAt xs k = myLast (take k xs)
--
--Problem 4: Find the number of elements of a list
myLength :: [a] -> Int
myLength [] = 0
myLength (_:xs) = myLength xs + 1
--
--Problem 5: Reverse a list
myReverse :: [a] -> [a]
myReverse [] = []
myReverse (x:xs) = myReverse xs ++ [x]
--
--Problem 6: Find out whether a list is a palindrome
myPalindrome :: (Eq a) => [a] -> Bool
myPalindrome [] = True
myPalindrome [_] = True
myPalindrome xs = myPalindrome (init (tail xs)) && head xs == last xs
--
----Problem 7: Flatten a nested list structure
--data NestedList a = Elem a | List [NestedList a]
--myFlatten :: NestedList a -> [a]
--myFlatten (Elem n) = [n]
--myFlatten (List xs) = concatMap myFlatten xs
--Problem 8: Eliminate consecutive duplicates of list elements
myCompress :: (Eq a) => [a] -> [a]
myCompress [] = []
myCompress [x] = [x]
myCompress (a:b:xs)
| a == b = (myCompress (a:xs))
| otherwise = a:(myCompress (b:xs))
--
--Problem 9: Pack consecutive duplicates of list elements into sublists
myPack :: (Eq a) => [a] -> [[a]]
myPack [] = []
myPack xs =
let numDupes = countFirstDuplicates xs
in [take numDupes xs] ++ myPack (drop numDupes xs)
countFirstDuplicates :: (Eq a) => [a] -> Int
countFirstDuplicates [] = 0
countFirstDuplicates [a] = 1
countFirstDuplicates (a:b:xs)
| a == b = countFirstDuplicates (b:xs) + 1
| otherwise = 1
--
--Problem 10: Run-length encoding of a list
myEncode :: (Eq a) => [a] -> [(Int, a)]
myEncode [] = []
myEncode xs =
let packedHead = head (myPack xs)
headLength = length packedHead
in [(length packedHead, head packedHead)] ++ myEncode (drop headLength xs)
--Problem 11: Modified run-length encoding
--Problem 12: Decode a run-length encoded list
--Problem 13: Run-length encoding of a list (direct solution)
--Problem 14: Duplicate the elements of a list
myDupli :: [a] -> [a]
myDupli [] = []
myDupli (x:xs) = x:x:myDupli xs
--
--Problem 15: Replicate the elements of a list a given number of times
myRepli :: [a] -> Int -> [a]
myRepli _ 0 = []
myRepli [] n = []
myRepli (x:xs) n =
x:myRepli [x] (n - 1) ++ (myRepli xs n)
--
--Problem 16: Drop every N'th element from a list
myDropEvery :: [a] -> Int -> [a]
myDropEvery [] _ = []
myDropEvery xs n
| length xs < n = xs
| otherwise = init (take n xs) ++ myDropEvery (drop n xs) n
--
--Problem 17: Split a list into two parts; the length of the first part is given
--Do not use any predefined predicates.
mySplit :: [a] -> Int -> ([a], [a])
mySplit xs n = (myTake n xs, myDrop n xs)
myTake :: Int -> [a] -> [a]
myTake _ [] = []
myTake 0 _ = []
myTake n (x:xs) =
x:myTake (n - 1) xs
myDrop :: Int -> [a] -> [a]
myDrop _ [] = []
myDrop 0 xs = xs
myDrop n (x:xs) =
myDrop (n - 1) xs
--
--Problem 18: Extract a slice from a list
mySlice :: [a] -> Int -> Int -> [a]
mySlice xs i k
| k <= i = []
| otherwise = take (k - i + 1) (drop (i - 1) xs)
--
--Problem 19: Rotate a list N places to the left
myRotate :: [a] -> Int -> [a]
myRotate xs 0 = xs
myRotate xs n
| n > 0 = myRotate (tail xs ++ [head xs]) (n - 1)
| otherwise = myRotate (last xs : init xs) (n + 1)
--
--Problem 20: Remove the K'th element from a list.
-- counting from 1
myRemoveAt :: [a] -> Int -> (a, [a])
myRemoveAt xs n = (xs !! (n - 1), take (n - 1) xs ++ drop n xs)
--
--Problem 21: Insert an element at a given position into a list.
-- counting from 1
-- If the number is outside the range, insert at the proper end.
myInsertAt :: a -> [a] -> Int -> [a]
myInsertAt x xs n = take (n - 1) xs ++ [x] ++ drop (n - 1) xs
--
--Problem 22: Create a list containing all integers within a given range.
myRange :: Int -> Int -> [Int]
myRange a b = [a..b]
--
--Problem 23: Extract a given number of randomly selected elements from a list.
myRandomSelect :: [a] -> Int -> [a]
myRandomSelect _ 0 = []
myRandomSelect [] _ = []
myRandomSelect xs n =
let index = myRandom 0 (length xs - 1)
in [xs !! index] ++ myRandomSelect (snd (myRemoveAt xs (index + 1))) (n - 1)
--
--Problem 24: Lotto: Draw N different random numbers from the set 1..M.
myRandomSelectFromRange :: Int -> Int -> [Int]
myRandomSelectFromRange n maxLim =
myRandomSelect [1..maxLim] n
--Problem 25: Generate a random permutation of the elements of a list.
myRandomPermute :: [a] -> [a]
myRandomPermute xs = myRandomSelect xs (length xs)
--
myRandom :: Int -> Int -> Int
--Don't do this, evidently. Must figure out IO.
myRandom a b = unsafePerformIO (randomRIO (a, b))
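--A hedged IO-based alternative sketch (names are illustrative): the same
--selection as myRandomSelect, but keeping the randomness inside IO instead
--of reaching for unsafePerformIO.
myRandomSelectIO :: [a] -> Int -> IO [a]
myRandomSelectIO _ 0 = return []
myRandomSelectIO [] _ = return []
myRandomSelectIO xs n = do
  index <- randomRIO (0, length xs - 1)
  rest <- myRandomSelectIO (snd (myRemoveAt xs (index + 1))) (n - 1)
  return (xs !! index : rest)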
--Problem 26: Generate the combinations of K distinct objects chosen from the N elements of a list
--Problem 27: Group the elements of a set into disjoint subsets.
--Problem 28a
myLSort :: [[a]] -> [[a]]
myLSort [] = []
myLSort (x:xs) =
let smallerThanHead = myLSort[a | a <- xs, length a <= length x]
biggerThanHead = myLSort[a | a <- xs, length a > length x]
in smallerThanHead ++ [x] ++ biggerThanHead
--
----Problem 28b
--myLfSort :: [[a]] -> [[a]]
--myLfSort [] = []
--myLfSort (x:xs) =
-- let smallerThanHead = myLfSort[a | a <- xs, countFrequencies (length a) (x:xs) <= countFrequencies lenx (x:xs)]
-- biggerThanHead = myLfSort[a | a <- xs, countFrequencies (length a) (x:xs) > countFrequencies lenx (x:xs)]
-- in smallerThanHead ++ [x] ++ biggerThanHead
-- where lenx = length x
----myLfSort (x:xs) =
---- let
--countFrequencies :: Int -> [[a]] -> Int
--countFrequencies len list =
-- length [a | a <- list, length a == len]
--
--Problem 29: DOES NOT EXIST
--Problem 30: DOES NOT EXIST
--Problem 31: Determine whether a given integer number is prime.
myIsPrime :: Int -> Bool
myIsPrime 1 = False
myIsPrime 2 = True
myIsPrime x =
if numDivisors x == 1 then True else False
numDivisors :: Int -> Int
numDivisors x =
length [a | a <- [1..ceiling (sqrt (fromIntegral x))]{-, abs (a `mod` 6) == 1-}, x `mod` a == 0]
--
--Problem 32: Determine the greatest common divisor of two positive integer numbers. Use Euclid's algorithm.
myGCD :: Int -> Int -> Int
myGCD a b = if b == 0 then a else myGCD b (a `mod` b)
--
--Problem 33: Determine whether two positive integer numbers are coprime.
myCoprime :: Int -> Int -> Bool
myCoprime a b = if myGCD a b == 1 then True else False
--
--Problem 34: Calculate Euler's totient function phi(m).
myTotient :: Int -> Int
myTotient n = length [a | a <- [2..n], myCoprime a n] + 1
--
--Problem 35: Determine the prime factors of a given positive integer.
--Construct a flat list containing the prime factors in ascending order.
myPrimeFactors :: Int -> [Int]
myPrimeFactors n
| n < 0 = myPrimeFactors (abs n)
| n < 2 = []
| otherwise = a : myPrimeFactors (quot n a)
-- should start the generation later
where a = head [a | a <- generatePrimeList, n `mod` a == 0]
--
--Problem 36: Determine the prime factors of a given positive integer.
--Construct a list containing the prime factors and their multiplicity.
myPrimeFactorsMult :: Int -> [(Int, Int)]
myPrimeFactorsMult n =
[(snd a, fst a) | a <- myEncode (myPrimeFactors n)]
--
--Problem 37: Calculate Euler's totient function phi(m) (improved).
myPhi :: Int -> Int
myPhi n =
let primeFactors = myPrimeFactorsMult n
in product (map totientPart primeFactors)
totientPart :: (Int, Int) -> Int
totientPart (p1, m1) = (p1 - 1) * p1 ^ (m1 - 1)
--Problem 38: Compare the two methods of calculating Euler's totient function.
{- myPhi is much faster than myTotient, especially considering that
I hadn't even pre-generated the primelist and I have to
regenerate it every time. -}
--
--Problem 39: Given a range of integers by its lower and upper limit,
-- construct a list of all prime numbers in that range.
myPrimesRange :: Int -> Int -> [Int]
myPrimesRange minLimit maxLimit =
dropWhile (< minLimit) (takeWhile (< maxLimit) generatePrimeList)
--
--Problem 40: Goldbach's conjecture.
myGoldbach :: Int -> (Int, Int)
myGoldbach n =
if even n && n > 2 then
let primes = myPrimesRange 2 (n - 1)
in head [(a, n - a) | a <- primes, (n - a) `elem` primes]
else error "Even numbers > 2 only!"
--
--Problem 41: Given a range of integers by its lower and upper limit,
--print a list of all even numbers and their Goldbach composition.
myGoldbachList :: Int -> Int -> [(Int, Int, Int)]
myGoldbachList minLimit maxLimit =
[(a, fst gb, snd gb) | a <- [minLim, minLim + 2..maxLimit], let gb = myGoldbach a]
where minLim = minLimit + minLimit `mod` 2
myGoldbachList' :: Int -> Int -> Int -> [(Int, Int, Int)]
myGoldbachList' minLimit maxLimit primesOver =
  [(a, b, c) | (a, b, c) <- myGoldbachList minLimit maxLimit, b > primesOver, c > primesOver]
--
-- Helper to generate primes
generatePrimeList :: [Int]
generatePrimeList =
2:[a | a <- [1, 3..], myIsPrime a]
--
--Problem 42: DOES NOT EXIST
--Problem 43: DOES NOT EXIST
--Problem 44: DOES NOT EXIST
--Problem 45: DOES NOT EXIST
--Problem 46: NOT FINISHED
myNot :: Bool -> Bool
myNot False = True
myNot True = False
myAnd, myOr, myNand, myNor, myXor, myImpl, myEqu :: Bool -> Bool -> Bool
myAnd True True = True
myAnd _ _ = False
myOr False False = False
myOr _ _ = True
myNand a b = myNot (myAnd a b)
myNor a b = myNot (myOr a b)
myXor True True = False
myXor False False = False
myXor _ _ = True
myImpl a b = (myNot a) `myOr` b
myEqu True True = True
myEqu False False = True
myEqu _ _ = False
--Problem 90: Eight Queens
myNQueens :: Int -> [[Int]]
--each element of each list represents
--its position in the column. Assuming n is
--The number of queens to place as well as the
--dimension of the board.
myNQueens n = [x | x <- permutations [1..n], queensSafe x]
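--A hedged example: myNQueens 4 should yield exactly the two classic solutions,
--[2,4,1,3] and [3,1,4,2], in some permutation order.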
queensSafe :: [Int] -> Bool
queensSafe [] = True
queensSafe l@(x:xs)
| firstQueenSafe l == True = queensSafe xs
| otherwise = False
firstQueenSafe :: [Int] -> Bool
firstQueenSafe l@(x:xs) =
let unsafe1 = [x, x - 1 .. x - length l]
unsafe2 = [x, x + 1 .. x + length l]
zipped = zip (tail unsafe1) (tail l) ++ zip (tail unsafe2) (tail l)
countMatches = length [x | x <- zipped, fst x == snd x]
in if countMatches == 0 then True else False
--
--Problem 91: Knight's tour
--myKnightsTour :: Int -> (Int, Int) -> [(Int, Int)]
--myKnightsTour n end =
-- myKTRecursive n end (myCombineLists [1..8] [1..8]) []
--myKTRecursive :: Int -> (Int, Int) -> [(Int, Int)] -> [(Int, Int)]
--myKTRecursive n end mustreach path =
-- | length mustreach == 0 = path
-- | -- 4 recursive calls for possiblepaths
--Problem 95: English Number Words
fullWords :: Int -> String
fullWords n
| n < 10 = digitToString n
| otherwise = fullWords (quot (n - lastDigit) 10) ++ "-" ++ digitToString (mod n 10)
where lastDigit = mod n 10
digitToString x =
["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"] !! x
--
--Problem 96: Syntax checker
identifier :: String -> Bool
identifier "" = True
identifier (x:xs)
| isLetter x == True = identifier' xs
| otherwise = False
identifier' :: String -> Bool
identifier' "" = True
identifier' (x:xs)
| isLetter x = identifier' xs
| isDigit x = identifier' xs
| x == '-' && length xs > 0 && (isLetter (head xs) || isDigit (head xs)) = identifier' (tail xs)
| otherwise = False
--
myCombineLists :: [a] -> [a] -> [(a, a)]
myCombineLists a b =
[(fst x, snd x) | x <- (cycle a) `zip` (myRepli b (length a))]
mySum :: (Num a) => [a] -> a
mySum [] = 0
mySum (x:xs) = x + mySum xs
factorial :: (Integral a) => a -> a
factorial n = product [1..n]
myCountOccurences :: Eq a => a -> [a] -> Int
myCountOccurences _ [] = 0
myCountOccurences n (x:xs)
| x == n = 1 + myCountOccurences n xs
| otherwise = myCountOccurences n xs
mySort :: (Ord a) => [a] -> [a]
mySort [] = []
mySort (x:xs) =
let smallerThanHead = mySort[a | a <- xs, a <= x]
biggerThanHead = mySort[a | a <- xs, a > x]
in smallerThanHead ++ [x] ++ biggerThanHead
halveList :: [a] -> ([a], [a])
halveList [] = ([], [])
--len = (length a) `div` 2
halveList a =
let len = (length a) `div` 2
in (take len a, drop len a)
applyTwice :: (a -> a) -> a -> a
applyTwice f x = f (f x)
sumMultiples :: Int -> Int -> Int -> Int
sumMultiples limit mult1 mult2 =
  sum [x | x <- [1..limit], x `mod` mult1 == 0 || x `mod` mult2 == 0]
--swapSignTwiceCalled :: Int -> Int
--swapSignTwiceCalled n
-- | even n && n > 0 = n + 1
-- | even n && n < 0 =
main :: IO()
main = print $ length $ myNQueens 10 | ostrowr/99-Haskell-Problems | 99problems.hs | mit | 13,000 | 0 | 14 | 2,991 | 4,356 | 2,336 | 2,020 | 232 | 2 |
module Main where
import Prelude hiding (lookup)
import Control.Applicative ((<$>))
import Control.Monad (join)
import Control.Concurrent.Object
import Control.Concurrent.STM
import qualified Control.Exception as E
import qualified Data.Map as Map
import Control.Monad.Trans.Cont (ContT)
import Control.Concurrent.Structured (liftIO)
import qualified Control.Concurrent.Structured as CS
--------------------------------------------------------------------------------
-- | Group
newtype Group = Group { unGroup :: Object GMessage GReply }
type GroupId = Int
type MemberId = Int
type MemberName = String
data GState = GState
{ groupId :: GroupId
, groupMembers :: Map.Map MemberId MemberName
}
data GMessage
= AddMember MemberId MemberName
| RemoveMember MemberId
| GetGroupId
| GetMember MemberId
| GetAllMembers
data GReply
= AddMemberR Bool
| RemoveMemberR Bool
| GetGroupIdR GroupId
| GetMemberR (Maybe MemberName)
| GetAllMembersR [MemberId]
deriving (Show)
instance ObjectLike IO Group where
type OMessage Group = GMessage
type OReply Group = GReply
type OClass Group = Class GMessage GReply
new cl = Group <$> new cl
(Group obj) ! msg = obj ! msg
(Group obj) !? msg = obj !? msg
kill (Group obj) = kill obj
instance ObjectLike (ContT r IO) Group where
type OMessage Group = GMessage
type OReply Group = GReply
type OClass Group = Class GMessage GReply
new cl = liftIO $ Group <$> new cl
(Group obj) ! msg = liftIO $ obj ! msg
(Group obj) !? msg = liftIO $ (liftIO <$> obj !? msg)
kill (Group obj) = liftIO $ kill obj
--------------------------------------------------------------------------------
newGroup :: GroupId -> IO Group
newGroup gid = new Class
{ classInitializer = return $ GState gid Map.empty
, classFinalizer = (\_st -> putStrLn "Cleanup")
, classCallbackModule = CallbackModule $ \self@Self{..} msg -> case msg of
AddMember mid name -> do
atomically $ modifyTVar' selfState
(\ gst -> gst { groupMembers = Map.insert mid name (groupMembers gst) })
return (AddMemberR True, self)
RemoveMember mid -> do
atomically $ modifyTVar' selfState
(\ gst -> gst { groupMembers = Map.delete mid (groupMembers gst) })
return (RemoveMemberR True, self)
GetGroupId -> do
gst <- atomically $ readTVar selfState
return (GetGroupIdR $ groupId gst, self)
GetMember mid -> do
gst <- atomically $ readTVar selfState
return (GetMemberR $ Map.lookup mid (groupMembers gst), self)
GetAllMembers -> do
mids :: [MemberId]
<- (map fst . Map.toList . groupMembers) <$> (atomically $ readTVar selfState)
return (GetAllMembersR mids, self)
}
main1 :: IO ()
main1 = do
let
gid :: Int
gid = 42
gr :: Group <- newGroup gid
gr ! AddMember 1 "Alice"
gr ! AddMember 2 "Bob"
gr ! AddMember 3 "Charlie"
gr ! AddMember 4 "Edward"
gr ! RemoveMember 2
gr ! RemoveMember 3
(join $ gr !? GetGroupId) >>= print
(join $ gr !? GetMember 1) >>= print
(join $ gr !? GetMember 2) >>= print
(join $ gr !? GetAllMembers) >>= print
kill gr
main2 :: IO ()
main2 = CS.runConcurrent $ do
let
gid :: Int
gid = 42
put :: String -> CS.Concurrent ()
put = liftIO . putStrLn
gr :: Group <- liftIO $ newGroup gid
gr ! AddMember 1 "Alice"
gr ! AddMember 2 "Bob"
gr ! AddMember 3 "Charlie"
gr ! AddMember 4 "Edward"
gr ! RemoveMember 2
gr ! RemoveMember 3
(join $ gr !? GetGroupId) >>= liftIO . print
(join $ gr !? GetMember 1) >>= liftIO . print
(join $ gr !? GetMember 2) >>= liftIO . print
(join $ gr !? GetAllMembers) >>= liftIO . print
kill gr
main :: IO ()
main = main1 >> main2
| ruicc/structured-concurrent-object | tests/group.hs | mit | 4,108 | 0 | 23 | 1,232 | 1,336 | 681 | 655 | -1 | -1 |
module Y2015.D20Spec (spec) where
import Y2015
import Test.Hspec
spec :: Spec
spec = parallel $
describe "Day 20" $
describe "withMinPresents" $
it "finds 70 presents at house four" $
withMinPresents 70 `shouldBe` 4
| tylerjl/adventofcode | test/Y2015/D20Spec.hs | mit | 258 | 0 | 10 | 76 | 63 | 34 | 29 | 9 | 1 |
module Cube where
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
import Data.Tuple.HT
cube w = do
renderPrimitive Quads $ do
vertex $ Vertex3 w w w
vertex $ Vertex3 w w (-w)
vertex $ Vertex3 w (-w) (-w)
vertex $ Vertex3 w (-w) w
vertex $ Vertex3 w w w
vertex $ Vertex3 w w (-w)
vertex $ Vertex3 (-w) w (-w)
vertex $ Vertex3 (-w) w w
vertex $ Vertex3 w w w
vertex $ Vertex3 w (-w) w
vertex $ Vertex3 (-w) (-w) w
vertex $ Vertex3 (-w) w w
vertex $ Vertex3 (-w) w w
vertex $ Vertex3 (-w) w (-w)
vertex $ Vertex3 (-w) (-w) (-w)
vertex $ Vertex3 (-w) (-w) w
vertex $ Vertex3 w (-w) w
vertex $ Vertex3 w (-w) (-w)
vertex $ Vertex3 (-w) (-w) (-w)
vertex $ Vertex3 (-w) (-w) w
vertex $ Vertex3 w w (-w)
vertex $ Vertex3 w (-w) (-w)
vertex $ Vertex3 (-w) (-w) (-w)
vertex $ Vertex3 (-w) w (-w)
| fatuhoku/haskell-yampa-bouncing-ball | src/Cube.hs | mit | 896 | 0 | 13 | 262 | 556 | 275 | 281 | 30 | 1 |
module HtmlDocSpec where
import Prelude hiding (div)
import Test.Hspec
import HtmlDoc
import Dom
import Debug.Trace
div :: String -> [HtmlNode] -> HtmlNode
div id children = htmlNodeWithId "div" id children
spec :: Spec
spec = do
it "sets font size from CSS" $ do
let css = [ (parseSelector "#big", [FontSize (Pct 150)]) ]
let node = head $ children $ toDom $ htmlDoc css [ div "big" [] ]
fontSize (properties node) `shouldBe` 24
it "sets height from CSS" $ do
let css = [ (parseSelector "#big", [Height (Px 372)]) ]
let node = head $ children $ toDom $ htmlDoc css [ div "big" [] ]
height (properties node) `shouldBe` (Px 372)
| pbevin/toycss | test/HtmlDocSpec.hs | gpl-2.0 | 660 | 0 | 18 | 146 | 272 | 139 | 133 | 18 | 1 |
module Text.Pandoc.CrossRef.Util.CodeBlockCaptions
(
mkCodeBlockCaptions
) where
import Text.Pandoc.Definition
import Text.Pandoc.Shared (normalizeSpaces)
import Data.List (isPrefixOf, stripPrefix)
import Data.Maybe (fromMaybe)
import Text.Pandoc.CrossRef.References.Types
import Text.Pandoc.CrossRef.Util.Options
mkCodeBlockCaptions :: Options -> [Block] -> WS [Block]
mkCodeBlockCaptions opts x@(cb@(CodeBlock _ _):p@(Para _):xs)
= return $ fromMaybe x $ orderAgnostic opts $ p:cb:xs
mkCodeBlockCaptions opts x@(p@(Para _):cb@(CodeBlock _ _):xs)
= return $ fromMaybe x $ orderAgnostic opts $ p:cb:xs
mkCodeBlockCaptions _ x = return x
orderAgnostic :: Options -> [Block] -> Maybe [Block]
orderAgnostic opts (Para ils:CodeBlock (label,classes,attrs) code:xs)
| codeBlockCaptions opts
, Just caption <- getCodeBlockCaption ils
, not $ null label
, "lst" `isPrefixOf` label
= return $ Div (label,"listing":classes, [])
[Para caption, CodeBlock ([],classes,attrs) code] : xs
orderAgnostic _ _ = Nothing
getCodeBlockCaption :: [Inline] -> Maybe [Inline]
getCodeBlockCaption ils
| Just caption <- [Str "Listing:",Space] `stripPrefix` normalizeSpaces ils
= Just caption
| Just caption <- [Str ":",Space] `stripPrefix` normalizeSpaces ils
= Just caption
| otherwise
= Nothing
| infotroph/pandoc-crossref | lib/Text/Pandoc/CrossRef/Util/CodeBlockCaptions.hs | gpl-2.0 | 1,318 | 0 | 12 | 202 | 518 | 275 | 243 | 32 | 1 |
--project euler problem 15
{--
Starting in the top left corner of a 2×2 grid, there are 6 routes (without backtracking) to the bottom right corner.
How many routes are there through a 20×20 grid?
--}
{--
mathematically this is a trivial problem: every route is a sequence of exactly
20 moves right and 20 moves down, so the answer is the central binomial
coefficient 40 `choose` 20... pascal's triangle to the rescue!
--}
factorial n = product [1..n]
m `choose` n = (factorial m) `div` ((factorial n) * (factorial (m-n)))
main = do print $ 40 `choose` 20
| goalieca/haskelling | 015.hs | gpl-3.0 | 425 | 0 | 11 | 79 | 89 | 49 | 40 | 3 | 1 |
instance Functor (Reader r) where
fmap f g = f . g | hmemcpy/milewski-ctfp-pdf | src/content/1.8/code/haskell/snippet22.hs | gpl-3.0 | 54 | 0 | 7 | 15 | 29 | 14 | 15 | 2 | 0 |
{-# language OverloadedStrings, NamedFieldPuns #-}
module Redsift.SignUrl (signUrl, URI) where
import Network.HTTP
import Network.AWS.Authentication
import Network.AWS.AWSConnection
import Network.URI
import System.FilePath
type Bucket = String
type Object = String
-- $ Signs a url. Outputs the uri with the bucket as subdomain (not subpath).
signUrl :: (String, String) -> Bucket -> Object -> Integer -> URI
signUrl (accessKey, secretKey) bucket object expirationTime =
convertToEndpointURI (preSignedURI action expirationTime)
where
action :: S3Action
action = S3Action {
s3conn = connection,
s3bucket = bucket,
s3object = object,
s3query = "",
s3metadata = [],
s3body = "",
s3operation = GET
}
connection :: AWSConnection
connection = AWSConnection {
awsHost = "s3.amazonaws.com",
awsPort = 80,
awsAccessKey = accessKey,
awsSecretKey = secretKey
}
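-- A hedged usage sketch; the credentials, bucket, object and expiry time below
-- are placeholders rather than real values.
exampleSignedUrl :: URI
exampleSignedUrl =
  signUrl ("ACCESS-KEY-ID", "SECRET-ACCESS-KEY") "my-bucket" "backups/dump.csv" 1893456000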
-- | Network.AWS.Authentication.preSignedURI does return the signed URI
-- with the bucket as a subpath, but we need it as a subdomain.
-- (I think that buckets as subdomains are called endpoints.)
convertToEndpointURI :: URI -> URI
convertToEndpointURI input = case input of
URI{uriAuthority = Just (URIAuth userInfo regName _), uriPath} ->
case splitDirectories uriPath of
("/" : bucket : rest) ->
input{
uriScheme = "https:",
uriAuthority = Just $ URIAuth userInfo
(bucket ++ "." ++ regName) "",
uriPath = joinPath ("/" : rest)
}
_ -> error ("convertToEndpointURI: uri should start with /$BUCKET: " ++ show input)
_ ->
error ("convertToEndpointURI: uri should contain the authority: " ++ show input)
| zalora/redsift | Redsift/SignUrl.hs | gpl-3.0 | 1,849 | 0 | 17 | 521 | 376 | 216 | 160 | 40 | 3 |
{-# LANGUAGE TypeFamilies, FlexibleInstances, FlexibleContexts, ViewPatterns, RecordWildCards, NamedFieldPuns, ScopedTypeVariables, TypeSynonymInstances, NoMonomorphismRestriction, TupleSections, StandaloneDeriving, GeneralizedNewtypeDeriving, DeriveDataTypeable, MultiParamTypeClasses #-}
module R3Immersions where
import MathUtil
import Numeric.AD.Vector
import Data.VectorSpace
import Tetrahedron.Vertex
import Tetrahedron.Edge
import ConcreteNormal
import Util
import R3Immersions.Simplices
semidisk
:: (RealFloat (Scalar v), Show (Scalar v), VectorSpace v) =>
v -> v -> v -> Tup2 (Scalar v) -> v
semidisk im00 im01 im10 ab =
let
Tup2 (x,y) = standardSemidisk ab
-- (x,y) = c00 (1,0) + c01 * (0,-1) + c10 * (0,1)
-- c00 + c01 + c10 = 1
-- <=>
-- x = c00
-- y = c10 - c01
-- c00 + c01 + c10 = 1
c00 = x
c01 = (1 - x - y)/2
c10 = y + c01
in
c00 *^ im00
^+^
c01 *^ im01
^+^
c10 *^ im10
-- | Maps:
--
-- * (0,0) to (1,0)
--
-- * (0,1) to (0,-1)
--
-- * (1,0) to (0,1)
--
-- Input is assumed to be in the unit 2-simplex
standardSemidisk (Tup2 (aness,bness)) =
let
-- runs from 0 to 1
abness = aness+bness
abnessDistorted = abness
-- runs from -1 to 1
a_vs_b = if abness == 0
then 0
else (aness-bness)/abness
xz_0 = -- if we're close to c (low abness), draw circular arcs
(let
phimax = acos (abnessDistorted/2)
foo = cos (a_vs_b*phimax) * abnessDistorted
bar = sin (a_vs_b*phimax) * abnessDistorted
in tup2 (1-foo) bar)
xz_1 = -- if we're close to ab, draw lines
(let x = 1-abnessDistorted
zbounds = sqrt' (1 - x^2)
-- x^2 + zbounds^2 = 1
z = zbounds*a_vs_b
in
tup2 x z)
xz@(Tup2 (x, z)) = slerpG abness xz_0 xz_1
in
xz
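-- A hedged sanity-check sketch (assuming the Tup2 Double instances used above):
-- the three corners of the unit 2-simplex should land, up to rounding, on the
-- points named in the comment before 'standardSemidisk'.
exampleSemidiskCorners :: [Tup2 Double]
exampleSemidiskCorners = map standardSemidisk [tup2 0 0, tup2 0 1, tup2 1 0]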
-- | x0 and x1 map to the equator, x2 to the top, x3 to the bottom
standardSnapped3Ball (Tup4 (x0, x1, x2, x3)) =
v_xy ^+^ (x2-x3) *^ tup3Z
where
a = x0 + x1
phi_min = 0
phi_max = 2*pi
phi = phi_min + (phi_max-phi_min) * x0/a
v_xy | a > 0 = a *^ tup3 (- (sin phi)) (cos phi) 0
| otherwise = zeroV
torusCoords' :: Floating a => a -> a -> SolidTorusPoint a -> Tup3 a
torusCoords' major minor (STP long lat boundaryness) =
let
minor' = minor * boundaryness
r = major + cos lat * minor'
in
tup3 (cos long * r) (sin long * r) (sin lat * minor')
embedNCorner :: Fractional a1 => NmCorner -> Tup4 a1
embedNCorner c =
embedEd
(edge . unI . c_type $ c)
(interpol t tup2X tup2Y)
where
t =
((fi . unPos . c_pos) c + 1)
/
((fi . cornersOfSameType) c + 1)
| DanielSchuessler/hstri | R3Immersions.hs | gpl-3.0 | 3,363 | 0 | 17 | 1,441 | 815 | 435 | 380 | 67 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import Data.List.Split
import Data.String
import Network.HTTP
import Text.JSON.Generic
import Control.Concurrent.Async
-- This is a quick helper that lets me pass the JSON object first and the key second
-- when pulling a value out.
(!&!) :: (JSON a) => JSObject JSValue -> String -> Result a
(!&!) = flip valFromObj
-- I don't really like how this one looks, but it's just there to grab the query
-- item out of the JSON object.
data QueryObj = QueryObj
{
    query :: ResultsObj
} deriving Show
-- This grabs the result out of the JSON object.
data ResultsObj = ResultsObj
{
results :: QuoteObj
} deriving Show
-- This grabs the quote out of the JSON object...Yahoo really does nest this data
-- way too much.
data QuoteObj = QuoteObj
{
quote :: [Stock]
} deriving Show
-- Here's the meat of it all; this is a handle on the symbol and the daysHigh value
-- from the stock object.
data Stock = Stock
{
sy :: String,
daysHigh :: JSValue
} deriving Show
-- God I write a lot of helper functions. Anyway, this
-- is just a quick composition to extract out the stocks from
-- all these intermediate objects.
getStocks :: QueryObj -> [Stock]
getStocks = quote . results . query
-- This is the first handle on the JSON, and it passes the processing
-- off to results.
instance JSON QueryObj where
-- Keep the compiler quiet
showJSON = undefined
readJSON (JSObject obj) =
QueryObj <$>
obj !&! "query"
-- This hands processing off to the Quote
instance JSON ResultsObj where
-- Keep the compiler quiet
showJSON = undefined
readJSON (JSObject obj) =
ResultsObj <$>
obj !&! "results"
-- This hands processing off to the Stock
instance JSON QuoteObj where
showJSON = undefined
readJSON (JSObject obj) =
QuoteObj <$>
obj !&! "quote"
-- And this finally grabs values and places them into an object we can
-- use. Once we have an object, it's relatively straightforward to make
-- into a comma-separated list.
instance JSON Stock where
-- Keep the compiler quiet
showJSON = undefined
readJSON (JSObject obj) = do
s <- obj !&! "Symbol"
dh <- obj !&! "DaysHigh"
-- d <- obj !&! "Ask"
return Stock{sy = s, daysHigh = dh}
--Stock <$>
--obj !&! "symbol" <*>
--obj !&! "daysHigh"
--readJSON _ = mzero
-- The name is self explanatory here.
quoteWrap x = "\"" ++ x ++ "\""
-- This is a quick composition to break up a file by newline, wrap it
-- in quotes, then break it up into 1000 element chunks.
breakAndChunk = (chunksOf 1000) . (map quoteWrap) . lines
commaList :: [String] -> String
commaList = tail . (foldl concatFields "")
parenWrap :: String -> String
parenWrap x = "(" ++ x ++ ")"
queryBuilder :: String -> String
queryBuilder x = "use \"http://github.com/spullara/yql-tables/raw/d60732fd4fbe72e5d5bd2994ff27cf58ba4d3f84/yahoo/finance/yahoo.finance.quotes.xml\" as quotes; select * from quotes where symbol in " ++ x
urlBuilder :: String -> String
urlBuilder x = "http://query.yahooapis.com/v1/public/yql?format=json&q=" ++ x
-- Gotta love function composition. This is a quick helper function that
-- converts a list of strings to comma-separated, then wraps them in parens,
-- builds a yql query, then makes it safe for URLs, then builds a URL out of it.
makeUrl = urlBuilder . urlEncode . queryBuilder . parenWrap . commaList
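-- For example, with two hypothetical symbols already quote-wrapped the way
-- breakAndChunk produces them:
--
-- >>> (parenWrap . commaList) ["\"AAPL\"", "\"GOOG\""]
-- "(\"AAPL\",\"GOOG\")"
--
-- makeUrl then runs that string through queryBuilder and urlEncode to produce
-- the final YQL request URL.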
-- This is a very simple function that takes in two strings and puts a comma
-- between them. Not particularly useful by itself; it's handy for folds.
concatFields :: String -> String -> String
concatFields x y = x ++ "," ++ y
-- Very simple helper function to do a basic get request
get :: String -> IO String
get url = simpleHTTP (getRequest url) >>= getResponseBody
-- Since there are only two data constructors for the Result type,
-- we don't need to do any kind of fancy Monad magic. Instead, we can
-- use pattern matching and just strip the constructors off.
removeResult :: Result a -> a
removeResult (Ok x) = x
removeResult (Error x) = error x
-- This is a hack; there's only a finite number of data constructors for the JSValue type,
-- and all I really care about are strings and nulls. As such, I can just pattern-match on
-- the constructors and grab out the values.
removeJSValue :: JSValue -> String
removeJSValue (JSString x) = fromJSString x
removeJSValue _ = "null"
jsonToAllStocks = getStocks . removeResult . (\x -> decode x :: Result QueryObj)
-- This is just a quick helper function to convert all the JSON into stocks
getAllStocks = map jsonToAllStocks
prettyPrint = map (\x -> concatFields (sy x) (removeJSValue (daysHigh x)))
main :: IO ()
main = do
-- This probably should change; right now we have a file that contains all
-- the stock symbols, which is just a big CSV with no commas, only newlines.
symbolsFile <- readFile "symbols.csv"
-- Now that we have a handle on the symbols, let's get them into proper chunks
-- to happily handle yahoo's rate-limit.
let symbols = breakAndChunk symbolsFile
-- Now that we have all the symbols that are properly chunked,
-- we can make a big list of URLs to send to yahoo
let urls = map makeUrl symbols
  -- mapConcurrently does what it says; note that it's a monadic map,
  -- hence the <-.
responses <- mapConcurrently get urls
let myObjects = getAllStocks responses
let yo = prettyPrint (concat myObjects)
mapM_ putStrLn yo
| Tombert/StockScrape | src/Main.hs | gpl-3.0 | 5,759 | 0 | 12 | 1,416 | 880 | 484 | 396 | 78 | 1 |
{- |
Module : $Header$
Description : hsay.
Copyright : (c) Alexander Berntsen 2014
License : GPL-3
Maintainer : [email protected]
-} module Main where
import Control.Arrow
(
second,
)
import Data.List
(
intersperse,
)
import Data.Monoid
(
Sum (Sum),
mappend,
mempty,
)
import GHC.IO.Exception
(
ExitCode,
)
import Network.HTTP.Base
(
urlEncode,
)
import System.Environment
(
getArgs,
)
import System.Exit
(
exitWith,
)
import System.Hclip
(
getClipboard,
)
import System.IO
(
hFlush,
stdout,
)
import System.Posix.IO
(
stdInput,
)
import System.Posix.Terminal
(
queryTerminal,
)
import System.Process
(
spawnProcess,
waitForProcess,
)
import Paths_hsay
data Language = MkLang {lang :: String}
| DefLang {lang :: String}
defprogopts :: (String, [String])
defprogopts = ("mpg123", ["-q"])
infixr 9 ~+~
(~+~) :: IO a -> IO a -> IO a
f ~+~ g = queryTerminal stdInput >>= \t -> if t then f else g
main :: IO ()
main = do
as <- getArgs
f <- getDataFileName "data/flip.mp3"
uncurry tts (getLang as) f
getLang :: [String] -> (Language, [String])
getLang (('-':l):xs) = (MkLang l, xs)
getLang xs = (DefLang "en", xs)
tts :: Language -> [String] -> FilePath -> IO ()
tts l [] f = resl l f ~+~ (getContents >>= \cs -> run l (words cs) f)
tts l as f = run l as f
run :: Language -> [String] -> FilePath -> IO ()
run l as f = fork (build l as f) >>= exitWith
resl :: Language -> FilePath -> IO ()
resl l f = do
putStr ">"
hFlush stdout
n <- getLine
case take 5 n of
"#LANG" -> resl (MkLang $ drop 6 n) f
"#CLIP" -> do
c <- getClipboard
fork (build l (words c) f) >> resl l f
_ -> fork (build l (words n) f) >> resl l f
fork :: (FilePath, [String]) -> IO ExitCode
fork f = uncurry spawnProcess f >>= waitForProcess
build :: Language -> [String] -> FilePath -> (FilePath, [String])
build l xs f = (++ intersperse f [ mkUrl l $ unwords t
| t <- chunk [] xs ]) `second` defprogopts
mkUrl :: Language -> String -> String
mkUrl l us = "http://translate.google.com/translate_tts?ie=UTF-8&tl="
++ lang l ++ "&q=" ++ urlEncode us
concatInits :: [Sum Int] -> [Sum Int]
-- Thanks to ollef for this.
concatInits = go mempty
where
go _ [] = []
go acc (x:xs) = x' : go x' xs where x' = mappend acc x
fit :: [[a]] -> [[a]]
-- Thanks to ollef for this.
fit xs = map snd $ takeWhile ((< Sum 101) . fst) $ zip lxs xs
where lxs = concatInits $ map (Sum . (+1) . length) xs
chunk :: Eq a => [[[a]]] -> [[a]] -> [[[a]]]
-- The left list is the parsed-to-appropriate-size 'a's.
-- The right list is the yet-to-be-parsed-to-appropriate-size 'a's.
chunk xs [] = xs
chunk xs yss@(y:ys) = case fit [y] of
[] -> chunk (xs ++ [[take 99 y]]) (drop 99 y : ys)
_ -> chunk (xs ++ [fit yss]) (diff (fit yss) yss)
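-- A tiny sketch of the behaviour: a short input fits into a single chunk,
-- since the (< Sum 101) bound in 'fit' keeps each chunk's text under roughly
-- 100 characters (presumably the limit of a single TTS request).
--
-- >>> chunk [] (words "hello world")
-- [["hello","world"]]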
diff :: Eq a => [a] -> [a] -> [a]
diff [] ys = ys
diff _ [] = []
diff (x:xs) (y:ys) | x == y = diff xs ys
| otherwise = ys
| alexander-b/hsay | src-exec/hsay.hs | gpl-3.0 | 3,166 | 0 | 17 | 913 | 1,310 | 699 | 611 | 98 | 3 |
{-# LANGUAGE RecordWildCards #-}
module Main where
import Data.Monoid ((<>))
import Options.Applicative
import Game.Regret (defaultMain)
import Game.Select (SelectGame(SelectGame))
import Game.Dudo (Dudo(..))
main :: IO ()
main = defaultMain $
(\dieSides -> SelectGame Dudo{..})
<$> option auto
( long "die-sides"
<> help "Number of sides on the die"
<> metavar "INT"
<> showDefault
<> value 6
)
| davidspies/regret-solver | dudo/app/Main.hs | gpl-3.0 | 444 | 0 | 12 | 104 | 135 | 76 | 59 | 16 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Drive.Replies.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists a comment\'s replies.
--
-- /See:/ <https://developers.google.com/drive/ Drive API Reference> for @drive.replies.list@.
module Network.Google.Resource.Drive.Replies.List
(
-- * REST Resource
RepliesListResource
-- * Creating a Request
, repliesList
, RepliesList
-- * Request Lenses
, rlPageToken
, rlFileId
, rlCommentId
, rlPageSize
, rlIncludeDeleted
) where
import Network.Google.Drive.Types
import Network.Google.Prelude
-- | A resource alias for @drive.replies.list@ method which the
-- 'RepliesList' request conforms to.
type RepliesListResource =
"drive" :>
"v3" :>
"files" :>
Capture "fileId" Text :>
"comments" :>
Capture "commentId" Text :>
"replies" :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "includeDeleted" Bool :>
QueryParam "alt" AltJSON :> Get '[JSON] ReplyList
-- | Lists a comment\'s replies.
--
-- /See:/ 'repliesList' smart constructor.
data RepliesList = RepliesList'
{ _rlPageToken :: !(Maybe Text)
, _rlFileId :: !Text
, _rlCommentId :: !Text
, _rlPageSize :: !(Textual Int32)
, _rlIncludeDeleted :: !Bool
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RepliesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rlPageToken'
--
-- * 'rlFileId'
--
-- * 'rlCommentId'
--
-- * 'rlPageSize'
--
-- * 'rlIncludeDeleted'
repliesList
:: Text -- ^ 'rlFileId'
-> Text -- ^ 'rlCommentId'
-> RepliesList
repliesList pRlFileId_ pRlCommentId_ =
RepliesList'
{ _rlPageToken = Nothing
, _rlFileId = pRlFileId_
, _rlCommentId = pRlCommentId_
, _rlPageSize = 20
, _rlIncludeDeleted = False
}
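-- A hypothetical request with the optional fields overridden (this assumes the
-- lens operators '&' and '.~' are in scope, e.g. from Control.Lens):
--
-- > repliesList "someFileId" "someCommentId"
-- >   & rlPageSize .~ 50
-- >   & rlIncludeDeleted .~ True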
-- | The token for continuing a previous list request on the next page. This
-- should be set to the value of \'nextPageToken\' from the previous
-- response.
rlPageToken :: Lens' RepliesList (Maybe Text)
rlPageToken
= lens _rlPageToken (\ s a -> s{_rlPageToken = a})
-- | The ID of the file.
rlFileId :: Lens' RepliesList Text
rlFileId = lens _rlFileId (\ s a -> s{_rlFileId = a})
-- | The ID of the comment.
rlCommentId :: Lens' RepliesList Text
rlCommentId
= lens _rlCommentId (\ s a -> s{_rlCommentId = a})
-- | The maximum number of replies to return per page.
rlPageSize :: Lens' RepliesList Int32
rlPageSize
= lens _rlPageSize (\ s a -> s{_rlPageSize = a}) .
_Coerce
-- | Whether to include deleted replies. Deleted replies will not include
-- their original content.
rlIncludeDeleted :: Lens' RepliesList Bool
rlIncludeDeleted
= lens _rlIncludeDeleted
(\ s a -> s{_rlIncludeDeleted = a})
instance GoogleRequest RepliesList where
type Rs RepliesList = ReplyList
type Scopes RepliesList =
'["https://www.googleapis.com/auth/drive",
"https://www.googleapis.com/auth/drive.file",
"https://www.googleapis.com/auth/drive.readonly"]
requestClient RepliesList'{..}
= go _rlFileId _rlCommentId _rlPageToken
(Just _rlPageSize)
(Just _rlIncludeDeleted)
(Just AltJSON)
driveService
where go
= buildClient (Proxy :: Proxy RepliesListResource)
mempty
| rueshyna/gogol | gogol-drive/gen/Network/Google/Resource/Drive/Replies/List.hs | mpl-2.0 | 4,311 | 0 | 18 | 1,112 | 638 | 375 | 263 | 95 | 1 |
{-# LANGUAGE InstanceSigs, ExplicitForAll, DeriveDataTypeable #-}
module Length
(Length(..), to_miles, add)
where
import Data.Generics (everywhere, mkT, Data, Typeable)
data Foo = Foo String Int
deriving (Eq, Show, Typeable, Data)
newtype Length = Length Foo
deriving (Eq, Show, Typeable, Data)
-- data Miles deriving (Data)
-- data Kilometers
to_miles :: Int -> Maybe Length
to_miles x | x<0 = Nothing
to_miles x = Just (Length $ Foo (show x) x)
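-- For example:
--
-- >>> to_miles 3
-- Just (Length (Foo "3" 3))
--
-- >>> to_miles (-1)
-- Nothing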
add :: Length -> Length -> Length
add (Length (Foo _ x)) (Length (Foo _ y)) | (x<0) || (y<0) = undefined
add (Length (Foo _ x)) (Length (Foo _ y)) = Length $ Foo (show $ x+y) (x+y)
| dboulytchev/generic-transformers | papers/EPTCS/sybdemo/app/Length.hs | lgpl-2.1 | 670 | 0 | 10 | 150 | 297 | 158 | 139 | 14 | 1 |
-- | Frp handlers specific to this program. Will likely not be split
-- into a separate package.
module Jaek.UI.FrpHandlersCustom (
Focus
,genBZip
,genBDraw
,genDFocus
)
where
import Jaek.Base
import Jaek.Render
import Jaek.Peaks
import Jaek.StreamExpr
import Jaek.Tree
import Jaek.UI.Focus
import Jaek.UI.FrpHandlers
import Jaek.UI.Views
import Reactive.Banana as FRP
import Diagrams.Prelude as D hiding (First (..))
import Diagrams.Backend.Cairo
import Data.Label as L
import Data.Maybe
import Data.Monoid
import Control.Arrow (second)
import Control.Concurrent.STM
-- | generate the behavior of the zipper and the viewmap. Since
-- the viewmap depends on the zipper, the two need to be created
-- together because there's no guaranteed ordering on behaviors.
genBZip ::
HTree
-> Event (String, HTree)
-> Event (String, [StreamExpr])
-> Event (TreeZip -> TreeZip)
-> Event (ViewMap -> ViewMap)
-> Event Focus
-> (Discrete TreeZip, Discrete ViewMap)
genBZip iTree eNewDoc eNewSource eTreeMod eViewMod eFocChange =
(fst <$> bPair, snd <$> bPair)
where
bPair = accumD (zipper iTree, mapFromTree iTree) $ mconcat
[(\(_rt,ht) (_z,mp) ->
let z' = zipper ht
in (z', updateMap z' NewDoc mp)) <$> eNewDoc
,(\(n1,n2) (zp,mp) ->
let z' = newSource n1 n2 zp
in (z', updateMap z' AddSrc mp)) <$> eNewSource
,(\updateF (zp,mp) ->
let z' = updateF zp
in (z', updateMap z' MdNode mp)) <$> eTreeMod
,(\newFoc (zp,mp) -> (goToFocus zp newFoc, mp)) <$> eFocChange
,second <$> eViewMod ]
-- | Generate the @Discrete Focus@.
-- The @Event Focus@ values are emitted when the focus changes, and can be used to
-- trigger screen refreshes.
-- It's important to only trigger focus events when the focus actually changes.
genDFocus :: Behavior (QDiagram Cairo R2 (First TreePath))
-> Event ClickEvent
-> Event Focus
-> Event TreeZip
-> Discrete Focus
genDFocus bDraw clicks eFocChange eTreeChange = dfoc
where
dfoc = stepperD Nothing eFilt
beh = FRP.value dfoc
eFilt = filterApply ((/=) <$> beh) eFocus
eFocus = (Just . getPath <$> eTreeChange)
`mappend` filterE isJust (
-- change from Tree to Wave
FRP.apply ((\d clk -> getFirst $ runQuery (query d)
(p2 $ L.get xyClick clk))
<$> bDraw)
(filterApply ((const . isTree) <$> beh) clicks) )
-- change from Wave to Tree
`mappend` eFocChange
-- | generate a Behavior Diagram producer
genBDraw ::
TVar PathMap
-> Behavior FilePath
-> Behavior TreeZip
-> Behavior Focus
-> Behavior (Int, Int)
-> Behavior ViewMap
-> Behavior (QDiagram Cairo R2 (First TreePath))
genBDraw mpRef bRoot bZip getFocus bsize bview =
drawAt mpRef <$> bRoot <*> bZip <*> getFocus <*> bsize <*> bview
| JohnLato/jaek | src/Jaek/UI/FrpHandlersCustom.hs | lgpl-3.0 | 2,989 | 0 | 21 | 806 | 823 | 452 | 371 | 69 | 1 |
--
-- xmonad example config file for xmonad-0.9
--
-- A template showing all available configuration hooks,
-- and how to override the defaults in your own xmonad.hs conf file.
--
-- Normally, you'd only override those defaults you care about.
--
-- NOTE: Those updating from earlier xmonad versions, who use
-- EwmhDesktops, safeSpawn, WindowGo, or the simple-status-bar
-- setup functions (dzen, xmobar) probably need to change
-- xmonad.hs, please see the notes below, or the following
-- link for more details:
--
-- http://www.haskell.org/haskellwiki/Xmonad/Notable_changes_since_0.8
--
--
-- TODO:
-- - Check whether xcompmgr is present and set window border width to 1 if it
-- is not.
-- - keys to switch to specific windows (i.e. alt+left to switch to the window
-- on the left, alt+right to switch to the window on the right - although
-- probably nicer to use alt+j/k.
-- - some default layouts on VM workspaces, pidgin workspace, firefox workspace
-- - a better layout for pidgin workspace
-- - more controlled layout switching; i.e. <M-S-numpad1> through <M-S-numpadn> for layouts
-- - A submenu alt+space, [1|2|3|4|..] to select layout
-- - Select layout by name. See the 'description' method on LayoutClass here:
-- https://hackage.haskell.org/package/xmonad-bluetilebranch-0.9.1.4/docs/XMonad-Core.html
-- That combined with dmenu would probably allow selection of layouts by name
-- - SmartBorders
-- - ephemeral workspace names - pop up a teensy menu to name a workspace, then
-- pop up a menu to select one by name (dmenu?)
-- - ephemeral window names - pop up a teensy menu to name a window, then pop up
-- a menu to select one by name (dmenu?) (or by number?)
-- - searchable window names: create a list of all windows currently open, and
-- relevant properties (_NET_WM_NAME, WM_NAME, WM_CLASS, WM_WINDOW_ROLE etc.) then
-- provide them to dmenu and switch to the workspace they're on and focus them
-- - toggling of window titles and borders
-- - Check this out:
-- http://xmonad.org/xmonad-docs/xmonad-contrib/XMonad-Layout-DecorationMadness.html
-- - disable focus-follows-mouse on accordion layouts (focus on click?)
-- - accordion layout where all windows simply disappear into a title bar when
-- they're not focused? (Is this approximatley simpletabbed?)
-- - Regarding accordion layout, do I really want a tabbed layout with window
-- titles or something? Where M-j selects a different tab, which
-- subsequently fills the window (except the tab bar)?
-- - 'Find me an empty workspace' functionality
-- - Temporary workspaces? Perhaps in a different workspace namespace or
-- something? (How would I get back to it?)
-- - Some sort of command-watcher; i.e. plug a command-line command into xmonad and display its
-- result/progress/output somewhere on-screen
-- - Process killing dmenu (or possibly an xmonad-specific alternative)
-- - Tool to duplicate a terminal (or possibly any given window) by checking the directory open in
-- the current terminal. Use case: want to open new terminal at directory of existing terminal
-- in minimal keystrokes. If inside another prog (e.g. vim) open new terminal at same directory
-- as current terminal regardless. Probably has to be shell/terminal-dependent. (Can we get
-- working directory of open shell session?)
-- - Something like <M-O> for currently visible windows; <M-S-O>: overlay all currently visible
-- windows with a key to press to focus that window. (Like easymotion for xmonad).
-- - Window marking to jump to windows, like marks in vim. XMonad.Actions.TagWindows.
-- - Macros? See VIMonad? And: http://lynnard.me/blog/2013/11/05/building-a-vim-like-xmonad-prompt-task-groups-topical-workspaces-float-styles-and-more/
-- - When opening a file in vim that's already open in xmonad, jump to that window/workspace (this
-- is probably a zshrc thing, but it's in these todos anyway)
-- - Further investigate how/why VM viewers grab the keyboard. Can this be avoided?
-- - Not really sure this is the place for this, but can we use ICCCM or EWMH to enable a pop-up
-- that displays on the visible/current workspaces to take us to the application that generated
-- it?
-- - GridSelect with overlay keys like easymotion
-- - Make and experiment with 'focus' functionality, which fades or blacks out all screens except
-- the focussed one
-- - Command to jump to any empty workspace
-- - Extension to display pstree overlay on each window, where the root is the top pid for that
-- window
-- - Can I have a bunch of unnamed workspaces that can't be accessed with hotkeys, but contain
-- things like whatsapp that I navigate to by typing their name? Ideally each of these should
-- contain a single application, so that I can display each on a different screen.
-- - Bug: open ten terminal windows to the same directory. Now press M-O. There will only be one
-- of those terminals listed. Perhaps dmenu is ignoring duplicates?
-- - Loud overlay when I press caps lock? Or just remap capslock.. (Can I do that with xmonad?)
-- - Opposite of current <M-b> - send C-q or C-S-q to various applications
-- - World clock workspace
-- - Look for inspiration in other peoples' xmonad.hs
-- - Add status bar. Primarily to show status of WireGuard connection. But might as well show some
-- workspaces; notifications like WhatsApp/Slack/Hangouts/Gmail etc (maybe?); battery where
-- applicable; internet connectivity; time; current song; CPU load; mem usage; iotop or disk
-- usage;caps-lock, scroll-lock, num-lock status; audio volume/mute.
-- - The Hangouts web app changes its title periodically. Can we stop this from happening so we
-- can always refer to it as Hangouts when using M-O to go to a specific window?
-- - It'd be good to be able to run the various WhatsApp, Hangouts, Gmail web apps from the shell.
-- This might not really be an xmonad todo..
-- - Command to create a new throw-away chromium instance. I.e. in private browsing mode, with no
-- history, no profile, etc.
import XMonad
import Data.Monoid
import Data.List
import System.Exit
import XMonad.Layout.NoBorders
import XMonad.Actions.Warp
import XMonad.Actions.WindowGo
import XMonad.Actions.CycleRecentWS
import XMonad.Actions.Search
import XMonad.Actions.Navigation2D
{- TODO: remove the following module; it was just used for testing -}
import XMonad.Layout.ShowWName
import XMonad.Hooks.FadeInactive
import XMonad.Hooks.FadeWindows
import XMonad.Layout.Grid
import XMonad.Layout.NoFrillsDecoration
import XMonad.Layout.Accordion
import XMonad.Layout.Spiral
import XMonad.Actions.WindowBringer
import XMonad.Actions.TagWindows
import XMonad.Prompt
import XMonad.Util.XUtils
import XMonad.Util.Font
-- import XMonad.Prompt.Window
import Control.Monad
import XMonad.Actions.EasyMotion (selectWindow, EasyMotionConfig(..))
import qualified XMonad.Prompt as P
import qualified XMonad.Actions.Submap as SM
import qualified XMonad.Actions.Search as S
import qualified XMonad.StackSet as W
import qualified Data.Map as M
import qualified XMonad.Util.WindowProperties as WP
-- The preferred terminal program, which is used in a binding below and by
-- certain contrib modules.
--
-- myTerminal = "urxvt"
myTerminal = "alacritty"
-- Whether focus follows the mouse pointer.
myFocusFollowsMouse :: Bool
myFocusFollowsMouse = True
-- Width of the window border in pixels.
--
myBorderWidth = 1
-- modMask lets you specify which modkey you want to use. The default
-- is mod1Mask ("left alt"). You may also consider using mod3Mask
-- ("right alt"), which does not conflict with emacs keybindings. The
-- "windows key" is usually mod4Mask.
--
myModMask = mod1Mask
-- NOTE: from 0.9.1 on numlock mask is set automatically. The numlockMask
-- setting should be removed from configs.
--
-- You can safely remove this even on earlier xmonad versions unless you
-- need to set it to something other than the default mod2Mask, (e.g. OSX).
--
-- The mask for the numlock key. Numlock status is "masked" from the
-- current modifier status, so the keybindings will work with numlock on or
-- off. You may need to change this on some systems.
--
-- You can find the numlock modifier by running "xmodmap" and looking for a
-- modifier with Num_Lock bound to it:
--
-- > $ xmodmap | grep Num
-- > mod2 Num_Lock (0x4d)
--
-- Set numlockMask = 0 if you don't have a numlock key, or want to treat
-- numlock status separately.
--
-- myNumlockMask = mod2Mask -- deprecated in xmonad-0.9.1
------------------------------------------------------------
-- The default number of workspaces (virtual screens) and their names.
-- By default we use numeric strings, but any string may be used as a
-- workspace name. The number of workspaces is determined by the length
-- of this list.
--
-- A tagging example:
--
-- > workspaces = ["web", "irc", "code" ] ++ map show [4..9]
--
myWorkspaces = ["`","1","2","3","4","5","6","7","8","9","0","-","=","BS","INS","HOME","PGUP"] --,"NUM_LOCK","/","*"]
-- Border colors for unfocused and focused windows, respectively.
--
-- myNormalBorderColor = "#dddddd"
-- myFocusedBorderColor = "#ff0000"
myNormalBorderColor = "#002b36" -- Solarized dark background colour
myFocusedBorderColor = "#657b83" -- Solarized dark foreground colour
-- Search engines
ddg = searchEngine "DuckDuckGo" "https://duckduckgo.com/?q="
-- Search engine map
searchEngineMap method = M.fromList $
[ ((0, xK_g), method S.google)
, ((0, xK_w), method S.wikipedia)
, ((0, xK_m), method S.maps)
, ((0, xK_d), method ddg)
]
-- Warning: This gotoWindow function assumes you made your workspaces
-- with the 'withScreens' function from XMonad.Layout.IndependentScreens
-- gotoWindow :: Window -> WindowSet -> WindowSet
-- gotoWindow window ws = case S.findTag window ws of
-- Just i -> viewOnScreen (screenIdFromTag i) i ws
-- Nothing -> ws
-- where
-- screenIdFromTag :: WorkspaceId -> ScreenId
-- screenIdFromTag = S . read . takeWhile (/= '_')
-- layoutMap
searchAndGoTo = do
SM.submap $ searchEngineMap $ S.promptSearch P.defaultXPConfig
runOrRaiseNext "firefox" (stringProperty "WM_WINDOW_ROLE" =? "browser")
displayDateTwoScreens = do
spawn "date | dzen2 -fg \"#ffffff\" -bg \"#000000\" -p 2 -fn '-*-terminus-*-r-normal-*-*-120-*-*-*-*-iso8859-*' -xs 1"
spawn "date | dzen2 -fg \"#ffffff\" -bg \"#000000\" -p 2 -fn '-*-terminus-*-r-normal-*-*-120-*-*-*-*-iso8859-*' -xs 2"
-- Check whether a query passes. If it does, do nothing. If it does not, run
-- "spawn spawncmd"
checkAndSpawn :: XMonad.Query Bool -> String -> X ()
checkAndSpawn query spawncmd =
ifWindows query (\w -> return ()) (spawn spawncmd)
-- Start stuff
startStuff = composeAll
[ checkAndSpawn (className =? "Firefox") "firefox"
, checkAndSpawn (className =? "Spotify") "spotify"
, checkAndSpawn (className =? "chromium") "chromium"
-- , checkAndSpawn (className =? "Pidgin") "pidgin"
, checkAndSpawn (className =? "win7vm") "virt-viewer -c qemu:///system -w -f win7 --class win7vm"
, checkAndSpawn (className =? "urxvt-iotop") "urxvt -name \"urxvt-iotop\" -e sudo iotop"
, checkAndSpawn (className =? "urxvt-htop") "urxvt -name \"urxvt-htop\" -e htop"
, checkAndSpawn (className =? "keep.google.com") "chromium --app=https://keep.google.com --user-data-dir=$HOME/.config/chromium_gmail/"
, checkAndSpawn (className =? "web.whatsapp.com") "chromium --app=https://web.whatsapp.com --user-data-dir=$HOME/.config/chromium_whatsapp/"
, checkAndSpawn (className =? "mail.google.com") "chromium --app=https://mail.google.com --user-data-dir=$HOME/.config/chromium_gmail/"
, checkAndSpawn (className =? "calendar.google.com") "chromium --app=https://calendar.google.com --user-data-dir=$HOME/.config/chromium_gmail/"
, checkAndSpawn (className =? "hangouts.google.com") "chromium --app=https://hangouts.google.com --user-data-dir=$HOME/.config/chromium_gmail/"
, checkAndSpawn (className =? "ipegcorp.slack.com") "chromium --app=https://ipegcorp.slack.com --user-data-dir=$HOME/.config/ipeg_slack/"
, checkAndSpawn (className =? "Signal") "signal-desktop"
]
emConf :: EasyMotionConfig
emConf = def { sKeys = [[xK_d, xK_s, xK_a, xK_f], [xK_h, xK_j, xK_k, xK_l]], maxChordLen = 1 }
------------------------------------------------------------------------
-- Key bindings. Add, modify or remove key bindings here.
--
myKeys conf@(XConfig {XMonad.modMask = modm}) = M.fromList $
-- launch a terminal
[ ((modm .|. shiftMask, xK_Return), spawn $ XMonad.terminal conf)
-- window tagging (m-a, 'a' for 'annotate')
, ((modm, xK_a ), tagPrompt def (withFocused . addTag))
-- , ((modm .|. shiftMask, xK_a ), tagPrompt defaultXPConfig (`withTaggedGlobalP` gotoWindow))
-- , ((modm .|. shiftMask, xK_a ), tagPrompt defaultXPConfig (\s -> withTaggedGlobalP s shiftHere))
-- , ((modm .|. shiftMask, xK_a ), tagPrompt defaultXPConfig (\s -> shiftToScreen s))
, ((modm, xK_f ), (selectWindow def { sKeys = [[xK_d, xK_s, xK_a, xK_f], [xK_h, xK_j, xK_k, xK_l]], maxChordLen = 1 }) >>= (flip whenJust (windows . W.focusWindow)))
-- search
, ((modm, xK_s ), searchAndGoTo)
-- suspend
, ((modm .|. shiftMask, xK_s ), spawn "systemctl suspend")
-- lock screen
, ((modm .|. shiftMask, xK_l ), spawn "xscreensaver-command --lock")
-- cycle through recent workspaces in recently-used order
-- need to sort this out so that it doesn't include any workspace currently visible on another
-- screen. I think? Or perhaps only workspaces that were previously visible on the given
-- screen.
-- , ((modm, xK_Tab ), cycleRecentWS [xK_Alt_L] xK_Tab xK_Tab)
-- launch firefox
-- , ((modm, xK_f ), runOrRaiseNext "firefox" (className =? "Firefox"))
-- launch dmenu
, ((modm, xK_p ), spawn "exe=`dmenu_path | dmenu` && eval \"exec $exe\"")
-- display date
, ((modm .|. shiftMask, xK_t ), displayDateTwoScreens)
-- move pointer
-- , ((modm .|. shiftMask, xK_b ), banish UpperLeft)
-- close focused window
, ((modm .|. shiftMask, xK_c ), kill)
-- Rotate through the available layout algorithms
, ((modm, xK_space ), sendMessage NextLayout)
-- Reset the layouts on the current workspace to default
, ((modm .|. shiftMask, xK_space ), setLayout $ XMonad.layoutHook conf)
-- Resize viewed windows to the correct size
, ((modm, xK_n ), refresh)
-- Open some stuff
, ((modm .|. shiftMask, xK_b ), startStuff)
-- Move focus to the next window
-- This has been commented out because
-- a) it's not used
-- b) it's convenient for windows vms to be able to use alt+tab, because
    -- they're primitive and can't use xmonad
-- , ((modm, xK_Tab ), windows W.focusDown)
-- Turn volume up 10%
, ((modm, xK_KP_Add ), spawn "pactl set-sink-volume $(pactl list short | grep RUNNING | cut -f1) +10%")
-- Previous track
, ((modm, xK_KP_Left ), spawn "dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Previous")
-- Play/pause
, ((modm, xK_KP_Begin ),
(ifWindows
(className =? "Spotify")
(\w -> spawn "dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.PlayPause")
(spawn "spotify")))
-- Next track
, ((modm, xK_KP_Right ), spawn "dbus-send --print-reply --dest=org.mpris.MediaPlayer2.spotify /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player.Next")
-- Turn volume down 10%
, ((modm, xK_KP_Subtract ), spawn "pactl set-sink-volume $(pactl list short | grep RUNNING | cut -f1) -10%")
-- Toggle mute
, ((modm, xK_KP_Insert ), spawn "pactl set-sink-mute 1 toggle")
-- Move focus to the next window
, ((modm, xK_j ), windows W.focusDown)
-- Move focus to the previous window
, ((modm, xK_k ), windows W.focusUp )
-- Move focus to the master window
, ((modm, xK_m ), windows W.focusMaster )
-- Swap the focused window and the master window
, ((modm, xK_Return), windows W.swapMaster)
-- Swap the focused window with the next window
, ((modm .|. shiftMask, xK_j ), windows W.swapDown )
-- Swap the focused window with the previous window
, ((modm .|. shiftMask, xK_k ), windows W.swapUp )
-- Move window focus left or right
, ((modm, xK_h ), windowGo L False)
, ((modm, xK_l ), windowGo R False)
-- Shrink the master area
-- , ((modm, xK_h ), sendMessage Shrink)
-- Expand the master area
-- , ((modm, xK_l ), sendMessage Expand)
-- Push window back into tiling
, ((modm, xK_t ), withFocused $ windows . W.sink)
-- Increment the number of windows in the master area
, ((modm , xK_comma ), sendMessage (IncMasterN 1))
-- Deincrement the number of windows in the master area
, ((modm , xK_period), sendMessage (IncMasterN (-1)))
-- Toggle the status bar gap
-- Use this binding with avoidStruts from Hooks.ManageDocks.
-- See also the statusBar function from Hooks.DynamicLog.
--
-- , ((modm , xK_b ), sendMessage ToggleStruts)
-- Window bringer
-- , ((modm , xK_f ), windowPrompt def Goto wsWindows)
{- TODO: this could be xK_/ when xK_f is easymotion-like -}
, ((modm , xK_o ), gotoMenuArgs ["-l","100","-i"])
-- Quit xmonad
, ((modm .|. shiftMask, xK_q ), io (exitWith ExitSuccess))
-- Restart xmonad
, ((modm , xK_q ), spawn "xmonad --recompile; xmonad --restart")
]
++
--
-- mod-[1..9], Switch to workspace N
-- mod-shift-[1..9], Move client to workspace N
--
[((m .|. modm, k), windows $ f i)
| (i, k) <- zip (XMonad.workspaces conf) [xK_grave, xK_1, xK_2, xK_3, xK_4, xK_5, xK_6, xK_7, xK_8, xK_9, xK_0, xK_minus, xK_equal, xK_BackSpace, xK_Insert, xK_Home, xK_Page_Up] --, xK_Num_Lock, xK_KP_Divide, xK_KP_Multiply]
, (f, m) <- [(W.greedyView, 0), (W.shift, shiftMask)]]
++
--
-- mod-{w,e,r}, Switch to physical/Xinerama screens 1, 2, or 3
-- mod-shift-{w,e,r}, Move client to screen 1, 2, or 3
--
[((m .|. modm, key), screenWorkspace sc >>= flip whenJust (windows . f))
| (key, sc) <- zip [xK_w, xK_e, xK_r] [0..]
, (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
------------------------------------------------------------------------
-- Mouse bindings: default actions bound to mouse events
--
myMouseBindings (XConfig {XMonad.modMask = modm}) = M.fromList $
-- mod-button1, Set the window to floating mode and move by dragging
[ ((modm, button1), (\w -> focus w >> mouseMoveWindow w
>> windows W.shiftMaster))
-- mod-button2, Raise the window to the top of the stack
, ((modm, button2), (\w -> focus w >> windows W.shiftMaster))
-- mod-button3, Set the window to floating mode and resize by dragging
, ((modm, button3), (\w -> focus w >> mouseResizeWindow w
>> windows W.shiftMaster))
-- you may also bind events to the mouse scroll wheel (button4 and button5)
]
------------------------------------------------------------------------
-- Layouts:
-- You can specify and transform your layouts by modifying these values.
-- If you change layout bindings be sure to use 'mod-shift-space' after
-- restarting (with 'mod-q') to reset your layout state to the new
-- defaults, as xmonad preserves your old layout settings by default.
--
-- * NOTE: XMonad.Hooks.EwmhDesktops users must remove the obsolete
-- ewmhDesktopsLayout modifier from layoutHook. It no longer exists.
-- Instead use the 'ewmh' function from that module to modify your
-- defaultConfig as a whole. (See also logHook, handleEventHook, and
-- startupHook ewmh notes.)
--
-- The available layouts. Note that each layout is separated by |||,
-- which denotes layout choice.
--
myLayout = noBorders tiled ||| Mirror (noBorders tiled) ||| noBorders Full ||| GridRatio (16/10)
||| noFrillsDeco shrinkText defaultTheme (GridRatio (16/10))
||| noFrillsDeco shrinkText defaultTheme Accordion ||| spiral golden
where
-- default tiling algorithm partitions the screen into two panes
tiled = Tall nmaster delta ratio
-- The default number of windows in the master pane
nmaster = 1
golden = toRational (2/(1+sqrt(5)::Double))
-- Default proportion of screen occupied by master pane
ratio = 1/2
-- Percent of screen to increment by when resizing panes
delta = 3/100
------------------------------------------------------------------------
-- Window rules:
-- Execute arbitrary actions and WindowSet manipulations when managing
-- a new window. You can use this to, for example, always float a
-- particular program, or have a client always appear on a particular
-- workspace.
--
-- To find the property name associated with a program, use
-- > xprop | grep WM_CLASS
-- and click on the client you're interested in.
--
-- To match on the WM_NAME, you can use 'title' in the same way that
-- 'className' and 'resource' are used below.
--
myManageHook = composeAll
[ className =? "MPlayer" --> doFloat
, className =? "Gimp" --> doFloat
, className =? "Vmplayer" --> doFloat
, resource =? "desktop_window" --> doIgnore
, resource =? "kdesktop" --> doIgnore
, className =? "Firefox" --> doShift "`"
, className =? "Spotify" --> doShift "HOME"
-- , className =? "Pidgin" --> doShift "INS"
, className =? "Signal" --> doShift "BS"
, className =? "keep.google.com" --> doShift "BS"
, className =? "web.whatsapp.com" --> doShift "BS"
, className =? "mail.google.com" --> doShift "BS"
, className =? "chromium" --> doShift "="
, className =? "win7vm" --> doShift "PGUP"
, className =? "urxvt-iotop" --> doShift "PGUP"
, className =? "urxvt-htop" --> doShift "PGUP"
-- , fmap (isPrefixOf "Virt") className --> doShift "BS" -- Works
-- , fmap (isPrefixOf "win7") (stringProperty "WM_NAME") --> doShift "BS"
]
-- runOrRaiseNext "firefox" (stringProperty "WM_WINDOW_ROLE" =? "browser")
------------------------------------------------------------------------
-- Event handling
-- Defines a custom handler function for X Events. The function should
-- return (All True) if the default handler is to be run afterwards. To
-- combine event hooks use mappend or mconcat from Data.Monoid.
--
-- * NOTE: EwmhDesktops users should use the 'ewmh' function from
-- XMonad.Hooks.EwmhDesktops to modify their defaultConfig as a whole.
-- It will add EWMH event handling to your custom event hooks by
-- combining them with ewmhDesktopsEventHook.
--
-- myEventHook = fadeWindowsEventHook
myEventHook = mempty
------------------------------------------------------------------------
-- Status bars and logging
-- Perform an arbitrary action on each internal state change or X event.
-- See the 'XMonad.Hooks.DynamicLog' extension for examples.
--
--
-- * NOTE: EwmhDesktops users should use the 'ewmh' function from
-- XMonad.Hooks.EwmhDesktops to modify their defaultConfig as a whole.
-- It will add EWMH logHook actions to your custom log hook by
-- combining it with ewmhDesktopsLogHook.
--
myLogHook = fadeInactiveLogHook fadeAmount where fadeAmount = 0.92
-- myLogHook = fadeWindowsLogHook $ composeAll [isUnfocused --> transparency 0.2
-- , transparency 0.1
-- ]
------------------------------------------------------------------------
-- Startup hook
-- Perform an arbitrary action each time xmonad starts or is restarted
-- with mod-q. Used by, e.g., XMonad.Layout.PerWorkspace to initialize
-- per-workspace layout choices.
--
-- By default, do nothing.
--
-- * NOTE: EwmhDesktops users should use the 'ewmh' function from
-- XMonad.Hooks.EwmhDesktops to modify their defaultConfig as a whole.
-- It will add initialization of EWMH support to your custom startup
-- hook by combining it with ewmhDesktopsStartup.
--
myStartupHook = mempty
------------------------------------------------------------------------
-- Now run xmonad with all the defaults we set up.
-- Run xmonad with the settings you specify. No need to modify this.
--
main = xmonad defaults
-- A structure containing your configuration settings, overriding
-- fields in the default config. Any you don't override, will
-- use the defaults defined in xmonad/XMonad/Config.hs
--
-- No need to modify this.
--
defaults = defaultConfig {
-- simple stuff
terminal = myTerminal,
focusFollowsMouse = myFocusFollowsMouse,
borderWidth = myBorderWidth,
modMask = myModMask,
-- numlockMask deprecated in 0.9.1
-- numlockMask = myNumlockMask,
workspaces = myWorkspaces,
normalBorderColor = myNormalBorderColor,
focusedBorderColor = myFocusedBorderColor,
-- key bindings
keys = myKeys,
mouseBindings = myMouseBindings,
-- hooks, layouts
layoutHook = myLayout,
manageHook = myManageHook,
handleEventHook = myEventHook,
logHook = myLogHook,
startupHook = myStartupHook
}
| msk-/dotfiles | xmonad.hs | unlicense | 26,526 | 0 | 16 | 5,989 | 2,806 | 1,770 | 1,036 | 164 | 1 |
{-
Created : 2015 Feb 05 (Thu) 14:56:08 by Harold Carr.
Last Modified : 2016 Feb 04 (Thu) 20:04:24 by Harold Carr.
-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module ThreepennyExternalNewEventDemo where
import Control.Concurrent (forkIO)
import Graphics.UI.Threepenny
import Network
import System.IO (hClose)
main :: IO ()
main = do
(eAccept, hAccept) <- newEvent
forkIO (acceptLoop hAccept 6789)
forkIO (acceptLoop hAccept 9876)
startGUI defaultConfig $ \win -> do
bAccept <- stepper "" eAccept
entree <- entry bAccept
element entree # set (attr "size") "10" # set style [("width","200px")]
getBody win #+ [element entree]
return ()
acceptLoop :: (String -> IO a) -> PortNumber -> IO b
acceptLoop hAccept bindAddr = do
s <- listenOn $ PortNumber bindAddr
loop s
where
loop s = do
(h, hostname, portNumber) <- accept s
hClose h
hAccept $ show bindAddr ++ " " ++ hostname ++ " " ++ show portNumber
loop s
-- End of file.
| haroldcarr/rdf-triple-browser | experiments/haskell-threepenny-gui-experiments/src/ThreepennyExternalNewEventDemo.hs | apache-2.0 | 1,095 | 0 | 15 | 321 | 318 | 154 | 164 | 26 | 1 |
{-# LANGUAGE DeriveFunctor #-}
module Ast where
import RecursionSchemes
data Literal = IntLit Int | StringLit String | UnitLit deriving Show
data ExpF a = Var String
| App a a
| Lam String a
| Lit Literal
| Let String a a
| IfThenElse a a a
deriving (Functor, Show)
type Exp = Fix ExpF
data Decl = Decl String Exp deriving Show
var :: String -> Exp
var x = In (Var x)
app :: Exp -> Exp -> Exp
app e1 e2 = In (App e1 e2)
lam :: String -> Exp -> Exp
lam s e = In (Lam s e)
lit :: Literal -> Exp
lit x = In (Lit x)
leT :: String -> Exp -> Exp -> Exp
leT s v b = In (Let s v b)
ifThenElse :: Exp -> Exp -> Exp -> Exp
ifThenElse p e1 e2 = In (IfThenElse p e1 e2)
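-- A small throwaway example (not part of the original module) showing how the
-- smart constructors compose; the names "f", "x" and "y" are made up:
exampleExp :: Exp
exampleExp =
  lam "x" $
    leT "y" (app (var "f") (var "x")) $
      ifThenElse (var "y") (lit (IntLit 1)) (lit UnitLit)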
| holoed/Junior | src/Ast.hs | apache-2.0 | 741 | 0 | 7 | 237 | 313 | 166 | 147 | 25 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Main
where
import Data.Typeable
import Data.Data
import Text.JSON (decode, Result(Ok, Error))
import Text.JSON.Generic (fromJSON)
import System.IO (hFlush, stdout)
data InMsg = InMsg
{ problem :: String,
state :: String
} deriving (Eq, Typeable, Data)
main :: IO()
main = readLoop
readLoop :: IO()
readLoop = do
line <- getLine
let propMsg = readJSON line :: Result InMsg
case propMsg of
Ok (InMsg prob st) ->
putStrLn $ makeResponse [show (read st :: [Int]), prob]
Error errorString ->
putStrLn errorString
hFlush stdout
readLoop
readJSON :: Data a => String -> Result a
readJSON s = do
res <- decode s
fromJSON res
makeResponse :: [String] -> String
makeResponse ls = "{\"worker input\":" ++ show ls ++ "}"
| disco-framework/disco | priv/squares/components/barkeeper/Main.hs | apache-2.0 | 845 | 0 | 15 | 214 | 296 | 155 | 141 | 30 | 2 |
-- program fails if the year entered on stdin is not a number
main = do
putStrLn "Please Enter Your Birthyear: "
year <- getLine
putStrLn $ "In 2020, you will be : " ++ show (2020 - read year)
| dongarerahul/edx-haskell | main.hs | apache-2.0 | 205 | 0 | 11 | 46 | 44 | 20 | 24 | 4 | 1 |
module Main where
import qualified SimpleService_Client as Client
import Simple_Types
import Thrift
import Thrift.Protocol.Binary
import Thrift.Server
import Thrift.Transport
import Thrift.Transport.Handle
import Control.Exception
import Data.Either
import Data.Int
import Data.List
import Data.Maybe
import Data.Time
import Data.Text.Lazy
import Data.Vector
import Network
import System.Exit
import System.Random
import Text.Printf
getRight :: Either left right -> right
getRight (Right x) = x
simpleCPU :: [Double] -> [Double]
simpleCPU [] = []
simpleCPU (x:xs) = x*x + x : simpleCPU xs
check :: Vector Double -> [Double] -> Int -> Int -> [Int] -> [Double] -> [Double] -> (Int, [Int], [Double], [Double])
check outDFE [] status iter iterL outDFEL outCPUL = (status, iterL, outDFEL, outCPUL)
check outDFE (outCPU:cpul) status iter iterL outDFEL outCPUL
| ((fromMaybe 0 ((!?) outDFE 0)) - outCPU) ** 2 > 0.00001 = check (Data.Vector.drop 1 outDFE) cpul (status + 1) (iter + 1) (iterL Data.List.++ (iter:[])) (outDFEL Data.List.++ ((fromMaybe 0 ((!?) outDFE 0)):[])) (outCPUL Data.List.++ (outCPU:[]))
| otherwise = check (Data.Vector.drop 1 outDFE) cpul status (iter + 1) iterL outDFEL outCPUL
printErrors :: [Int] -> [Double] -> [Double] -> String -> String
printErrors [] [] [] output = output
printErrors (i:is) (x:xs) (y:ys) output = printErrors is xs ys (output Data.List.++ "Output data @ " Data.List.++ (show i) Data.List.++ " = " Data.List.++ (show x) Data.List.++ " (expected " Data.List.++ (show y) Data.List.++ ")\n")
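-- For example, a single hypothetical mismatch at index 3:
--
-- >>> putStr (printErrors [3] [2.0] [12.0] "")
-- Output data @ 3 = 2.0 (expected 12.0)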
main = do
startTime <- getCurrentTime
startDFETime <- getCurrentTime
-- Make socket
transport <- hOpen ("localhost", PortNumber 9090)
-- Wrap in a protocol
let protocol = BinaryProtocol transport
-- Create a client to use the protocol encoder
let client = (protocol, protocol)
stopTime <- getCurrentTime
putStrLn ("Creating a client and opening connection:\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Generate input data
startTime <- getCurrentTime
let size = 1024
let sizeBytes = size * 4
let dataIn = [fromIntegral(1)..fromIntegral(size)]
stopTime <- getCurrentTime
putStrLn ("Generating input data:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Initialize maxfile
startTime <- getCurrentTime
e <- try (Client.simple_init client) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let maxfile = getRight e
stopTime <- getCurrentTime
putStrLn ("Initializing maxfile:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Load DFE
startTime <- getCurrentTime
e <- try (Client.max_load client maxfile (pack "*")) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let engine = getRight e
stopTime <- getCurrentTime
putStrLn ("Loading DFE:\t\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Allocate and send input streams to server
startTime <- getCurrentTime
e <- try (Client.malloc_float client (fromIntegral size)) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let address_dataIn = getRight e
e <- try (Client.send_data_float client address_dataIn (fromList dataIn)) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Sending input data:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Allocate memory for output stream on server
startTime <- getCurrentTime
e <- try (Client.malloc_float client (fromIntegral size)) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let address_dataOut = getRight e
stopTime <- getCurrentTime
putStrLn ("Allocating memory for output stream on server:\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Action default
startTime <- getCurrentTime
e <- try (Client.max_actions_init client maxfile (pack "default")) :: IO (Either SomeException Int64)
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
let actions = getRight e
e <- try (Client.max_set_param_uint64t client actions (pack "N") (fromIntegral size)) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
e <- try (Client.max_queue_input client actions (pack "x") address_dataIn (fromIntegral sizeBytes)) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
e <- try (Client.max_queue_output client actions (pack "y") address_dataOut (fromIntegral sizeBytes)) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
e <- try (Client.max_run client engine actions) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Simple time:\t\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Unload DFE
startTime <- getCurrentTime
e <- try (Client.max_unload client engine) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Unloading DFE:\t\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Get output stream from server
startTime <- getCurrentTime
dataOutDFE <- Client.receive_data_float client address_dataOut (fromIntegral size)
stopTime <- getCurrentTime
putStrLn ("Getting output stream:\t(size = " Data.List.++ (show (size * 32)) Data.List.++ " bit)\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
  -- Free allocated memory for streams on server
startTime <- getCurrentTime
e <- try (Client.free client address_dataIn) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
e <- try (Client.free client address_dataOut) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Freeing allocated memory for streams on server:\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Free allocated maxfile data
startTime <- getCurrentTime
e <- try (Client.simple_free client) :: IO (Either SomeException ())
case e of
Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
Right ex -> return ()
stopTime <- getCurrentTime
putStrLn ("Freeing allocated maxfile data:\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Close!
startTime <- getCurrentTime
tClose transport
stopTime <- getCurrentTime
putStrLn ("Closing connection:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
stopTime <- getCurrentTime
putStrLn ("DFE simple total time:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startDFETime)))
-- CPU Output
startTime <- getCurrentTime
let dataOutCPU = simpleCPU dataIn
stopTime <- getCurrentTime
putStrLn ("CPU simple total time::\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
-- Checking results
startTime <- getCurrentTime
let (status, iter, dataErrDFE, dataErrCPU) = check dataOutDFE dataOutCPU 0 0 [] [] []
putStr (printErrors iter dataErrDFE dataErrCPU [])
stopTime <- getCurrentTime
putStrLn ("Checking results:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
if (status == 0)
then putStrLn ("Test successful!")
else do putStrLn ("Test failed " Data.List.++ show status Data.List.++ " times!")
exitWith $ ExitFailure (-1)
| maxeler/maxskins | examples/Simple/client/hs/Dynamic/SimpleClient.hs | bsd-2-clause | 8,580 | 0 | 15 | 1,898 | 2,904 | 1,433 | 1,471 | 155 | 16 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell, DeriveGeneric, ScopedTypeVariables #-}
module Dashboardh.Status(
getJenkins
, getJobs
, getViewJobs
, getJob
, getAvgBuildTime
) where
import Dashboardh.Prelude
import Dashboardh.Job
import Dashboardh.BuildTime
import Dashboardh.Core
import Data.Text (Text)
import Data.List
import Options.Applicative
import Control.Lens hiding (view)
import Control.Lens.Aeson
import Jenkins.REST
import Debug.Trace
getJenkins :: Jenkins [a] -> IO [a]
getJenkins f = do
let opts = Settings "http://localhost" 8090 "" ""
jobs <- runJenkins opts f
case jobs of
Right l -> return l
Left _ -> return []
getJobs :: Jenkins [Text]
getJobs = do
res <- get (json -?- "tree" -=- "jobs[name]")
let jobs = (trace (show res) res) ^.. key "jobs"._Array.each.key "name"._String
concurrentlys (map (\n -> do return $ n) jobs)
getViewJobs :: Text -> Jenkins [Text]
getViewJobs v = do
res <- get (view v `as` json -?- "tree" -=- "jobs[name]")
let jobs = res ^.. key "jobs"._Array.each.key "name"._String
concurrentlys (map (\n -> do return $ n) jobs)
getJob :: Text -> Jenkins [Job]
getJob j = do
res <- get (job j `as` json -?- "tree" -=- "builds[duration,result,builtOn]")
let dur = res ^.. key "builds"._Array.each.key "duration"._Integer
let bOn = res ^.. key "builds"._Array.each.key "builtOn"._String
let rlt = res ^.. key "builds"._Array.each.key "result"._String
concurrentlys (zipWith3 (\a -> \b -> \c -> do return $ Job j (fromInteger a) b c) dur rlt bOn )
getAvgBuildTime :: Text -> Jenkins [BuildTime]
getAvgBuildTime j = do
res <- get (job j `as` json -?- "tree" -=- "builds[duration]")
let dur = res ^.. key "builds"._Array.each.key "duration"._Integer
concurrentlys (return . return $ BuildTime j (length dur) . avg $ map fromInteger dur)
getSimpleJob :: Text -> Text
getSimpleJob =
error("handle jenkins json")
getJobStatus :: Text -> Int
getJobStatus =
error("return current build status")
getJobStatusHistory :: Text -> Int
getJobStatusHistory =
error("return list of build status")
getJobTimeHistory :: Text -> Int
getJobTimeHistory =
error("return list of build times")
| nhibberd/dashboardh | src/Dashboardh/Status.hs | bsd-3-clause | 2,259 | 0 | 19 | 462 | 786 | 398 | 388 | -1 | -1 |
-- | Parsing items.
module Data.Cfg.Item (
AugItem,
Item,
-- * Creation
mkInitialItem,
nextItem,
-- * Query
production,
mark,
atEnd,
beforeMark,
afterMark,
nextV
) where
import Data.Cfg.Augment
import Data.Cfg.Cfg
-- | A parsing \"item\": a partially processed production.
data Item t nt = Item {
mark :: Int,
-- ^ The index showing how many vocabulary term of the production
-- have been processed
production :: Production t nt
-- ^ The production for the 'Item'
}
deriving (Eq, Ord)
-- | An augmented 'Item'.
type AugItem t nt = Item (AugT t) (AugNT nt)
-- | An 'Item' representing the unprocessed production.
mkInitialItem :: Production t nt -> Item t nt
mkInitialItem = Item 0
-- | The 'Item' with one more vocabulary term processed. If the
-- production is already completely processed, you get 'Nothing'.
nextItem :: Item t nt -> Maybe (Item t nt)
nextItem item = if atEnd item
then Nothing
else Just item { mark = 1 + mark item }
-- | An 'Item' representing a completely processed production.
atEnd :: Item t nt -> Bool
atEnd = null . afterMark
-- | The processed vocabulary terms of the production.
beforeMark :: Item t nt -> [V t nt]
beforeMark item = take (mark item) $ productionRhs (production item)
-- | The unprocessed vocabulary terms of the production.
afterMark :: Item t nt -> [V t nt]
afterMark item = drop (mark item) $ productionRhs (production item)
-- | The next vocabulary term to be processed, if there is one.
nextV :: Item t nt -> Maybe (V t nt)
nextV item = case afterMark item of
[] -> Nothing
(v : _) -> Just v
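-- For a production @p@ whose right-hand side has two vocabulary terms, the
-- items progress roughly like this (a sketch, not tied to a concrete grammar):
--
-- > let i0 = mkInitialItem p    -- mark 0, nothing processed, not atEnd
-- > nextItem i0                 -- Just an item with mark 1
-- > nextItem i0 >>= nextItem    -- Just an item with mark 2, which is atEnd
-- > nextItem i0 >>= nextItem >>= nextItem
-- >                             -- Nothing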
| nedervold/context-free-grammar | src/Data/Cfg/Item.hs | bsd-3-clause | 1,686 | 0 | 10 | 427 | 410 | 223 | 187 | 34 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Layers.Flatten where
import Network
import Static
import Util
import Data.Singletons.TypeLits
import Data.Singletons.Prelude.Num
import Data.Serialize
data Flatten = Flatten
instance Serialize Flatten where
put _ = return ()
get = return Flatten
instance Creatable Flatten where
seeded _ = Flatten
instance Updatable Flatten where
type Gradient Flatten = ()
instance ( KnownNat (h :* d :* w), KnownNat bat, KnownNat h, KnownNat d, KnownNat w
, (bat :* h :* d :* w) ~ (bat :* (h :* d :* w))
) => Layer (ZZ ::. bat ::. h ::. d ::. w) Flatten where
type LOutput (ZZ ::. bat ::. h ::. d ::. w) Flatten = ZZ ::. bat ::. (h :* d :* w)
runForward _ arr = sComputeP $ sReshape arr
runBackwards _ _ _ dy =
do dx <- sComputeP $ sReshape dy
return ((), dx)
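-- For instance (made-up dimensions): a batch of 8 inputs of shape 3x4x4 comes
-- in as ZZ ::. 8 ::. 3 ::. 4 ::. 4 and leaves as ZZ ::. 8 ::. 48; runBackwards
-- simply reshapes the incoming gradient back to the original four-dimensional
-- shape.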
| jonascarpay/convoluted | src/Layers/Flatten.hs | bsd-3-clause | 946 | 0 | 12 | 211 | 330 | 176 | 154 | -1 | -1 |
module Primitives (primitives) where
import Value (
Value(Nil, Integer, Char, Con, Fun, IOAction, Err), Env, initialize)
primitives :: Env
primitives = initialize [
("+", Fun $ mkBinIntFun (+)),
("-", Fun $ mkBinIntFun (-)),
("*", Fun $ mkBinIntFun (*)),
("div", Fun $ mkBinIntFun div),
("^", Fun $ mkBinIntFun (^)),
("==", Fun $ mkCompIntFun (==)),
(">>", Fun concatMonad),
("return", Fun $ IOAction . return),
("putChar", Fun putCharFun)
]
mkIntFun :: (Integer -> Integer) -> Value -> Value
mkIntFun f (Integer x) = Integer $ f x
mkIntFun _ v = typeError "Integer" v
mkBinIntFun :: (Integer -> Integer -> Integer) -> Value -> Value
mkBinIntFun op (Integer x) = Fun $ mkIntFun $ op x
mkBinIntFun _ v = typeError "Integer" v
mkIntBoolFun :: (Integer -> Bool) -> Value -> Value
mkIntBoolFun p (Integer x)
| p x = Con "True" []
| otherwise = Con "False" []
mkIntBoolFun _ v = typeError "Integer" v
mkCompIntFun :: (Integer -> Integer -> Bool) -> Value -> Value
mkCompIntFun p (Integer x) = Fun $ mkIntBoolFun $ p x
mkCompIntFun _ v = typeError "Integer" v
concatMonad :: Value -> Value
concatMonad (IOAction act1) = Fun fun
where
fun (IOAction act2) = IOAction $ act1 >> act2
fun v = typeError "IO" v
concatMonad v = typeError "IO" v
putCharFun :: Value -> Value
putCharFun (Char c) = IOAction $ putChar c >> return Nil
putCharFun v = typeError "Char" v
typeError :: String -> Value -> Value
typeError t v = Err $ "Couldn't match expected type `" ++ t ++ "': " ++ show v
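-- A minimal usage sketch (illustration only; not exported): primitives are
-- curried, so a binary primitive such as (+) is applied one argument at a
-- time, unwrapping a 'Fun' at each step.
applyBinary :: Value -> Value -> Value -> Value
applyBinary (Fun f) x y = case f x of
    Fun g -> g y
    v -> v
applyBinary v _ _ = typeError "Fun" v
-- For example, applyBinary (Fun $ mkBinIntFun (+)) (Integer 2) (Integer 3)
-- evaluates to Integer 5.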
| YoshikuniJujo/toyhaskell_haskell | src/Primitives.hs | bsd-3-clause | 1,517 | 68 | 9 | 311 | 708 | 375 | 333 | 38 | 2 |
-- | Example 2: Parsing a list of tokens instead of a 'String' and computing
-- the desired result directly.
-- In this example it is assumed that there exists a lexer
-- that goes from @'String' -> 'CToken'@, so that an input
-- 'String' can be fed into the lexer and then into the generated
-- parser.
-- Needed for recursive do notation.
{-# LANGUAGE RecursiveDo #-}
-- Needed for deriving 'Typeable'.
{-# LANGUAGE DeriveDataTypeable #-}
-- Needed for deriving 'Lift'.
{-# LANGUAGE TemplateHaskell #-}
-- Needed for deriving ToSym CToken CToken
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies #-}
module Ex2Calculator where
-- First import the Grempa grammar combinators.
import Data.Parser.Grempa.Grammar
-- We also need the 'ToPat' class to be in scope.
import Data.Parser.Grempa.Static (ToPat(..), toConstrPat)
-- The result datatype must be an instance of the 'Typeable' typeclass.
-- Fortunately, it is possible to derive an instance using the extension
-- above.
import Data.Typeable
import Data.Data
-- For deriving 'Lift' instances.
import Language.Haskell.TH.Lift
-- Our token datatype. The parser will operate on a list of those.
data CToken
= Num {unNum :: Integer}
| Plus
| Times
| LParen | RParen
-- Tokens have to have instances of a number of typeclasses ('Data', 'Eq',
-- 'Ord' and 'Show'). When making a static parser, they also have to be
-- members of 'Typeable' and also 'Lift' for 'toConstrPat' to work.
deriving (Data, Eq, Ord, Show, Typeable)
instance ToSym CToken CToken where
type ToSymT CToken CToken = CToken
toSym = STerm
-- Derive a 'Lift' instance
$(deriveLift ''CToken)
-- The tokens of the language we are making a static parser for must have a
-- 'ToPat' instance, which provides a way for Grempa to convert the token
-- to a Template Haskell pattern matching. For tokens that should only be
-- compared on the constructor level, the implementation is easy, as there is
-- a function to do just that in Grempa.
instance ToPat CToken where
toPat = toConstrPat
-- | Our grammar operates on lists of 'CTokens' and returns the 'Integer'
-- result directly, without computing a tree-shaped result.
calc :: Grammar CToken Integer
-- This is very similar to the definition of the previous example, but using
-- operators operating on 'Integer's instead of constructors for the semantic
-- actions.
-- Here we are using 'levels' and 'lrule's which means that the rules will
-- be linked together automatically with identity rules.
calc = levels $ do
rec
e <- lrule [ (+) <@> e <# Plus <#> t ]
t <- lrule [ (*) <@> t <# Times <#> f ]
f <- lrule [ id <@ LParen <#> e <# RParen
, unNum <@> num
]
return e
where
    -- Because the parser compares tokens only by constructor (thanks to the
    -- 'toConstrPat'-based 'ToPat' instance above), this single value can
    -- stand in for any number token.
num = Num 0
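-- A hypothetical token stream for the input "1 + 2 * 3", standing in for the
-- output of the assumed lexer mentioned at the top of this module. Feeding it
-- to a parser generated from 'calc' should evaluate to 7.
exampleInput :: [CToken]
exampleInput = [Num 1, Plus, Num 2, Times, Num 3]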
| ollef/Grempa | examples/Ex2Calculator.hs | bsd-3-clause | 3,072 | 0 | 15 | 691 | 313 | 190 | 123 | 31 | 1 |
{-# LANGUAGE FlexibleContexts,FlexibleInstances #-}
{-
Some functions that are used across the program
-}
module HOCHC.Utils((%),errorPart,fromRight,xor,(>><),check,(?),throwError)
where
import Control.Monad.Except
import qualified Control.Monad.Fail
instance Control.Monad.Fail.MonadFail (Either String) where
fail s = Left s
-- helper function for constructing strings nicely
(%) :: String -> [String] -> String
(%) s [] = s
(%) ('{':'}':s) (x:xs) = x++(s%xs)
(%) ('\\':c:s) xs = c:(s%xs)
(%) (c:s) xs = c:(s%xs)
(%) "" _ = error "not enough '{}' in string"
-- Annotates errors that originate from a region of code
errorPart :: MonadError String m => String -> m a -> m a
errorPart s = (`catchError` (\ e ->throwError ("{} Error:\n{}" % [s,e])))
(?) :: MonadError e m => Maybe a -> e -> m a
(?) = flip$ (`maybe` return) .throwError
--(?) m e = maybe (throwError e) return m
check :: MonadError String m => Bool -> String -> m ()
check cond err = if cond then return () else throwError err
fromRight :: Either a b -> b
fromRight (Right x) = x
(>><) :: Monad m => m a -> (a-> m b) -> m a
xm >>< f = xm >>= (\x -> f x >> return x)
xor True x = not x
xor False x = x
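-- Minimal usage sketches (illustration only; not exported): '%' splices the
-- given strings into the '{}' holes of a format string, and '?' turns a
-- 'Nothing' into a thrown error message.
formatExample :: String
formatExample = "expected {}, got {}" % ["Int", "Bool"]
-- evaluates to "expected Int, got Bool"

lookupOrThrow :: MonadError String m => Maybe a -> m a
lookupOrThrow mx = mx ? "lookup failed"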
| penteract/HigherOrderHornRefinement | HOCHC/Utils.hs | bsd-3-clause | 1,185 | 1 | 11 | 232 | 530 | 293 | 237 | 24 | 2 |
{-# LANGUAGE ConstraintKinds, DeriveDataTypeable, FlexibleContexts,
OverloadedStrings, PatternGuards #-}
-- | refunds on payins and transfers
module Web.MangoPay.Refunds where
import Web.MangoPay.Monad
import Web.MangoPay.Payins
import Web.MangoPay.Types
import Web.MangoPay.Users
import Web.MangoPay.Wallets
import Data.Text
import Data.Typeable (Typeable)
import Data.Aeson
import Control.Applicative
-- | refund a transfer
refundTransfer :: (MPUsableMonad m) => TransferId -> AnyUserId -> AccessToken -> MangoPayT m Refund
refundTransfer tid authId at= do
url<-getClientURLMultiple ["/transfers/",tid,"/refunds"]
postExchange url (Just at) (RefundRequest authId Nothing Nothing)
-- | refund a pay-in
refundPayin :: (MPUsableMonad m) => AnyPayinId -> RefundRequest -> AccessToken -> MangoPayT m Refund
refundPayin pid rr at= do
url<-getClientURLMultiple ["/payins/",pid,"/refunds"]
postExchange url (Just at) rr
-- | fetch a refund from its Id
fetchRefund :: (MPUsableMonad m) => RefundId -> AccessToken -> MangoPayT m Refund
fetchRefund = fetchGeneric "/refunds/"
-- | refund request
data RefundRequest=RefundRequest{
rrAuthorId :: AnyUserId -- ^ The user Id of the author
,rrDebitedFunds :: Maybe Amount -- ^ Strictly positive amount. In cents.
,rrFees :: Maybe Amount -- ^ In cents
}deriving (Show,Eq,Ord,Typeable)
-- | to json as per MangoPay format
instance ToJSON RefundRequest where
toJSON rr=objectSN ["AuthorId" .= rrAuthorId rr,"DebitedFunds" .= rrDebitedFunds rr,
"Fees" .= rrFees rr]
-- | id of a refund
type RefundId = Text
-- | refund of a transfer
data Refund=Refund{
rId :: RefundId -- ^ Id of the refund
,rCreationDate :: MpTime
,rTag :: Maybe Text -- ^ Custom data
,rAuthorId :: AnyUserId -- ^ The user Id of the author
,rDebitedFunds :: Amount -- ^ Strictly positive amount. In cents.
,rFees :: Amount -- ^ In cents
,rCreditedFunds :: Amount -- ^ In cents
,rStatus :: TransferStatus
,rResultCode :: Text -- ^ The transaction result code
,rResultMessage :: Maybe Text -- ^ The transaction result Message
,rExecutionDate :: Maybe MpTime
,rType :: TransactionType
,rNature :: TransactionNature
,rCreditedUserId :: Maybe AnyUserId -- ^ Id of the user owner of the credited wallet
,rInitialTransactionId :: TransactionId -- ^ Id of the transaction being refunded
,rInitialTransactionType :: TransactionType -- ^ The type of the transaction before being refunded (PayIn, Refund)
,rDebitedWalletId :: WalletId -- ^ The Id of the debited Wallet
,rCreditedWalletId :: Maybe WalletId -- ^ The Id of the credited Wallet
,rReason :: RefundReason -- ^ The reason from the refund, since <http://docs.mangopay.com/release-lapin/>
} deriving (Show,Eq,Ord,Typeable)
-- | from json as per MangoPay format
instance FromJSON Refund where
parseJSON (Object v) =Refund <$>
v .: "Id" <*>
v .: "CreationDate" <*>
v .:? "Tag" <*>
v .: "AuthorId" <*>
v .: "DebitedFunds" <*>
v .: "Fees" <*>
v .: "CreditedFunds" <*>
v .: "Status" <*>
v .: "ResultCode" <*>
v .:? "ResultMessage" <*>
v .:? "ExecutionDate" <*>
v .: "Type" <*>
v .: "Nature" <*>
v .:? "CreditedUserId" <*>
v .: "InitialTransactionId" <*>
v .: "InitialTransactionType" <*>
v .: "DebitedWalletId" <*>
v .:? "CreditedWalletID" <*>
v .: "RefundReason"
parseJSON _=fail "Refund"
-- | Type for refund reason, since <http://docs.mangopay.com/release-lapin/>.
data RefundReasonType =
BANKACCOUNT_HAS_BEEN_CLOSED
| INITIALIZED_BY_CLIENT
| OTHER
deriving (Show,Read,Eq,Ord,Bounded,Enum,Typeable)
-- | to json as per MangoPay format
instance ToJSON RefundReasonType where
toJSON =toJSON . show
-- | from json as per MangoPay format
instance FromJSON RefundReasonType where
parseJSON = jsonRead "RefundReasonType"
-- | Reason for a refund, since <http://docs.mangopay.com/release-lapin/>.
data RefundReason = RefundReason
{ rrMessage :: Maybe Text
, rrType :: RefundReasonType
} deriving (Show,Eq,Ord,Typeable)
-- | from json as per MangoPay format
instance FromJSON RefundReason where
parseJSON (Object v) =RefundReason <$>
v .:? "RefundReasonMessage" <*>
v .: "RefundReasonType"
parseJSON _=fail "RefundReason"
| prowdsponsor/mangopay | mangopay/src/Web/MangoPay/Refunds.hs | bsd-3-clause | 4,998 | 0 | 43 | 1,508 | 913 | 507 | 406 | 92 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- This module keeps track of the probabilities of a word following two other
-- words. This can be used to generate a third item from a sequence of two
-- items, and by extension, a string of many items.
module Text.Ribot.Mimic
( mimic
, FreqMap
, getWeightedChoice
) where
import Control.Monad (liftM)
import qualified Data.Either as E
import qualified Data.List as L
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Ord as O
import qualified Data.Set as S
import qualified Data.Text as T
import Database.Ribot
import System.Random
-- [Text.Ribot.Tokenizer](Tokenizer.html)
import Text.Ribot.Tokenizer
import qualified Text.Bakers12.Tokenizer.Types as BT12
-- This takes a list of messages. It tokenizes their texts, breaks them into
-- triples, and creates a `TextGenerator` from that data. It uses that
-- generator to assemble a mimicking utterance for the person.
mimic :: [Message] -> IO [T.Text]
mimic [] = return []
mimic msgs = chain tg "#" 12
where
tokens = map (map BT12.tokenText)
. E.rights
. map (tokenize "" . messageText)
$ msgs
trips = L.concatMap (triples "#") tokens
tg = mkTextGenerator trips
-- This is a frequency map: it counts how many times each item was seen.
type FreqMap a = M.Map a Int
-- This is a mapping between two observations and a `FreqMap` of the items that
-- immediately follow it.
type Model a = M.Map (a, a) (FreqMap a)
-- This is the data type for holding the information about the statistical
-- model. Currently, this is just a stub.
data TextGenerator a = TextGenerator (Model a)
-- This is the constructor for `TextGenerator` data. It takes a set of data to
-- train on and returns the constructed generator.
mkTextGenerator :: Ord a => [(a, a, a)] -> TextGenerator a
mkTextGenerator = TextGenerator . L.foldl' combine M.empty
where
-- This handles one step of the fold.
combine :: Ord a => Model a -> (a, a, a) -> Model a
combine hmm (obs1, obs2, nextStep) =
M.alter (combine' nextStep) (obs1, obs2) hmm
-- This increments a sequence's `FreqMap`.
combine' :: Ord a => a -> Maybe (FreqMap a) -> Maybe (FreqMap a)
combine' item Nothing = Just $ M.singleton item 1
combine' item (Just fm) = Just $ M.alter incr item fm
-- This increments an item's count in a `FreqMap`.
incr :: Maybe Int -> Maybe Int
incr Nothing = Just 1
incr (Just n) = Just (n + 1)
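-- For example, mkTextGenerator [("a","b","c"), ("a","b","d"), ("a","b","c")]
-- maps the leading pair ("a","b") to the frequencies c -> 2, d -> 1, so
-- 'getContinuationList' would return Just [("c",2),("d",1)] for that pair.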
-- This returns the continuations for a leading sequence in descending order of
-- frequency.
getContinuationList :: Ord a => TextGenerator a -> (a, a) -> Maybe [(a, Int)]
getContinuationList (TextGenerator hmm) leadingSeq =
return . L.sortBy (O.comparing key) . M.toList =<< M.lookup leadingSeq hmm
where
key :: (a, Int) -> Int
key (_, i) = -i
{- Defined but not used
-- This returns the most likely continuation of the sequence given. If there's
-- no data for that sequence, `Nothing` is returned.
mostLikely :: Ord a => TextGenerator a -> (a, a) -> Maybe a
mostLikely textGen leadingSeq =
fmap fst . listToMaybe =<< getContinuationList textGen leadingSeq
-}
-- This returns a continuation for the sequence. It does using a random
-- selecting weighted by the frequency of each continuation. If there aren't
-- any continuations, `Nothing` is returned.
randomContinuation :: Ord a => TextGenerator a -> (a, a) -> IO (Maybe a)
randomContinuation textGen leadingSeq =
case getContinuationList textGen leadingSeq of
Nothing -> return Nothing
Just cont -> liftM (getWeightedChoice cont) randomIO
-- This takes a list of items with weights and a cut-off expressed as a
-- fraction of the total weight. It walks through the list and returns the
-- first item whose running share of the total weight reaches the cut-off.
getWeightedChoice :: [(a, Int)] -> Double -> Maybe a
getWeightedChoice choices cutOff =
-- This is a mouthful, and you have to read it backwards. Here's what it
-- does:
--
-- * `L.mapAccumL accum 0 choices` — Convert the weights in the list of
-- choices into a percentage of the total weights in the list;
-- * `snd` — Drop off the accumulator's state, the running total;
-- * `L.dropWhile ((< cutOff) . snd)` — Remove all of the items from the
-- front of the list until we get to the item whose running weight total
-- percentage is at or above the cut off (i.e., remove all the items up to
-- the one we want);
-- * `map fst` — Remove the running weighted total percentage and save only
-- the item;
-- * `listToMaybe` — If there is anything in the list, take the head and
-- wrap it in `Just`; otherwise, return `Nothing`.
listToMaybe
. map fst
. L.dropWhile ((< cutOff) . snd)
. snd
$ L.mapAccumL accum 0 choices
where
total = fromIntegral . L.sum $ map snd choices
-- This handles accumulating the running total information. It takes
-- each pair from the choice list and replaces the weight with the
-- running percentage of the weight totals.
accum :: Int -> (a, Int) -> (Int, (a, Double))
accum running (item, weight) =
(running', (item, fromIntegral running' / total))
where running' = running + weight
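-- For example, with choices [("a",1),("b",3)] the running percentages are
-- 0.25 and 1.0, so a cut-off of 0.2 selects "a" and a cut-off of 0.8 selects
-- "b"; an empty choice list always yields Nothing.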
-- This creates a chain of items. The item given is the item to use for
-- placeholders at the beginning and end of sequences. The number is the number
-- of tokens to output in the sequence.
--
-- First, this will get all the sequences in the generator that start with the
-- initial character. It then picks one at random. From there, it keeps calling
-- `randomConintuation` until it doesn't need more items.
--
-- This is kind of messy. I'm not very happy with it.
chain :: Ord a => Show a => TextGenerator a -> a -> Int -> IO [a]
chain textGen@(TextGenerator tg) start n = do
(_, nextItem) <- randomElem nextPairs
return . filter (/= start) =<< chain' textGen start start nextItem n
where
-- This is a list of all the sequences in the model that begin with
-- `start`.
nextPairs = L.filter ((start ==) . fst) $ M.keys tg
-- This is a set of all the options. It's used to randomly select a
-- continuation for where there are holes in the model.
itemSet :: Ord b => Model b -> S.Set b
itemSet m = S.fromList . L.concatMap (uncurry getItems) $ M.toList m
where
getItems :: Ord c => (c, c) -> FreqMap c -> [c]
getItems (t1, t2) freqMap = t1 : t2 : M.keys freqMap
-- Just `itemSet` as a list so we can easily pick one at random.
itemList :: Ord d => Model d -> [d]
itemList = S.toList . itemSet
randomElem :: Ord a => [a] -> IO a
randomElem list =
liftM (list !!) (randomRIO (0, length list - 1))
    -- This builds the chain one token at a time, pulling each continuation
    -- from the generator (or a random token when the model has none).
chain' :: Ord e => Show e => Eq e =>
TextGenerator e -> e -> e -> e -> Int -> IO [e]
chain' _ _ _ _ 0 = return []
chain' tg'@(TextGenerator m) filterOut token1 token2 thisN = do
contMaybe <- randomContinuation tg' (token1, token2)
case contMaybe of
Just token3 -> do
rest <- chain' tg' filterOut token2 token3 n'
return (token1 : rest)
Nothing -> do
token3 <- randomElem (itemList m)
rest <- chain' tg' filterOut token2 token3 n'
return (token1 : rest)
where n' = if token1 == filterOut
then thisN
else thisN - 1
-- This breaks a list of items into a list of overlapping triples. The first
-- position is for a boundary marker, so this call,
--
-- triples -1 [1..5]
-- [(-1, -1, 1), (-1, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, -1),
-- (5, -1, -1)]
triples :: a -> [a] -> [(a, a, a)]
triples fill xs = L.zip3 xxs (L.drop 1 xxs) (L.drop 2 xxs)
where xxs = (fill : fill : xs) ++ [fill, fill]
| erochest/ribot | src/Text/Ribot/Mimic.hs | bsd-3-clause | 8,354 | 0 | 17 | 2,302 | 1,690 | 914 | 776 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Ariadne.Macros where
import Language.Haskell.TH
import Data.Generics.Zipper
import Control.Monad
justE e = [| Just $e |]
cmatch :: Q Exp -> Q Exp
cmatch exp = do
ConE conNm <- exp
varNm <- newName "e"
lamCaseE [ match (asP varNm $ recP conNm []) (normalB $ justE $ varE varNm) []
, match wildP (normalB [| Nothing |]) []
]
zmatch :: Q Exp -> Q Exp
zmatch exp = [| getHole >=> $(cmatch exp) |]
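-- A rough sketch of the expansion (illustration only): for some constructor C,
-- $(cmatch [| C |]) behaves like
--     \case { e@C {} -> Just e; _ -> Nothing }
-- and 'zmatch' pre-composes that test with 'getHole', so the resulting
-- function checks whether the focus of a zipper was built with C.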
| apsk/ariadne | src/Ariadne/Macros.hs | bsd-3-clause | 494 | 0 | 13 | 138 | 174 | 91 | 83 | 14 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module Sequent.Env
( EnvT, evalEnvT
, Env, evalEnv
, next
) where
import Control.Monad.State.Lazy (MonadState, StateT, evalStateT, get,
modify)
import Control.Monad.Trans (MonadTrans)
import Control.Monad.Writer (MonadWriter)
import Data.Functor.Identity (Identity, runIdentity)
-- TODO replace with a SupplyT ?
newtype EnvT m a = EnvT { runEnv :: StateT Int m a }
deriving ( Functor
, Applicative
, Monad
, MonadTrans
, MonadState Int
, MonadWriter w)
type Env = EnvT Identity
evalEnvT :: (Monad m) => EnvT m a -> m a
evalEnvT env = evalStateT (runEnv env) 0
evalEnv :: Env a -> a
evalEnv = runIdentity . evalEnvT
next :: (Monad m) => EnvT m Int
next = do
x <- get
modify succ
return x
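-- A minimal usage sketch (illustration only; not exported): each call to
-- 'next' yields a fresh index, counting up from zero.
threeFresh :: (Int, Int, Int)
threeFresh = evalEnv $ do
    a <- next
    b <- next
    c <- next
    return (a, b, c)
-- threeFresh evaluates to (0, 1, 2)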
| matthieubulte/sequent | src/Sequent/Env.hs | bsd-3-clause | 1,062 | 0 | 7 | 352 | 259 | 147 | 112 | 30 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE PatternSynonyms #-}
module Language.Indescript.Parser.Lexer (
Token(..)
, LinkedPos
, PosedToken
, lexIndescript
) where
import Data.Char (ord)
import Text.Regex.Applicative
import Language.Indescript.Syntax
import Language.Indescript.Parser.Pos
-- # Charsets
-- ## Number Charsets
binit = ['0'..'1']
octit = ['0'..'7']
digit = ['0'..'9']
hexit = digit ++ ['A'..'F'] ++ ['a'..'f']
cBinit = symClass binit
cOctit = symClass octit
cDigit = symClass digit
cHexit = symClass hexit
-- ## Identifier Charsets
asciiSmall = ['a'..'z'] ++ ['_']
asciiLarge = ['A'..'Z']
cSmall = symClass asciiSmall
cLarge = symClass asciiLarge
cIdChar = cSmall <|> cLarge <|> cDigit <|> sym '\''
-- ## Special Characters
special = "(),;[]`{}"
cSpecial = symClass special
-- ## Symbol Charsets
asciiSym = "!#$%&*+./<=>?@\\^|-~:"
cSymbol = symClass asciiSym
-- ## White Charsets
cTab = sym '\t'
cSpace = sym ' '
cWhite = cTab <|> cSpace
cLine = symClass "\n\r\f"
-- # Regular Expressions
-- ## Numbers
pBin = some cBinit
pOct = some cOctit
pDec = some cDigit
pHex = some cHexit
pInteger :: RE Char (Lit, Int)
pInteger = (string "0b" <|> string "0B") *> fmap (from 2) pBin
<|> (string "0o" <|> string "0o") *> fmap (from 8) pOct
<|> (string "0x" <|> string "0x") *> fmap (from 16) pHex
<|> fmap (from 10) pDec
where
from :: Int -> String -> (Lit, Int)
from base s = (LInt $ fold base s, length s)
fold :: Int -> String -> Int
fold base str = foldl (\s d -> s * base + readDigit d) 0 str
readDigit :: Char -> Int
readDigit c = let
try lo up = let dx = ord c - ord lo
in if dx < up then Just dx else Nothing
x = try '0' 10
<|> (+10) <$> try 'A' 6
<|> (+10) <$> try 'a' 6
in case x of Just x' -> x'; Nothing -> impossible
pFloat :: RE Char (Lit, Int)
pFloat = let str = pDec <++> string "." <++> pDec
in fmap ((,) <$> LFloat . read <*> length) str
-- ## Identifiers
pVarId = cSmall <:> many cIdChar
pConId = cLarge <:> many cIdChar
-- pConSym should always precede pVarSym.
pConSym = sym ':' <:> many cSymbol
pVarSym = some cSymbol
-- ## Special Tokens
pSpecial = cSpecial
-- ## State Transition Trigger
tString = string "\""
tChar = string "'"
tBlockComment = string "{-"
-- # Rules
type TokenResult = Either Trigger (Token, Int)
token :: RE Char TokenResult
token = triggers <|> right vars <|> right lits <|> right pncs
where
triggers = (pure $ Left TrString) <* tString
<|> (pure $ Left TrChar) <* tChar
<|> (pure $ Left TrComment) <* tBlockComment
<|> (pure $ Left TrLine) <* cLine
<|> white
right = fmap Right
vars = reserve VarId keys <$> pVarId
<|> reserve ConId [] <$> pConId
<|> reserve ConSym ops1 <$> pConSym
<|> reserve VarSym ops2 <$> pVarSym
where
reserve con set x = if x `elem` set
then (TkRsv x, length x)
else (TkVar (con x), length x)
keys = [ "let", "in", "where", "case", "of", "do"
, "if", "then", "else"
, "infix", "infixl", "infixr"
, "data", "type", "forall", "newtype", "_"
, "class", "instance", "deriving"
, "module", "import"]
ops1 = [":", "::"]
ops2 = [ "..", "=", "\\", "|", "<-", "->", "@", "~", "=>" ]
lits = fmap tokenify (pInteger <|> pFloat)
where tokenify (lit, w) = (TkLit lit, w)
pncs = flip fmap pSpecial $ fmap (, 1) (TkRsv . (:[]))
white = (Left . TrWhite . toInt) <$> some cWhite
where
toInt = sum . map getInt
getInt ' ' = 1
getInt '\t' = 4
getInt _ = impossible
tokens :: String -> SourcePoint -> Maybe [(Token, ElemPos)]
tokens "" _ = Just []
tokens s pnt = case findLongestPrefix token s of
Just (Right (t, w), s') -> let
pos = ElemPos pnt $ SourceSpan (0, w)
in fmap ((t, pos):) $ tokens s' $ endPoint pos
Just (Left trigger, s') -> case trigger of
TrWhite w -> tokens s' $ incCol w pnt
TrLine -> tokens s' $ SourcePoint (1 + srcRow pnt, 1)
_ -> error "not supported yet"
Nothing -> Nothing
lexTokens :: String -> Maybe [(Token, ElemPos)]
lexTokens = flip tokens $ SourcePoint (1, 1)
-- # Postprocessing
-- ## Layout Resolution
resolveLayout :: [(Token, ElemPos)] -> Maybe [(Token, ElemPos)]
resolveLayout = flip layout [] . insertIndents
-- ### Insert Indents
data Indent = NextLv Int
| SameLv Int
deriving (Eq, Show)
insertIndents :: [(Token, ElemPos)] -> [Either Indent (Token, ElemPos)]
insertIndents [] = []
insertIndents (tok:toks) = case indent (tok:toks) of
Just x -> Right tok : Left x : insertIndents toks
Nothing -> Right tok : insertIndents toks
where
isBlockStart (TkRsv s, _) = s `elem` ["let", "where", "do", "of"]
isBlockStart _ = False
indent :: [(Token, ElemPos)] -> Maybe Indent
indent (t:ts)
| isBlockStart t, [] <- ts = Just $ NextLv 0
| isBlockStart t, ((_, p) : _) <- ts
= Just $ NextLv $ startCol p
indent ((_, p1) : (_, p2) : _)
| startRow p1 < startRow p2 = Just $ SameLv $ startCol p2
indent _ = Nothing
-- ### Layout Algorithm
-- A direct translation of the layout algorithm presented
-- in the Haskell 2010 Language Report. Modified to handle “in”.
-- Pattern Synonyms
pattern Same n = Left (SameLv n)
pattern Next n = Left (NextLv n)
pattern TokP s <- Right (TkRsv s, _)
layout :: [Either Indent (Token, ElemPos)] -> [Int] -> Maybe [(Token, ElemPos)]
-- L (<n>:ts) (m:ms) = ; : (L ts (m:ms)) if m = n
-- = } : (L (<n>):ts ms) if n < m
-- L (<n>:ts) ms = L ts ms
layout tts@((Same n):ts) (m:ms)
| m == n = (semicolon:) <$> layout ts (m:ms)
| n < m = (rbrace:) <$> layout tts ms
layout ((Same _):ts) ms = layout ts ms
-- L ({n}:ts) (m:ms) = { : (L ts (n:m:ms)) if n > m
-- L ({n}:ts) [] = { : (L ts [n]) if n > 0
-- L ({n}:ts) ms = { : } : (L (<n>:ts) ms)
layout ((Next n):ts) (m:ms)
| n > m = (lbrace:) <$> layout ts (n:m:ms)
layout ((Next n):ts) []
| n > 0 = (lbrace:) <$> layout ts [n]
layout ((Next n):ts) ms = do
rest <- layout (Same n:ts) ms
return $ lbrace:rbrace:rest
-- Special rule for “in”:
-- L (t:in:ts) (m:ms) = t : } : (L (in:ts) ms) if t ≠ { and m ≠ 0
layout (t1:t2:ts) (m:ms)
| Right t1' <- t1, notRbrace t1
, TokP "in" <- t2
, m /= 0 = (\rest -> t1':rbrace:rest) <$> layout (t2:ts) ms
where notRbrace (TokP "}") = False
notRbrace _ = True
-- L (}:ts) (0:ms) = } : (L ts ms)
-- L (}:ts) ms = parse-error
layout ((TokP "}"):ts) mms
| 0:ms <- mms = (rbrace:) <$> layout ts ms
| otherwise = Nothing
-- L ({:ts) ms = { : (L ts (0:ms))
-- L (t:ts) (m:ms) = } : (L (t:ts) ms) if m ≠ 0 and parse-error(t)
-- A parse-error only occurs when an explicit right brace matches a non-explicit
-- left brace, as guarded below.
layout (t:ts) mms@(m:ms)
| TokP "{" <- t, Right lb <- t
= (lb:) <$> layout ts (0:mms)
| TokP "}" <- t, m /= 0
= (rbrace:) <$> layout (t:ts) ms
-- L (t:ts) ms = t : (L ts ms)
-- L [] [] = []
-- L [] (m:ms) = } : (L [] ms) if m≠0
layout ((Right t):ts) ms = (t:) <$> layout ts ms
layout [] [] = Just []
layout [] (m:ms)
| m /= 0 = (rbrace:) <$> layout [] ms
layout _ _ = impossible
semicolon = (TkRsv ";", nullPos pesudoPoint)
lbrace = (TkRsv "{", nullPos pesudoPoint)
rbrace = (TkRsv "}", nullPos pesudoPoint)
-- ## Position Resolution
resolvePosition :: [(Token, ElemPos)] -> [PosedToken]
resolvePosition = fst . resolve
where
resolve :: [(Token, ElemPos)] -> ([PosedToken], LinkedPos)
resolve [] = ([], [])
resolve ((t1, p1) : (t2, p2) : xs)
| startPoint p2 == pesudoPoint = let
(xs', ps') = resolve xs
p2' = nullPos (if t2 == TkRsv "{"
then startPoint $ head ps'
else endPoint p1) : ps'
p1' = p1:p2'
in ((t1, p1') : (t2, p2') : xs', p1')
resolve ((t, p):xs) = let
(ts', ps') = resolve xs
p' = p:ps'
in ((t, p') : ts', p')
-- # Interface
-- ## Interface Datatypes
data Token = TkLit Lit
| TkVar Var
| TkRsv String
deriving (Eq, Show)
type LinkedPos = [ElemPos]
type PosedToken = (Token, LinkedPos)
-- ## Interface Functions
lexIndescript :: String -> Maybe [PosedToken]
lexIndescript src = lexTokens src >>= resolveLayout >>= (pure . resolvePosition)
-- # Regex.Applicative combinator extension
symClass :: [Char] -> RE Char Char
symClass cls = psym (\x -> x `elem` cls)
(<++>) = liftA2 (++)
(<:>) = liftA2 (:)
infixr <++>
infixr <:>
-- # Helper Datatypes and Functions
data Trigger = TrString
| TrChar
| TrComment
| TrWhite Int
| TrLine
deriving (Eq, Show)
-- TODO: Move to a general module.
impossible = error "Confident impossibility."
| notcome/indescript | src/Language/Indescript/Parser/Lexer.hs | bsd-3-clause | 9,280 | 0 | 17 | 2,705 | 3,365 | 1,792 | 1,573 | 204 | 5 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
-- Note: `partialmeasureOld.hs` works fine
[lq| cons :: x:a -> _ -> {v:[a] | hd v = x} |]
cons x xs = x : xs
[lq| test :: {v:_ | hd v = 0} |]
test :: [Int]
test = cons 0 [1,2,3,4]
[lq| measure hd |]
hd :: [a] -> a
hd (x:_) = x
-- Instead of rejecting, can we just default to "un-refined" constructors?
-- Strengthened constructors
-- data [a] where
-- [] :: [a] -- as before
-- (:) :: x:a -> xs:[a] -> {v:[a] | hd v = x}
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/partialmeasure.hs | bsd-3-clause | 508 | 0 | 7 | 138 | 103 | 64 | 39 | 10 | 1 |
-- A significant part of this code has been borrowed from other
-- hakyll users, mostly Jasper through his site and hakyll's,
-- but also skybluetrades.net and chromaticleaves.com
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<$>))
import Data.Char
import Data.Maybe (catMaybes)
import Data.Monoid (mappend, (<>), mconcat, mempty)
import Data.Time.Format (formatTime)
import Data.Time.Clock (getCurrentTime)
import Hakyll
import Hakyll.Web.Tags
import System.Locale (defaultTimeLocale)
import Text.Blaze.Html (toHtml, toValue, (!))
import Text.Blaze.Html.Renderer.String (renderHtml)
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Pandoc
import Text.Pandoc.Options
import GHC.IO.Encoding
main :: IO ()
main = do
setLocaleEncoding utf8
setFileSystemEncoding utf8
setForeignEncoding utf8
year <- getCurrentYear
hakyllWith config $ do
match "favicon.ico" $ do
route idRoute
compile copyFileCompiler
match "images/*" $ do
route idRoute
compile copyFileCompiler
match "js/*" $ do
route idRoute
compile copyFileCompiler
match "css/*.css" $ do
route idRoute
compile compressCssCompiler
match "css/*.ttf" $ do
route idRoute
compile copyFileCompiler
match "templates/*" $ compile templateCompiler
-- build tags
tags <- buildTags "posts/*" (fromCapture "tags/*.html")
match "posts/*" $ do
route $ setExtension "html"
compile $ myPandocCompiler
>>= saveSnapshot "content"
>>= loadAndApplyTemplate "templates/post.html" (postCtx tags)
>>= loadAndApplyTemplate "templates/default.html" (defaultContext `mappend` yearCtx year)
>>= relativizeUrls
match "pages/*" $ do
route $ gsubRoute "pages/" (const "") `composeRoutes`
setExtension ".html"
compile $ myPandocCompiler
>>= loadAndApplyTemplate "templates/page.html" defaultContext
>>= loadAndApplyTemplate "templates/default.html" (defaultContext `mappend` yearCtx year)
>>= relativizeUrls
create ["rss.xml"] $ do
route idRoute
compile $ do
loadAllSnapshots "posts/*" "content"
>>= fmap (take 10) . recentFirst
>>= renderRss myFeedConfiguration feedCtx
-- create a listing of all posts, most recent first
create ["posts.html"] $ do
route idRoute
compile $ do
posts <- recentFirst =<< loadAll "posts/*"
let ctx = constField "title" "Posts" <>
listField "posts" (postCtx tags) (return posts) <>
defaultContext
makeItem ""
>>= loadAndApplyTemplate "templates/posts.html" ctx
>>= loadAndApplyTemplate "templates/default.html" (ctx `mappend` yearCtx year)
>>= relativizeUrls
-- Post tags
tagsRules tags $ \tag pattern -> do
let title = "Posts tagged " ++ tag
-- Copied from posts, need to refactor
route idRoute
compile $ do
posts <- recentFirst =<< loadAll pattern
let ctx = constField "title" title <>
listField "posts" (postCtx tags) (return posts) <>
defaultContext
makeItem ""
>>= loadAndApplyTemplate "templates/posts.html" ctx
>>= loadAndApplyTemplate "templates/default.html" (ctx `mappend` yearCtx year)
>>= relativizeUrls
match "index.html" $ do
route idRoute
compile $ do
posts <- fmap (take 5) . recentFirst =<< loadAll "posts/*"
let indexCtx =
listField "posts" (postCtx tags) (return posts) <>
field "tagcloud" (\_ -> myTagCloud tags) <>
defaultContext
getResourceBody
>>= applyAsTemplate indexCtx
>>= loadAndApplyTemplate "templates/default.html" indexCtx
>>= relativizeUrls
-- -----------------------------------------------------------------------------
-- * Contexts
-- | Creates a "year" context from a string representation of the current year
yearCtx :: String -> Context String
yearCtx year = field "year" $ \item -> return year
feedCtx :: Context String
feedCtx = mconcat
[ bodyField "description"
, defaultContext
]
postCtx :: Tags -> Context String
postCtx tags = mconcat
[ modificationTimeField "mtime" "%e %b %Y"
, dateField "date" "%B %e, %Y"
, myTagsField "tags" tags
, defaultContext
]
pageCtx :: Context String
pageCtx = mconcat
[ modificationTimeField "mtime" "%e %b %Y"
, defaultContext
]
-- -----------------------------------------------------------------------------
-- * Feed configuration
-- | Holds my feed's configuration
myFeedConfiguration :: FeedConfiguration
myFeedConfiguration = FeedConfiguration
{ feedTitle = "Joey's Computing Blog"
, feedDescription = "Haskell, Elm, Theory and More!"
, feedAuthorName = "Joey Eremondi"
, feedAuthorEmail = "[email protected]"
, feedRoot = "http://eremondi.com"
}
-- -----------------------------------------------------------------------------
-- * Compilers
-- | Creates a compiler to render a list of posts for a given pattern, context,
-- and sorting/filtering function
postList :: Pattern
-> Context String
-> ([Item String] -> Compiler [Item String])
-> Compiler String
postList pattern postCtx sortFilter = do
posts <- sortFilter =<< loadAll pattern
itemTpl <- loadBody "templates/post-item.html"
applyTemplateList itemTpl postCtx posts
-- -----------------------------------------------------------------------------
-- * Helpers
--
getCurrentYear :: IO String
getCurrentYear = formatTime defaultTimeLocale "%Y" <$> getCurrentTime
myTagCloud :: Tags -> Compiler String
myTagCloud tags =
renderTagCloud 80 250 tags
myTagsField :: String -> Tags -> Context a
myTagsField =
tagsFieldWith getTags renderOneTag $ \tagLinks -> do
H.ul ! A.class_ "list-inline" $ do
H.li $ H.i ! A.class_ "fa fa-tags" $ mempty
sequence_ tagLinks
renderOneTag :: String -> Maybe FilePath -> Maybe H.Html
renderOneTag _ Nothing = Nothing
renderOneTag tag (Just filepath) =
Just $ H.li $
H.a ! A.href (toValue $ toUrl filepath) $ toHtml tag
--------------------------------------------------------------------------------
config :: Configuration
config = defaultConfiguration
{ deployCommand = "rsync -avz -e ssh ./_site/ \
\ [email protected]:public_html/"
}
myPandocCompiler' :: Maybe String -> Compiler (Item String)
myPandocCompiler' withToc =
pandocCompilerWith (defaultHakyllReaderOptions {readerSmart = False} ) $
case withToc of
Just x | map toLower x `elem` ["true", "yes"] -> writerWithToc
| otherwise -> writerOpts
Nothing -> writerOpts
where writerOpts = defaultHakyllWriterOptions
{ writerReferenceLinks = True
, writerSectionDivs = True
, writerHtml5 = True
, writerHTMLMathMethod = MathJax "http://cdn.mathjax.org/mathjax/latest/MathJax.js"
, writerColumns = 100
}
writerWithToc =
writerOpts { writerTableOfContents = True
, writerTemplate = "$if(toc)$<div id=\"toc\"><h3>Table of contents</h3>$toc$</div>$endif$\n$body$"
, writerStandalone = True
}
myPandocCompiler :: Compiler (Item String)
myPandocCompiler = do
ident <- getUnderlying
myPandocCompiler' =<< getMetadataField ident "toc"
--------------------------------------------------------------------------------
| JoeyEremondi/eremondi.com-hakyll | site.hs | bsd-3-clause | 8,484 | 15 | 24 | 2,648 | 1,669 | 835 | 834 | 167 | 2 |
module Import
( module Import
) where
import Prelude as Import hiding (head, init, last,
readFile, tail, writeFile)
import Yesod as Import hiding (Route (..))
import Control.Applicative as Import (pure, (<$>), (<*>))
import Data.Text as Import (Text)
import Foundation as Import
import Model as Import
import Settings as Import
import Settings.Development as Import
import Settings.StaticFiles as Import
import Widgets as Import
import Utility as Import
#if __GLASGOW_HASKELL__ >= 704
import Data.Monoid as Import
(Monoid (mappend, mempty, mconcat),
(<>))
#else
import Data.Monoid as Import
(Monoid (mappend, mempty, mconcat))
infixr 5 <>
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
#endif
| jabaraster/MetShop | Import.hs | bsd-3-clause | 1,154 | 0 | 6 | 565 | 152 | 112 | 40 | 19 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Ast (
AstIn(..)
, AstOut(..)
) where
import Util.TypeList
import Ast.Kind
import Ast.Type
import Ast.Pattern
import Ast.Term
class AstIn (k :: j) where
type KindList k :: [(* -> *) -> * -> *]
type TypeList k :: [((* -> *) -> * -> *) -> (* -> *) -> * -> *]
type TypeSchemeList k :: [((* -> *) -> * -> *) -> (* -> *) -> * -> *]
type PatternList k :: [(* -> *) -> * -> *]
type TermList k :: [((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *]
instance AstIn '[] where
type KindList '[] = '[]
type TypeList '[] = '[]
type TypeSchemeList '[] = '[]
type PatternList '[] = '[]
type TermList '[] = '[]
instance (AstIn k, AstIn ks) => AstIn (k ': ks) where
type KindList (k ': ks) = Append (KindList k) (KindList ks)
type TypeList (k ': ks) = Append (TypeList k) (TypeList ks)
type TypeSchemeList (k ': ks) = Append (TypeSchemeList k) (TypeSchemeList ks)
type PatternList (k ': ks) = Append (PatternList k) (PatternList ks)
type TermList (k ': ks) = Append (TermList k) (TermList ks)
class AstOut (k :: j) where
type RKindF k :: ((* -> *) -> * -> *)
type RTypeF k :: (((* -> *) -> * -> *) -> (* -> *) -> * -> *)
type RTypeSchemeF k :: (((* -> *) -> * -> *) -> (* -> *) -> * -> *)
type RPatternF k :: ((* -> *) -> * -> *)
type RTermF k :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)
type RKind k :: (* -> *)
type RType k :: (* -> *)
type RTypeScheme k :: (* -> *)
type RPattern k :: (* -> *)
type RTerm k :: (* -> *)
instance AstIn k => AstOut (k :: j) where
type RKindF k = KiSum (KindList k)
type RTypeF k = TySum (TypeList k)
type RTypeSchemeF k = TySum (Append (TypeList k) (TypeSchemeList k))
type RPatternF k = PtSum (PatternList k)
type RTermF k = TmSum (TermList k)
type RKind k = Kind (RKindF k)
type RType k = Type (RKindF k) (RTypeF k)
type RTypeScheme k = Type (RKindF k) (RTypeSchemeF k)
type RPattern k = Pattern (RPatternF k)
type RTerm k = Term (RKindF k) (RTypeF k) (RPatternF k) (RTermF k)
| dalaing/type-systems | src/Ast.hs | bsd-3-clause | 2,511 | 0 | 15 | 603 | 1,226 | 678 | 548 | 55 | 0 |
-- |
-- Module : Language.SequentCore.OccurAnal
-- Description : Occurrence analysis for Sequent Core
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Normally occurrence analysis is performed by the simplifier, but if needed, this
-- module can update a module or a single term on demand.
{-
%
% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
%
%************************************************************************
%* *
\section[OccurAnal]{Occurrence analysis pass}
%* *
%************************************************************************
The occurrence analyser re-typechecks a core expression, returning a new
core expression with (hopefully) improved usage information.
-}
{-# LANGUAGE CPP, BangPatterns, ViewPatterns #-}
module Language.SequentCore.OccurAnal (
occurAnalysePgm, occurAnalyseTerm, occurAnalyseTerm_NoBinderSwap,
occurAnalyseJoin
) where
import Language.SequentCore.Syntax
import Id
import Name( localiseName )
import BasicTypes
import Module( Module )
import Coercion
import qualified CoreFVs as Core
import CoreSyn ( CoreRule, CoreVect )
import qualified CoreSyn as Core
import qualified CoreUtils as Core
import VarSet
import VarEnv
import Var
import Demand ( argOneShots, argsOneShots )
import Maybes ( orElse )
import Digraph ( SCC(..), stronglyConnCompFromEdgedVerticesR )
import Unique
import UniqFM
import Util
import Outputable
import FastString
import Control.Exception ( assert )
import Data.List
{-
%************************************************************************
%* *
\subsection[OccurAnal-main]{Counting occurrences: main function}
%* *
%************************************************************************
Here's the externally-callable interface:
-}
occurAnalysePgm :: Module -- Used only in debug output
-> (Activation -> Bool)
-> [CoreRule] -> [CoreVect] -> VarSet
-> SeqCoreProgram -> SeqCoreProgram
occurAnalysePgm this_mod active_rule imp_rules vects vectVars binds
| isEmptyVarEnv final_usage
= binds'
| otherwise -- See Note [Glomming]
= warnPprTrace True __FILE__ __LINE__
( hang (text "Glomming in" <+> ppr this_mod <> colon)
2 (ppr final_usage ) )
[Rec (flattenBinds binds')]
where
(final_usage, binds') = go (initOccEnv active_rule) binds
initial_uds = addIdOccs emptyDetails
(Core.rulesFreeVars imp_rules `unionVarSet`
Core.vectsFreeVars vects `unionVarSet`
vectVars)
-- The RULES and VECTORISE declarations keep things alive! (For VECTORISE declarations,
-- we only get them *until* the vectoriser runs. Afterwards, these dependencies are
-- reflected in 'vectors' — see Note [Vectorisation declarations and occurrences].)
-- Note [Preventing loops due to imported functions rules]
imp_rules_edges = foldr (plusVarEnv_C unionVarSet) emptyVarEnv
[ mapVarEnv (const maps_to) (Core.exprFreeIds arg `delVarSetList` Core.ru_bndrs imp_rule)
| imp_rule <- imp_rules
, let maps_to = Core.exprFreeIds (Core.ru_rhs imp_rule)
`delVarSetList` Core.ru_bndrs imp_rule
, arg <- Core.ru_args imp_rule ]
go :: OccEnv -> [SeqCoreBind] -> (UsageDetails, [SeqCoreBind])
go _ []
= (initial_uds, [])
go env (bind:binds)
= (final_usage, bind' ++ binds')
where
(bs_usage, binds') = go env binds
(final_usage, bind') = occAnalBind env env imp_rules_edges bind bs_usage
occurAnalyseTerm :: SeqCoreTerm -> SeqCoreTerm
-- Do occurrence analysis, and discard occurrence info returned
occurAnalyseTerm = occurAnalyseTerm' True -- do binder swap
occurAnalyseTerm_NoBinderSwap :: SeqCoreTerm -> SeqCoreTerm
occurAnalyseTerm_NoBinderSwap = occurAnalyseTerm' False -- do not do binder swap
occurAnalyseTerm' :: Bool -> SeqCoreTerm -> SeqCoreTerm
occurAnalyseTerm' enable_binder_swap term
= snd (occAnalTerm env term)
where
env = (initOccEnv all_active_rules) {occ_binder_swap = enable_binder_swap}
-- To be conservative, we say that all inlines and rules are active
all_active_rules = \_ -> True
occurAnalyseJoin :: SeqCoreJoin -> SeqCoreJoin
occurAnalyseJoin pk
= snd (occAnalJoin env pk)
where
env = initOccEnv all_active_rules
all_active_rules = \_ -> True
{-
%************************************************************************
%* *
\subsection[OccurAnal-main]{Counting occurrences: main function}
%* *
%************************************************************************
Bindings
~~~~~~~~
-}
occAnalBind :: OccEnv -- The incoming OccEnv
-> OccEnv -- Same, but trimmed by (binderOf bind)
-> IdEnv IdSet -- Mapping from FVs of imported RULE LHSs to RHS FVs
-> SeqCoreBind
-> UsageDetails -- Usage details of scope
-> (UsageDetails, -- Of the whole let(rec)
[SeqCoreBind])
occAnalBind env _ imp_rules_edges bind@(NonRec pair) body_usage
| isTyVar binder -- A type let; we don't gather usage info
= (body_usage, [bind])
| not (binder `usedIn` body_usage) -- It's not mentioned
= (body_usage, [])
| otherwise -- It's mentioned in the body
= (body_usage' +++ rhs_usage4, [NonRec pair'])
where
binder = binderOfPair pair
(body_usage', tagged_binder) = tagBinder body_usage binder
(rhs_usage1, pair') = occAnalNonRecRhs env (pair `setPairBinder` tagged_binder)
rhs_usage2 = addIdOccs rhs_usage1 (Core.idUnfoldingVars binder)
rhs_usage3 = addIdOccs rhs_usage2 (Core.idRuleVars binder)
-- See Note [Rules are extra RHSs] and Note [Rule dependency info]
rhs_usage4 = maybe rhs_usage3 (addIdOccs rhs_usage3) $ lookupVarEnv imp_rules_edges binder
-- See Note [Preventing loops due to imported functions rules]
occAnalBind _ env imp_rules_edges (Rec pairs) body_usage
= foldr occAnalRec (body_usage, []) sccs
-- For a recursive group, we
-- * occ-analyse all the RHSs
-- * compute strongly-connected components
-- * feed those components to occAnalRec
where
bndr_set = mkVarSet (map binderOfPair pairs)
sccs :: [SCC (Node Details)]
sccs = {-# SCC "occAnalBind.scc" #-} stronglyConnCompFromEdgedVerticesR nodes
nodes :: [Node Details]
nodes = {-# SCC "occAnalBind.assoc" #-} map (makeNode env imp_rules_edges bndr_set) pairs
{-
Note [Dead code]
~~~~~~~~~~~~~~~~
Dropping dead code for a cyclic Strongly Connected Component is done
in a very simple way:
the entire SCC is dropped if none of its binders are mentioned
in the body; otherwise the whole thing is kept.
The key observation is that dead code elimination happens after
dependency analysis: so 'occAnalBind' processes SCCs instead of the
original term's binding groups.
Thus 'occAnalBind' does indeed drop 'f' in an example like
letrec f = ...g...
g = ...(...g...)...
in
...g...
when 'g' no longer uses 'f' at all (eg 'f' does not occur in a RULE in
'g'). 'occAnalBind' first consumes 'CyclicSCC g' and then it consumes
'AcyclicSCC f', where 'body_usage' won't contain 'f'.
------------------------------------------------------------
Note [Forming Rec groups]
~~~~~~~~~~~~~~~~~~~~~~~~~
We put bindings {f = ef; g = eg } in a Rec group if "f uses g"
and "g uses f", no matter how indirectly. We do a SCC analysis
with an edge f -> g if "f uses g".
More precisely, "f uses g" iff g should be in scope wherever f is.
That is, g is free in:
a) the rhs 'ef'
b) or the RHS of a rule for f (Note [Rules are extra RHSs])
c) or the LHS or a rule for f (Note [Rule dependency info])
These conditions apply regardless of the activation of the RULE (eg it might be
inactive in this phase but become active later). Once a Rec is broken up
it can never be put back together, so we must be conservative.
The principle is that, regardless of rule firings, every variable is
always in scope.
* Note [Rules are extra RHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
A RULE for 'f' is like an extra RHS for 'f'. That way the "parent"
keeps the specialised "children" alive. If the parent dies
(because it isn't referenced any more), then the children will die
too (unless they are already referenced directly).
To that end, we build a Rec group for each cyclic strongly
connected component,
*treating f's rules as extra RHSs for 'f'*.
More concretely, the SCC analysis runs on a graph with an edge
from f -> g iff g is mentioned in
(a) f's rhs
(b) f's RULES
These are rec_edges.
Under (b) we include variables free in *either* LHS *or* RHS of
the rule. The former might seems silly, but see Note [Rule
dependency info]. So in Example [eftInt], eftInt and eftIntFB
will be put in the same Rec, even though their 'main' RHSs are
both non-recursive.
* Note [Rule dependency info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The VarSet in a SpecInfo is used for dependency analysis in the
occurrence analyser. We must track free vars in *both* lhs and rhs.
Hence use of idRuleVars, rather than idRuleRhsVars in occAnalBind.
Why both? Consider
x = y
RULE f x = v+4
Then if we substitute y for x, we'd better do so in the
rule's LHS too, so we'd better ensure the RULE appears to mention 'x'
as well as 'v'
* Note [Rules are visible in their own rec group]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want the rules for 'f' to be visible in f's right-hand side.
And we'd like them to be visible in other functions in f's Rec
group. E.g. in Note [Specialisation rules] we want f' rule
to be visible in both f's RHS, and fs's RHS.
This means that we must simplify the RULEs first, before looking
at any of the definitions. This is done by Simplify.simplRecBind,
when it calls addLetIdInfo.
------------------------------------------------------------
Note [Choosing loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Loop breaking is surprisingly subtle. First read the section 4 of
"Secrets of the GHC inliner". This describes our basic plan.
We avoid infinite inlinings by choosing loop breakers, and
ensuring that a loop breaker cuts each loop.
Fundamentally, we do SCC analysis on a graph. For each recursive
group we choose a loop breaker, delete all edges to that node,
re-analyse the SCC, and iterate.
But what is the graph? NOT the same graph as was used for Note
[Forming Rec groups]! In particular, a RULE is like an equation for
'f' that is *always* inlined if it is applicable. We do *not* disable
rules for loop-breakers. It's up to whoever makes the rules to make
sure that the rules themselves always terminate. See Note [Rules for
recursive functions] in Simplify.lhs
Hence, if
f's RHS (or its INLINE template if it has one) mentions g, and
g has a RULE that mentions h, and
h has a RULE that mentions f
then we *must* choose f to be a loop breaker. Example: see Note
[Specialisation rules].
In general, take the free variables of f's RHS, and augment it with
all the variables reachable by RULES from those starting points. That
is the whole reason for computing rule_fv_env in occAnalBind. (Of
course we only consider free vars that are also binders in this Rec
group.) See also Note [Finding rule RHS free vars]
Note that when we compute this rule_fv_env, we only consider variables
free in the *RHS* of the rule, in contrast to the way we build the
Rec group in the first place (Note [Rule dependency info])
Note that if 'g' has RHS that mentions 'w', we should add w to
g's loop-breaker edges. More concretely there is an edge from f -> g
iff
(a) g is mentioned in f's RHS `xor` f's INLINE rhs
(see Note [Inline rules])
(b) or h is mentioned in f's RHS, and
g appears in the RHS of an active RULE of h
or a transitive sequence of active rules starting with h
Why "active rules"? See Note [Finding rule RHS free vars]
Note that in Example [eftInt], *neither* eftInt *nor* eftIntFB is
chosen as a loop breaker, because their RHSs don't mention each other.
And indeed both can be inlined safely.
Note again that the edges of the graph we use for computing loop breakers
are not the same as the edges we use for computing the Rec blocks.
That's why we compute
- rec_edges for the Rec block analysis
- loop_breaker_edges for the loop breaker analysis
* Note [Finding rule RHS free vars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this real example from Data Parallel Haskell
tagZero :: Array Int -> Array Tag
   {-# INLINE [1] tagZero #-}
tagZero xs = pmap (\x -> fromBool (x==0)) xs
{-# RULES "tagZero" [~1] forall xs n.
pmap fromBool <blah blah> = tagZero xs #-}
So tagZero's RHS mentions pmap, and pmap's RULE mentions tagZero.
However, tagZero can only be inlined in phase 1 and later, while
the RULE is only active *before* phase 1. So there's no problem.
To make this work, we look for the RHS free vars only for
*active* rules. That's the reason for the occ_rule_act field
of the OccEnv.
* Note [Weak loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~
There is a last nasty wrinkle. Suppose we have
Rec { f = f_rhs
RULE f [] = g
h = h_rhs
g = h
...more...
}
Remember that we simplify the RULES before any RHS (see Note
[Rules are visible in their own rec group] above).
So we must *not* postInlineUnconditionally 'g', even though
its RHS turns out to be trivial. (I'm assuming that 'g' is
not chosen as a loop breaker.)  Why not?  Because then we
drop the binding for 'g', which leaves it out of scope in the
RULE!
Here's a somewhat different example of the same thing
Rec { g = h
; h = ...f...
; f = f_rhs
RULE f [] = g }
Here the RULE is "below" g, but we *still* can't postInlineUnconditionally
g, because the RULE for f is active throughout. So the RHS of h
might rewrite to h = ...g...
So g must remain in scope in the output program!
We "solve" this by:
Make g a "weak" loop breaker (OccInfo = IAmLoopBreaker True)
iff g is a "missing free variable" of the Rec group
A "missing free variable" x is one that is mentioned in an RHS or
INLINE or RULE of a binding in the Rec group, but where the
dependency on x may not show up in the loop_breaker_edges (see
note [Choosing loop breakers} above).
A normal "strong" loop breaker has IAmLoopBreaker False. So
Inline postInlineUnconditionally
IAmLoopBreaker False no no
IAmLoopBreaker True yes no
other yes yes
The **sole** reason for this kind of loop breaker is so that
postInlineUnconditionally does not fire. Ugh. (Typically it'll
inline via the usual callSiteInline stuff, so it'll be dead in the
next pass, so the main Ugh is the tiresome complication.)
Note [Rules for imported functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f = /\a. B.g a
RULE B.g Int = 1 + f Int
Note that
* The RULE is for an imported function.
* f is non-recursive
Now we
can get
f Int --> B.g Int Inlining f
--> 1 + f Int Firing RULE
and so the simplifier goes into an infinite loop. This
would not happen if the RULE was for a local function,
because we keep track of dependencies through rules. But
that is pretty much impossible to do for imported Ids. Suppose
f's definition had been
f = /\a. C.h a
where (by some long and devious process), C.h eventually inlines to
B.g. We could only spot such loops by exhaustively following
unfoldings of C.h etc, in case we reach B.g, and hence (via the RULE)
f.
Note that RULES for imported functions are important in practice; they
occur a lot in the libraries.
We regard this potential infinite loop as a *programmer* error.
It's up to the programmer not to write silly rules like
RULE f x = f x
and the example above is just a more complicated version.
Note [Preventing loops due to imported functions rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider:
import GHC.Base (foldr)
{-# RULES "filterList" forall p. foldr (filterFB (:) p) [] = filter p #-}
filter p xs = build (\c n -> foldr (filterFB c p) n xs)
filterFB c p = ...
f = filter p xs
Note that filter is not a loop-breaker, so what happens is:
f = filter p xs
= {inline} build (\c n -> foldr (filterFB c p) n xs)
= {inline} foldr (filterFB (:) p) [] xs
= {RULE} filter p xs
We are in an infinite loop.
A more elaborate example (that I actually saw in practice when I went to
mark GHC.List.filter as INLINABLE) is as follows. Say I have this module:
{-# LANGUAGE RankNTypes #-}
module GHCList where
import Prelude hiding (filter)
import GHC.Base (build)
{-# INLINABLE filter #-}
filter :: (a -> Bool) -> [a] -> [a]
filter p [] = []
filter p (x:xs) = if p x then x : filter p xs else filter p xs
{-# NOINLINE [0] filterFB #-}
filterFB :: (a -> b -> b) -> (a -> Bool) -> a -> b -> b
filterFB c p x r | p x = x `c` r
| otherwise = r
{-# RULES
"filter" [~1] forall p xs. filter p xs = build (\c n -> foldr
(filterFB c p) n xs)
"filterList" [1] forall p. foldr (filterFB (:) p) [] = filter p
#-}
Then (because RULES are applied inside INLINABLE unfoldings, but inlinings
are not), the unfolding given to "filter" in the interface file will be:
filter p [] = []
filter p (x:xs) = if p x then x : build (\c n -> foldr (filterFB c p) n xs)
else build (\c n -> foldr (filterFB c p) n xs
Note that because this unfolding does not mention "filter", filter is not
marked as a strong loop breaker. Therefore at a use site in another module:
filter p xs
= {inline}
case xs of [] -> []
(x:xs) -> if p x then x : build (\c n -> foldr (filterFB c p) n xs)
else build (\c n -> foldr (filterFB c p) n xs)
build (\c n -> foldr (filterFB c p) n xs)
= {inline} foldr (filterFB (:) p) [] xs
= {RULE} filter p xs
And we are in an infinite loop again, except that this time the loop is producing an
infinitely large *term* (an unrolling of filter) and so the simplifier finally
dies with "ticks exhausted"
Because of this problem, we make a small change in the occurrence analyser
designed to mark functions like "filter" as strong loop breakers on the basis that:
1. The RHS of filter mentions the local function "filterFB"
2. We have a rule which mentions "filterFB" on the LHS and "filter" on the RHS
So for each RULE for an *imported* function we are going to add
dependency edges between the *local* FVS of the rule LHS and the
*local* FVS of the rule RHS. We don't do anything special for RULES on
local functions because the standard occurrence analysis stuff is
pretty good at getting loop-breakerness correct there.
It is important to note that even with this extra hack we aren't always going to get
things right. For example, it might be that the rule LHS mentions an imported Id,
and another module has a RULE that can rewrite that imported Id to one of our local
Ids.
Note [Specialising imported functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT for *automatically-generated* rules, the programmer can't be
responsible for the "programmer error" in Note [Rules for imported
functions].  In particular, consider specialising a recursive function
defined in another module. If we specialise a recursive function B.g,
we get
g_spec = .....(B.g Int).....
RULE B.g Int = g_spec
Here, g_spec doesn't look recursive, but when the rule fires, it
becomes so. And if B.g was mutually recursive, the loop might
not be as obvious as it is here.
To avoid this,
* When specialising a function that is a loop breaker,
give a NOINLINE pragma to the specialised function
Note [Glomming]
~~~~~~~~~~~~~~~
RULES for imported Ids can make something at the top refer to something at the bottom:
f = \x -> B.g (q x)
h = \y -> 3
RULE: B.g (q x) = h x
Applying this rule makes f refer to h, although f doesn't appear to
depend on h. (And, as in Note [Rules for imported functions], the
dependency might be more indirect. For example, f might mention C.t
rather than B.g, where C.t eventually inlines to B.g.)
NOTICE that this cannot happen for rules whose head is a
locally-defined function, because we accurately track dependencies
through RULES. It only happens for rules whose head is an imported
function (B.g in the example above).
Solution:
- When simplifying, bring all top level identifiers into
scope at the start, ignoring the Rec/NonRec structure, so
that when 'h' pops up in f's rhs, we find it in the in-scope set
(as the simplifier generally expects). This happens in simplTopBinds.
- In the occurrence analyser, if there are any out-of-scope
occurrences that pop out of the top, which will happen after
firing the rule: f = \x -> h x
h = \y -> 3
then just glom all the bindings into a single Rec, so that
the *next* iteration of the occurrence analyser will sort
them all out. This part happens in occurAnalysePgm.
------------------------------------------------------------
Note [Inline rules]
~~~~~~~~~~~~~~~~~~~
None of the above stuff about RULES applies to Inline Rules,
stored in a CoreUnfolding. The unfolding, if any, is simplified
at the same time as the regular RHS of the function (ie *not* like
Note [Rules are visible in their own rec group]), so it should be
treated *exactly* like an extra RHS.
Or, rather, when computing loop-breaker edges,
* If f has an INLINE pragma, and it is active, we treat the
INLINE rhs as f's rhs
* If it's inactive, we treat f as having no rhs
* If it has no INLINE pragma, we look at f's actual rhs
There is a danger that we'll be sub-optimal if we see this
f = ...f...
[INLINE f = ..no f...]
where f is recursive, but the INLINE is not. This can just about
happen with a sufficiently odd set of rules; eg
foo :: Int -> Int
{-# INLINE [1] foo #-}
foo x = x+1
bar :: Int -> Int
{-# INLINE [1] bar #-}
bar x = foo x + 1
{-# RULES "foo" [~1] forall x. foo x = bar x #-}
Here the RULE makes bar recursive; but its INLINE pragma remains
non-recursive. It's tempting to then say that 'bar' should not be
a loop breaker, but an attempt to do so goes wrong in two ways:
a) We may get
$df = ...$cfoo...
$cfoo = ...$df....
[INLINE $cfoo = ...no-$df...]
But we want $cfoo to depend on $df explicitly so that we
put the bindings in the right order to inline $df in $cfoo
and perhaps break the loop altogether. (Maybe this
b)
Example [eftInt]
~~~~~~~~~~~~~~~
Example (from GHC.Enum):
eftInt :: Int# -> Int# -> [Int]
eftInt x y = ...(non-recursive)...
{-# INLINE [0] eftIntFB #-}
eftIntFB :: (Int -> r -> r) -> r -> Int# -> Int# -> r
eftIntFB c n x y = ...(non-recursive)...
{-# RULES
"eftInt" [~1] forall x y. eftInt x y = build (\ c n -> eftIntFB c n x y)
"eftIntList" [1] eftIntFB (:) [] = eftInt
#-}
Note [Specialisation rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this group, which is typical of what SpecConstr builds:
fs a = ....f (C a)....
f x = ....f (C a)....
{-# RULE f (C a) = fs a #-}
So 'f' and 'fs' are in the same Rec group (since f refers to fs via its RULE).
But watch out! If 'fs' is not chosen as a loop breaker, we may get an infinite loop:
- the RULE is applied in f's RHS (see Note [Self-recursive rules] in Simplify)
- fs is inlined (say it's small)
- now there's another opportunity to apply the RULE
This showed up when compiling Control.Concurrent.Chan.getChanContents.
-}
type Node details = (details, Unique, [Unique]) -- The Uniques are gotten
-- from the Ids.
data Details
= ND { nd_bind_pair :: SeqCoreBindPair -- Binder and RHS, already occ-analysed
, nd_uds :: UsageDetails -- Usage from RHS, and RULES, and InlineRule unfolding
-- ignoring phase (ie assuming all are active)
-- See Note [Forming Rec groups]
, nd_inl :: IdSet -- Free variables of
-- the InlineRule (if present and active)
-- or the RHS (if no InlineRule)
-- but excluding any RULES
-- This is the IdSet that may be used if the Id is inlined
, nd_weak :: IdSet -- Binders of this Rec that are mentioned in nd_uds
-- but are *not* in nd_inl. These are the ones whose
-- dependencies might not be respected by loop_breaker_edges
-- See Note [Weak loop breakers]
, nd_active_rule_fvs :: IdSet -- Free variables of the RHS of active RULES
}
instance Outputable Details where
ppr nd = ptext (sLit "ND") <> braces
(sep [ ptext (sLit "bndr =") <+> ppr (binderOfPair (nd_bind_pair nd))
, ptext (sLit "uds =") <+> ppr (nd_uds nd)
, ptext (sLit "inl =") <+> ppr (nd_inl nd)
, ptext (sLit "weak =") <+> ppr (nd_weak nd)
, ptext (sLit "rule =") <+> ppr (nd_active_rule_fvs nd)
])
makeNode :: OccEnv -> IdEnv IdSet -> VarSet -> SeqCoreBindPair -> Node Details
makeNode env imp_rules_edges bndr_set pair
= (details, varUnique bndr, keysUFM node_fvs)
where
bndr = binderOfPair pair
details = ND { nd_bind_pair = pair'
, nd_uds = rhs_usage3
, nd_weak = node_fvs `minusVarSet` inl_fvs
, nd_inl = inl_fvs
, nd_active_rule_fvs = active_rule_fvs }
-- Constructing the edges for the main Rec computation
-- See Note [Forming Rec groups]
(rhs_usage1, pair') = occAnalRecRhs env pair
rhs_usage2 = addIdOccs rhs_usage1 all_rule_fvs -- Note [Rules are extra RHSs]
-- Note [Rule dependency info]
rhs_usage3 = case mb_unf_fvs of
Just unf_fvs -> addIdOccs rhs_usage2 unf_fvs
Nothing -> rhs_usage2
node_fvs = udFreeVars bndr_set rhs_usage3
-- Finding the free variables of the rules
is_active = occ_rule_act env :: Activation -> Bool
rules = filterOut Core.isBuiltinRule (idCoreRules bndr)
rules_w_fvs :: [(Activation, VarSet)] -- Find the RHS fvs
rules_w_fvs = maybe id (\ids -> ((AlwaysActive, ids):)) (lookupVarEnv imp_rules_edges bndr)
-- See Note [Preventing loops due to imported functions rules]
[ (Core.ru_act rule, fvs)
| rule <- rules
, let fvs = Core.exprFreeVars (Core.ru_rhs rule)
`delVarSetList` Core.ru_bndrs rule
, not (isEmptyVarSet fvs) ]
all_rule_fvs = foldr (unionVarSet . snd) rule_lhs_fvs rules_w_fvs
rule_lhs_fvs = foldr (unionVarSet . (\ru -> Core.exprsFreeVars (Core.ru_args ru)
`delVarSetList` Core.ru_bndrs ru))
emptyVarSet rules
active_rule_fvs = unionVarSets [fvs | (a,fvs) <- rules_w_fvs, is_active a]
-- Finding the free variables of the INLINE pragma (if any)
unf = realIdUnfolding bndr -- Ignore any current loop-breaker flag
mb_unf_fvs = Core.stableUnfoldingVars unf
-- Find the "nd_inl" free vars; for the loop-breaker phase
inl_fvs = case mb_unf_fvs of
Nothing -> udFreeVars bndr_set rhs_usage1 -- No INLINE, use RHS
Just unf_fvs -> unf_fvs
-- We could check for an *active* INLINE (returning
-- emptyVarSet for an inactive one), but is_active
-- isn't the right thing (it tells about
-- RULE activation), so we'd need more plumbing
-----------------------------
occAnalRec :: SCC (Node Details)
-> (UsageDetails, [SeqCoreBind])
-> (UsageDetails, [SeqCoreBind])
-- The NonRec case is just like a Let (NonRec ...) above
occAnalRec (AcyclicSCC (ND { nd_bind_pair = pair, nd_uds = rhs_uds}, _, _))
(body_uds, binds)
| not (bndr `usedIn` body_uds)
= (body_uds, binds) -- See Note [Dead code]
| otherwise -- It's mentioned in the body
= (body_uds' +++ rhs_uds,
NonRec (pair `setPairBinder` tagged_bndr) : binds)
where
bndr = binderOfPair pair
(body_uds', tagged_bndr) = tagBinder body_uds bndr
-- The Rec case is the interesting one
-- See Note [Loop breaking]
occAnalRec (CyclicSCC nodes) (body_uds, binds)
| not (any (`usedIn` body_uds) bndrs) -- NB: look at body_uds, not total_uds
= (body_uds, binds) -- See Note [Dead code]
| otherwise -- At this point we always build a single Rec
= -- pprTrace "occAnalRec" (vcat
-- [ text "tagged nodes" <+> ppr tagged_nodes
-- , text "lb edges" <+> ppr loop_breaker_edges])
(final_uds, Rec pairs : binds)
where
bndrs = [binderOfPair b | (ND { nd_bind_pair = b }, _, _) <- nodes]
bndr_set = mkVarSet bndrs
----------------------------
-- Tag the binders with their occurrence info
tagged_nodes = map tag_node nodes
total_uds = foldl add_uds body_uds nodes
final_uds = total_uds `minusVarEnv` bndr_set
add_uds usage_so_far (nd, _, _) = usage_so_far +++ nd_uds nd
tag_node :: Node Details -> Node Details
tag_node (details@ND { nd_bind_pair = pair }, k, ks)
= (details { nd_bind_pair = pair `setPairBinder` setBinderOcc total_uds bndr }, k, ks)
where
bndr = binderOfPair pair
---------------------------
-- Now reconstruct the cycle
pairs :: [SeqCoreBindPair]
pairs | isEmptyVarSet weak_fvs = reOrderNodes 0 bndr_set weak_fvs tagged_nodes []
| otherwise = loopBreakNodes 0 bndr_set weak_fvs loop_breaker_edges []
-- If weak_fvs is empty, the loop_breaker_edges will include all
-- the edges in tagged_nodes, so there isn't any point in doing
-- a fresh SCC computation that will yield a single CyclicSCC result.
weak_fvs :: VarSet
weak_fvs = foldr (unionVarSet . nd_weak . fstOf3) emptyVarSet nodes
-- See Note [Choosing loop breakers] for loop_breaker_edges
loop_breaker_edges = map mk_node tagged_nodes
mk_node (details@(ND { nd_inl = inl_fvs }), k, _)
= (details, k, keysUFM (extendFvs_ rule_fv_env inl_fvs))
------------------------------------
rule_fv_env :: IdEnv IdSet
-- Maps a variable f to the variables from this group
-- mentioned in RHS of active rules for f
-- Domain is *subset* of bound vars (others have no rule fvs)
rule_fv_env = transClosureFV (mkVarEnv init_rule_fvs)
init_rule_fvs -- See Note [Finding rule RHS free vars]
= [ (binderOfPair pair, trimmed_rule_fvs)
| (ND { nd_bind_pair = pair, nd_active_rule_fvs = rule_fvs },_,_) <- nodes
, let trimmed_rule_fvs = rule_fvs `intersectVarSet` bndr_set
, not (isEmptyVarSet trimmed_rule_fvs)]
{-
@loopBreakSCC@ is applied to the list of (binder,rhs) pairs for a cyclic
strongly connected component (there's guaranteed to be a cycle). It returns the
same pairs, but
a) in a better order,
b) with some of the Ids having a IAmALoopBreaker pragma
The "loop-breaker" Ids are sufficient to break all cycles in the SCC. This means
that the simplifier can guarantee not to loop provided it never records an inlining
for these no-inline guys.
Furthermore, the order of the binds is such that if we neglect dependencies
on the no-inline Ids then the binds are topologically sorted. This means
that the simplifier will generally do a good job if it works from top to bottom,
recording inlinings for any Ids which aren't marked as "no-inline" as it goes.
-}
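-- Illustrative example (not from the original source): for a group
--     Rec { f = ...g... ; g = ...f... }
-- choosing g as the loop breaker makes loopBreakNodes/reOrderNodes emit the
-- bindings roughly as [ f, g{IAmALoopBreaker} ]: the non-loop-breaker f comes
-- first, so a top-to-bottom simplifier pass may record an inlining for f but
-- never for g, which is what guarantees termination.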
type Binding = SeqCoreBindPair
mk_loop_breaker :: Node Details -> Binding
mk_loop_breaker (ND { nd_bind_pair = pair }, _, _)
= pair `setPairBinder` setIdOccInfo bndr strongLoopBreaker
where
bndr = binderOfPair pair
mk_non_loop_breaker :: VarSet -> Node Details -> Binding
-- See Note [Weak loop breakers]
mk_non_loop_breaker used_in_rules (ND { nd_bind_pair = pair}, _, _)
| bndr `elemVarSet` used_in_rules = pair `setPairBinder` setIdOccInfo bndr weakLoopBreaker
| otherwise = pair
where
bndr = binderOfPair pair
udFreeVars :: VarSet -> UsageDetails -> VarSet
-- Find the subset of bndrs that are mentioned in uds
udFreeVars bndrs uds = intersectUFM_C (\b _ -> b) bndrs uds
loopBreakNodes :: Int
-> VarSet -- All binders
-> VarSet -- Binders whose dependencies may be "missing"
-- See Note [Weak loop breakers]
-> [Node Details]
-> [Binding] -- Append these to the end
-> [Binding]
-- Return the bindings sorted into a plausible order, and marked with loop breakers.
loopBreakNodes depth bndr_set weak_fvs nodes binds
= go (stronglyConnCompFromEdgedVerticesR nodes) binds
where
go [] binds = binds
go (scc:sccs) binds = loop_break_scc scc (go sccs binds)
loop_break_scc scc binds
= case scc of
AcyclicSCC node -> mk_non_loop_breaker weak_fvs node : binds
CyclicSCC [node] -> mk_loop_breaker node : binds
CyclicSCC nodes -> reOrderNodes depth bndr_set weak_fvs nodes binds
reOrderNodes :: Int -> VarSet -> VarSet -> [Node Details] -> [Binding] -> [Binding]
-- Choose a loop breaker, mark it no-inline,
-- do SCC analysis on the rest, and recursively sort them out
reOrderNodes _ _ _ [] _ = panic "reOrderNodes"
reOrderNodes depth bndr_set weak_fvs (node : nodes) binds
= -- pprTrace "reOrderNodes" (text "unchosen" <+> ppr unchosen $$
-- text "chosen" <+> ppr chosen_nodes) $
loopBreakNodes new_depth bndr_set weak_fvs unchosen $
(map mk_loop_breaker chosen_nodes ++ binds)
where
(chosen_nodes, unchosen) = choose_loop_breaker (score node) [node] [] nodes
approximate_loop_breaker = depth >= 2
new_depth | approximate_loop_breaker = 0
| otherwise = depth+1
-- After two iterations (d=0, d=1) give up
-- and approximate, returning to d=0
choose_loop_breaker :: Int -- Best score so far
-> [Node Details] -- Nodes with this score
-> [Node Details] -- Nodes with higher scores
-> [Node Details] -- Unprocessed nodes
-> ([Node Details], [Node Details])
-- This loop looks for the bind with the lowest score
-- to pick as the loop breaker. The rest accumulate in the 'acc' argument.
choose_loop_breaker _ loop_nodes acc []
= (loop_nodes, acc) -- Done
-- If approximate_loop_breaker is True, we pick *all*
-- nodes with lowest score, else just one
-- See Note [Complexity of loop breaking]
choose_loop_breaker loop_sc loop_nodes acc (node : nodes)
| sc < loop_sc -- Lower score so pick this new one
= choose_loop_breaker sc [node] (loop_nodes ++ acc) nodes
| approximate_loop_breaker && sc == loop_sc
= choose_loop_breaker loop_sc (node : loop_nodes) acc nodes
| otherwise -- Higher score so don't pick it
= choose_loop_breaker loop_sc loop_nodes (node : acc) nodes
where
sc = score node
score :: Node Details -> Int -- Higher score => less likely to be picked as loop breaker
score (ND { nd_bind_pair = pair@(binderOfPair -> bndr) }, _, _)
| not (isId bndr) = 100 -- A type or coercion variable is never a loop breaker
| isDFunId bndr = 9 -- Never choose a DFun as a loop breaker
-- Note [DFuns should not be loop breakers]
| Just _ <- Core.isStableCoreUnfolding_maybe (idUnfolding bndr)
= 3 -- Note [INLINE pragmas]
-- Data structures are more important than INLINE pragmas
-- so that dictionary/method recursion unravels
-- Note that this case hits all InlineRule things, so we
-- never look at 'rhs' for InlineRule stuff. That's right, because
-- 'rhs' is irrelevant for inlining things with an InlineRule
| BindTerm _ term <- pair
, is_con_app term = 5 -- Data types help with cases: Note [Constructor applications]
| trivial pair = 10 -- Practically certain to be inlined
-- Used to have also: && not (isExportedId bndr)
-- But I found this sometimes cost an extra iteration when we have
-- rec { d = (a,b); a = ...df...; b = ...df...; df = d }
-- where df is the exported dictionary. Then df makes a really
-- bad choice for loop breaker
-- If an Id is marked "never inline" then it makes a great loop breaker
-- The only reason for not checking that here is that it is rare
-- and I've never seen a situation where it makes a difference,
-- so it probably isn't worth the time to test on every binder
-- | isNeverActive (idInlinePragma bndr) = -10
| isOneOcc (idOccInfo bndr) = 2 -- Likely to be inlined
| Core.canUnfold (realIdUnfolding bndr) = 1
-- The Id has some kind of unfolding
-- Ignore loop-breaker-ness here because that is what we are setting!
| otherwise = 0
-- Checking for a constructor application
-- Cheap and cheerful; the simplifier moves casts out of the way
-- The lambda case is important to spot x = /\a. C (f a)
-- which comes up when C is a dictionary constructor and
-- f is a default method.
-- Example: the instance for Show (ST s a) in GHC.ST
--
-- However we *also* treat (\x. C p q) as a con-app-like thing,
-- Note [Closure conversion]
is_con_app (Var x) = isConLikeId x
is_con_app (Lam _ v) = is_con_app v
is_con_app (Compute _ c) = is_command_con_app c
is_con_app _ = False
is_command_con_app (Eval v _ _) = is_con_app v
is_command_con_app _ = False
trivial (BindTerm _ term) = isTrivialTerm term
trivial (BindJoin _ join) = isTrivialJoin join
{-
Note [Complexity of loop breaking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The loop-breaking algorithm knocks out one binder at a time, and
performs a new SCC analysis on the remaining binders. That can
behave very badly in tightly-coupled groups of bindings; in the
worst case it can be (N**2)*log N, because it does a full SCC
on N, then N-1, then N-2 and so on.
To avoid this, we switch plans after 2 (or whatever) attempts:
Plan A: pick one binder with the lowest score, make it
a loop breaker, and try again
Plan B: pick *all* binders with the lowest score, make them
all loop breakers, and try again
Since there are only a small finite number of scores, this will
terminate in a constant number of iterations, rather than O(N)
iterations.
You might think that it's very unlikely, but RULES make it much
more likely. Here's a real example from Trac #1969:
Rec { $dm = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm1
forall d. $dm Bool d = $s$dm2 #-}
dInt = MkD .... opInt ...
dBool = MkD .... opBool ...
opInt = $dm dInt
opBool = $dm dBool
$s$dm1 = \x. op dInt
$s$dm2 = \x. op dBool }
The RULES stuff means that we can't choose $dm as a loop breaker
(Note [Choosing loop breakers]), so we must choose at least (say)
opInt *and* opBool, and so on. The number of loop breakers is
linear in the number of instance declarations.
Note [INLINE pragmas]
~~~~~~~~~~~~~~~~~~~~~
Avoid choosing a function with an INLINE pragma as the loop breaker!
If such a function is mutually-recursive with a non-INLINE thing,
then the latter should be the loop-breaker.
----> Historical note, dating from when strictness wrappers
were generated from the strictness signatures:
Usually this is just a question of optimisation. But a particularly
bad case is wrappers generated by the demand analyser: if you make
them into a loop breaker you may get an infinite inlining loop. For
example:
rec {
$wfoo x = ....foo x....
{-loop brk-} foo x = ...$wfoo x...
}
The interface file sees the unfolding for $wfoo, and sees that foo is
strict (and hence it gets an auto-generated wrapper). Result: an
infinite inlining in the importing scope. So be a bit careful if you
change this. A good example is Tree.repTree in
nofib/spectral/minimax. If the repTree wrapper is chosen as the loop
breaker then compiling Game.hs goes into an infinite loop. This
happened when we gave is_con_app a lower score than inline candidates:
Tree.repTree
= __inline_me (/\a. \w w1 w2 ->
case Tree.$wrepTree @ a w w1 w2 of
{ (# ww1, ww2 #) -> Branch @ a ww1 ww2 })
Tree.$wrepTree
= /\a w w1 w2 ->
(# w2_smP, map a (Tree a) (Tree.repTree a w1 w) (w w2) #)
Here we do *not* want to choose 'repTree' as the loop breaker.
-----> End of historical note
Note [DFuns should not be loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's particularly bad to make a DFun into a loop breaker. See
Note [How instance declarations are translated] in TcInstDcls
We give DFuns a higher score than ordinary CONLIKE things because
if there's a choice we want the DFun to be the non-loop breaker. Eg
rec { sc = /\ a \$dC. $fBWrap (T a) ($fCT @ a $dC)
$fCT :: forall a_afE. (Roman.C a_afE) => Roman.C (Roman.T a_afE)
{-# DFUN #-}
$fCT = /\a \$dC. MkD (T a) ((sc @ a $dC) |> blah) ($ctoF @ a $dC)
}
Here 'sc' (the superclass) looks CONLIKE, but we'll never get to it
if we can't unravel the DFun first.
Note [Constructor applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's really really important to inline dictionaries. Real
example (the Enum Ordering instance from GHC.Base):
rec f = \ x -> case d of (p,q,r) -> p x
g = \ x -> case d of (p,q,r) -> q x
d = (v, f, g)
Here, f and g occur just once; but we can't inline them into d.
On the other hand we *could* simplify those case expressions if
we didn't stupidly choose d as the loop breaker.
But we won't because constructor args are marked "Many".
Inlining dictionaries is really essential to unravelling
the loops in static numeric dictionaries, see GHC.Float.
Note [Closure conversion]
~~~~~~~~~~~~~~~~~~~~~~~~~
We treat (\x. C p q) as a high-score candidate in the letrec scoring algorithm.
The immediate motivation came from the result of a closure-conversion transformation
which generated code like this:
data Clo a b = forall c. Clo (c -> a -> b) c
($:) :: Clo a b -> a -> b
Clo f env $: x = f env x
rec { plus = Clo plus1 ()
; plus1 _ n = Clo plus2 n
; plus2 Zero n = n
; plus2 (Succ m) n = Succ (plus $: m $: n) }
If we inline 'plus' and 'plus1', everything unravels nicely. But if
we choose 'plus1' as the loop breaker (which is entirely possible
otherwise), the loop does not unravel nicely.
@occAnalRhs@ deals with the question of bindings where the Id is marked
by an INLINE pragma. For these we record that anything which occurs
in its RHS occurs many times. This pessimistically assumes that the
inlined binder also occurs many times in its scope, but if it doesn't
we'll catch it next time round. At worst this costs an extra simplifier pass.
ToDo: try using the occurrence info for the inline'd binder.
[March 97] We do the same for atomic RHSs. Reason: see notes with loopBreakSCC.
[June 98, SLPJ] I've undone this change; I don't understand it. See notes with loopBreakSCC.
-}
occAnalRecRhs :: OccEnv -> SeqCoreBindPair -- Binder and rhs
-> (UsageDetails, SeqCoreBindPair)
-- Returned usage details covers only the RHS,
-- and *not* the RULE or INLINE template for the Id
occAnalRecRhs env rhs = occAnalRhs (rhsCtxt env) rhs
occAnalNonRecRhs :: OccEnv
-> SeqCoreBindPair -- Binder and rhs
-- Binder is already tagged with occurrence info
-> (UsageDetails, SeqCoreBindPair)
-- Returned usage details covers only the RHS,
-- and *not* the RULE or INLINE template for the Id
occAnalNonRecRhs env pair
= occAnalRhs rhs_env pair
where
-- See Note [Use one-shot info]
env1 = env { occ_one_shots = argOneShots OneShotLam dmd }
-- See Note [Cascading inlines]
rhs_env | certainly_inline = env1
| otherwise = rhsCtxt env1
certainly_inline -- See Note [Cascading inlines]
= case idOccInfo bndr of
OneOcc in_lam one_br _ -> not in_lam && one_br && active && not_stable
_ -> False
bndr = binderOfPair pair
dmd = idDemandInfo bndr
active = isAlwaysActive (idInlineActivation bndr)
not_stable = not (Core.isStableUnfolding (idUnfolding bndr))
addIdOccs :: UsageDetails -> VarSet -> UsageDetails
addIdOccs usage id_set = foldVarSet add usage id_set
where
add v u | isId v = addOneOcc u v NoOccInfo
| otherwise = u
-- Give a non-committal binder info (i.e. NoOccInfo) because
-- a) Many copies of the specialised thing can appear
-- b) We don't want to substitute a BIG expression inside a RULE
-- even if that's the only occurrence of the thing
-- (Same goes for INLINE.)
{-
Note [Cascading inlines]
~~~~~~~~~~~~~~~~~~~~~~~~
By default we use an rhsCtxt for the RHS of a binding. This tells the
occ anal that it's looking at an RHS, which has an effect in
occAnalApp. In particular, for constructor applications, it makes
the arguments appear to have NoOccInfo, so that we don't inline into
them. Thus x = f y
k = Just x
we do not want to inline x.
But there's a problem. Consider
x1 = a0 : []
x2 = a1 : x1
x3 = a2 : x2
g = f x3
First time round, it looks as if x1 and x2 occur as an arg of a
let-bound constructor ==> give them a many-occurrence.
But then x3 is inlined (unconditionally as it happens) and
next time round, x2 will be, and the next time round x1 will be
Result: multiple simplifier iterations. Sigh.
So, when analysing the RHS of x3 we notice that x3 will itself
definitely inline the next time round, and so we analyse x3's rhs in
an ordinary context, not rhsCtxt. Hence the "certainly_inline" stuff.
Annoyingly, we have to approximate SimplUtils.preInlineUnconditionally.
If we say "yes" when preInlineUnconditionally says "no" the simplifier iterates
indefinitely:
x = f y
k = Just x
inline ==>
k = Just (f y)
float ==>
x1 = f y
k = Just x1
This is worse than the slow cascade, so we only want to say "certainly_inline"
if it really is certain. Look at the note with preInlineUnconditionally
for the various clauses.
Expressions
~~~~~~~~~~~
-}
occAnalRhs :: OccEnv
-> SeqCoreBindPair -- Only the RHS is used
-> (UsageDetails,
SeqCoreBindPair) -- Binder unchanged
occAnalRhs env (BindTerm x term)
= let (details, term') = occAnalTerm env term in (details, BindTerm x term')
occAnalRhs env (BindJoin j join)
= let (details, join') = occAnalJoin env join in (details, BindJoin j join')
occAnalTerm :: OccEnv
-> SeqCoreTerm
-> (UsageDetails, -- Gives info only about the "interesting" Ids
SeqCoreTerm)
occAnalTerm _ expr@(Type _) = (emptyDetails, expr)
occAnalTerm _ expr@(Lit _) = (emptyDetails, expr)
occAnalTerm env expr@(Var v) = (mkOneOcc env v False, expr)
-- At one stage, I gathered the idRuleVars for v here too,
-- which in a way is the right thing to do.
-- But that went wrong right after specialisation, when
-- the *occurrences* of the overloaded function didn't have any
-- rules in them, so the *specialised* versions looked as if they
-- weren't used at all.
occAnalTerm _ (Coercion co)
= (addIdOccs emptyDetails (coVarsOfCo co), Coercion co)
-- See Note [Gather occurrences of coercion variables]
-- Ignore type variables altogether
-- (a) occurrences inside type lambdas (only) are not marked as InsideLam
-- (b) type variables not in environment
occAnalTerm env (Lam x body) | isTyVar x
= case occAnalTerm env body of { (body_usage, body') ->
(body_usage, Lam x body')
}
-- For value lambdas we do a special hack. Consider
-- (\x. \y. ...x...)
-- If we did nothing, x is used inside the \y, so would be marked
-- as dangerous to dup. But in the common case where the abstraction
-- is applied to two arguments this is over-pessimistic.
-- So instead, we just mark each binder with its occurrence
-- info in the *body* of the multiple lambda.
-- Then, the simplifier is careful when partially applying lambdas.
occAnalTerm env expr@(Lam _ _)
= case occAnalTerm env_body body of { ( body_usage, body' ) ->
let
(final_usage, tagged_binders) = tagLamBinders body_usage binders'
-- Use binders' to put one-shot info on the lambdas
really_final_usage
| all isOneShotBndr binders' = final_usage
| otherwise = mapVarEnv markInsideLam final_usage
in
(really_final_usage, mkLambdas tagged_binders body') }
where
(binders, body) = collectBinders expr
(env_body, binders') = oneShotGroup env binders
occAnalTerm env (Compute ty comm)
= case occAnalCommand env comm of { ( comm_usage, comm' ) ->
(comm_usage, Compute ty comm') }
occAnalKont :: OccEnv
-> UsageDetails -- ^ Usage details for the term
-> SeqCoreKont
-> (UsageDetails,
SeqCoreKont) -- ^ Usage details for the term *and* continuation
occAnalKont env uds (frames@(App {} : _), end)
= case occAnalArgs env args [] of { ( args_uds, args' ) ->
case occAnalKont env (uds +++ args_uds) kont' of { ( final_uds, (frames'', end') ) ->
(final_uds, (map App args' ++ frames'', end')) }}
where
(args, frames') = collectArgs frames
kont' = (frames', end)
occAnalKont env uds ([], Case bndr alts)
= case occAnalCase env Nothing bndr alts of { ( case_uds, kont' ) ->
(uds +++ case_uds, ([], kont')) }
{-
Note [Gather occurrences of coercion variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to gather info about what coercion variables appear, so that
we can sort them into the right place when doing dependency analysis.
-}
occAnalKont env uds (Tick tickish : frames, end)
| Core.Breakpoint _ ids <- tickish
= (mapVarEnv markInsideSCC usage
+++ mkVarEnv (zip ids (repeat NoOccInfo)),
(Tick tickish : frames', end'))
-- never substitute for any of the Ids in a Breakpoint
| Core.tickishScoped tickish
= (mapVarEnv markInsideSCC usage, (Tick tickish : frames', end'))
| otherwise
= (usage, (Tick tickish : frames', end'))
where
!(usage,(frames', end')) = occAnalKont env uds (frames, end)
occAnalKont env usage (Cast co : frames, end)
= let usage1 = markManyIf (isRhsEnv env) usage
-- If we see let x = y `cast` co
-- then mark y as 'Many' so that we don't
-- immediately inline y again.
usage2 = addIdOccs usage1 (coVarsOfCo co)
-- See Note [Gather occurrences of coercion variables]
in case occAnalKont env usage2 (frames, end) of { (usage3, (frames', end')) ->
(usage3, (Cast co : frames', end'))
}
occAnalKont _env uds kont@([], Return)
= (uds, kont)
occAnalJoin :: OccEnv
-> SeqCoreJoin
-> (UsageDetails,
SeqCoreJoin)
occAnalJoin env (Join bndrs body)
-- TODO Too much C&P from occAnalTerm/Lam
= case occAnalCommand env_body body of { ( body_usage, body' ) ->
let
(final_usage, tagged_valBndrs) = tagLamBinders body_usage valBndrs'
really_final_usage
| all isOneShotBndr valBndrs' = final_usage
| otherwise = mapVarEnv markInsideLam final_usage
in
(really_final_usage, Join (tyBndrs ++ tagged_valBndrs) body') }
where
(tyBndrs, valBndrs) = span isTyVar bndrs -- types always first in Join
-- It's tempting to say that we should just one-shot everything, since this
-- is a join point, but a *recursive* join point can be called more than
-- once.
(env_body, valBndrs') = oneShotGroup env valBndrs
occAnalCommand :: OccEnv
-> SeqCoreCommand
-> (UsageDetails,
SeqCoreCommand)
occAnalCommand env (Let bind comm)
= case occAnalCommand env comm of { ( body_usage, body' ) ->
case occAnalBind env env emptyVarEnv bind body_usage of { ( final_usage, binds' ) ->
(final_usage, addLets binds' body') }}
occAnalCommand env (Eval term frames end)
= occAnalCut env term (frames, end)
occAnalCommand env (Jump args j)
= occAnalJump env args j
occAnalCut :: OccEnv
-> SeqCoreTerm
-> SeqCoreKont
-> (UsageDetails,
SeqCoreCommand)
occAnalCut env (Var x) ([], Case bndr alts@(alt1 : otherAlts))
-- If a variable is scrutinised by a case with at least one non-default
-- alternative, mark it as appearing in an interesting context
| not (null otherAlts) || not (isDefaultAlt alt1)
= case occAnalCase env (Just (x, Nothing)) bndr alts of { ( kont_usage, end') ->
(kont_usage +++ mkOneOcc env x True, Eval (Var x) [] end') }
-- FIXME This logic could probably be made cleaner.
-- mkAltEnv (and hence occAnalCase) has two special cases it checks for: When
-- the scrutinee is a variable, and when it's a variable under a cast. The
-- original version got a scrutinee expression as an argument, so it could check
-- for itself, but we usually just hand occAnalCase the continuation, so we
-- have to look for those special cases here.
occAnalCut env (Var x) kont
| Just (co_maybe, fs, bndr, alts) <- match kont
= case occAnalCase env (Just (x, co_maybe)) bndr alts of { ( kont_usage, end') ->
(kont_usage +++ mkOneOcc env x False, Eval (Var x) fs end') }
where
match (fs@[Cast co], Case bndr alts) = Just (Just co, fs, bndr, alts)
match ([], Case bndr alts) = Just (Nothing, [], bndr, alts)
match _ = Nothing
occAnalCut env fun (frames@(App {} : _), end)
= let (args, frames') = collectArgs frames
in occAnalApp env fun args (frames', end)
occAnalCut env term kont
= case occAnalTerm (vanillaCtxt env) term of { ( term_usage, term' ) ->
case occAnalKont env term_usage kont of { ( final_usage, (frames', end') ) ->
( final_usage, Eval term' frames' end' ) }}
occAnalJump :: OccEnv
-> [SeqCoreArg]
-> JoinId
-> (UsageDetails,
SeqCoreCommand)
occAnalJump env args j
= case mapAndUnzip (occAnalTerm env) args of { ( arg_usages, new_args ) ->
let j_usage = mkOneOcc env j True
in ( foldr (+++) j_usage arg_usages, Jump new_args j ) }
{-
Applications are dealt with specially because we want
the "build hack" to work.
Note [Arguments of let-bound constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = let y = expensive x in
let z = (True,y) in
(case z of {(p,q)->q}, case z of {(p,q)->q})
We feel free to duplicate the WHNF (True,y), but that means
that y may be duplicated thereby.
If we aren't careful we duplicate the (expensive x) call!
Constructors are rather like lambdas in this way.
-}
occAnalApp :: OccEnv
-> SeqCoreTerm -> [SeqCoreTerm] -> SeqCoreKont
-> (UsageDetails, SeqCoreCommand)
occAnalApp env (Var fun) args kont
= case args_stuff of { (args_uds, args') ->
let
final_args_uds = markManyIf (isRhsEnv env && is_exp) args_uds
-- We mark the free vars of the argument of a constructor or PAP
-- as "many", if it is the RHS of a let(rec).
-- This means that nothing gets inlined into a constructor argument
-- position, which is what we want. Typically those constructor
-- arguments are just variables, or trivial expressions.
--
-- This is the *whole point* of the isRhsEnv predicate
-- See Note [Arguments of let-bound constructors]
in
case occAnalKont env (fun_uds +++ final_args_uds) kont of { (total_uds, (frames', end')) ->
(total_uds, Eval (Var fun) (map App args' ++ frames') end') }}
where
n_val_args = length (filter isValueArg args)
fun_uds = mkOneOcc env fun (n_val_args > 0)
is_exp = Core.isExpandableApp fun n_val_args
-- See Note [CONLIKE pragma] in BasicTypes
-- The definition of is_exp should match that in
-- Simplify.prepareRhs
one_shots = argsOneShots (idStrictness fun) n_val_args
-- See Note [Use one-shot info]
args_stuff = occAnalArgs env args one_shots
-- (foldr k z xs) may call k many times, but it never
-- shares a partial application of k; hence [False,True]
-- This means we can optimise
-- foldr (\x -> let v = ...x... in \y -> ...v...) z xs
-- by floating in the v
occAnalApp env fun args kont
= case occAnalTerm (addAppCtxt env args) fun of { (fun_uds, fun') ->
-- The addAppCtxt is a bit cunning. One iteration of the simplifier
-- often leaves behind beta redexs like
-- (\x y -> e) a1 a2
-- Here we would like to mark x,y as one-shot, and treat the whole
-- thing much like a let. We do this by pushing some True items
-- onto the context stack.
case occAnalArgs env args [] of { (args_uds, args') ->
case occAnalKont env (fun_uds +++ args_uds) kont of { (total_uds, (frames', end')) ->
(total_uds, Eval fun' (map App args' ++ frames') end') }}}
occAnalArgs :: OccEnv -> [SeqCoreTerm] -> [OneShots] -> (UsageDetails, [SeqCoreTerm])
occAnalArgs _ [] _
= (emptyDetails, [])
occAnalArgs env (arg:args) one_shots
| Type {} <- arg
= case occAnalArgs env args one_shots of { (uds, args') ->
(uds, arg:args') }
| otherwise
= case argCtxt env one_shots of { (arg_env, one_shots') ->
case occAnalTerm arg_env arg of { (uds1, arg') ->
case occAnalArgs env args one_shots' of { (uds2, args') ->
(uds1 +++ uds2, arg':args') }}}
markManyIf :: Bool -- If this is true
-> UsageDetails -- Then do markMany on this
-> UsageDetails
markManyIf True uds = mapVarEnv markMany uds
markManyIf False uds = uds
occAnalCase :: OccEnv -> Maybe (Id, Maybe Coercion) -> SeqCoreBndr
-> [SeqCoreAlt] -> (UsageDetails, SeqCoreEnd)
occAnalCase env scrut_maybe bndr alts
= case mapAndUnzip occ_anal_alt alts of { (alts_usage_s, alts') ->
let
alts_usage = foldr combineAltsUsageDetails emptyDetails alts_usage_s
(alts_usage1, tagged_bndr) = tag_case_bndr alts_usage bndr
in
(alts_usage1, Case tagged_bndr alts') }
where
-- Note [Case binder usage]
-- ~~~~~~~~~~~~~~~~~~~~~~~~
-- The case binder gets a usage of either "many" or "dead", never "one".
-- Reason: we like to inline single occurrences, to eliminate a binding,
-- but inlining a case binder *doesn't* eliminate a binding.
-- We *don't* want to transform
-- case x of w { (p,q) -> f w }
-- into
-- case x of w { (p,q) -> f (p,q) }
tag_case_bndr usage bndr
= case lookupVarEnv usage bndr of
Nothing -> (usage, setIdOccInfo bndr IAmDead)
Just _ -> (usage `delVarEnv` bndr, setIdOccInfo bndr NoOccInfo)
alt_env = mkAltEnv env scrut_maybe bndr
occ_anal_alt = occAnalAlt alt_env bndr
{-
Note [Use one-shot information]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The occurrence analyser propagates one-shot-lambda information in two situations:
* Applications: eg build (\cn -> blah)
Propagate one-shot info from the strictness signature of 'build' to
the \cn
* Let-bindings: eg let f = \c. let ... in \n -> blah
in (build f, build f)
Propagate one-shot info from the demand-info on 'f' to the
lambdas in its RHS (which may not be syntactically at the top)
Some of this is done by the demand analyser, but this way it happens
much earlier, taking advantage of the strictness signature of
imported functions.
Note [Binders in case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case x of y { (a,b) -> f y }
We treat 'a', 'b' as dead, because they don't physically occur in the
case alternative. (Indeed, a variable is dead iff it doesn't occur in
its scope in the output of OccAnal.) It really helps to know when
binders are unused. See esp the call to isDeadBinder in
Simplify.mkDupableAlt
In this example, though, the Simplifier will bring 'a' and 'b' back to
life, because it binds 'y' to (a,b) (imagine 'f' got inlined and
scrutinised y).
-}
occAnalAlt :: (OccEnv, Maybe (Id, SeqCoreTerm))
-> SeqCoreBndr
-> SeqCoreAlt
-> (UsageDetails, Alt IdWithOccInfo)
occAnalAlt (env, scrut_bind) case_bndr (Alt con bndrs rhs)
= case occAnalCommand env rhs of { (rhs_usage1, rhs1) ->
let
(rhs_usage2, rhs2) =
wrapProxy (occ_binder_swap env) scrut_bind case_bndr rhs_usage1 rhs1
(alt_usg, tagged_bndrs) = tagLamBinders rhs_usage2 bndrs
bndrs' = tagged_bndrs -- See Note [Binders in case alternatives]
in
(alt_usg, Alt con bndrs' rhs2) }
wrapProxy :: Bool -> Maybe (Id, SeqCoreTerm) -> Id -> UsageDetails -> SeqCoreCommand -> (UsageDetails, SeqCoreCommand)
wrapProxy enable_binder_swap (Just (scrut_var, rhs)) case_bndr body_usg body
| enable_binder_swap,
scrut_var `usedIn` body_usg
= ( body_usg' +++ unitVarEnv case_bndr NoOccInfo
, addLets [NonRec (BindTerm tagged_scrut_var rhs)] body )
where
(body_usg', tagged_scrut_var) = tagBinder body_usg scrut_var
wrapProxy _ _ _ body_usg body
= (body_usg, body)
{-
%************************************************************************
%* *
OccEnv
%* *
%************************************************************************
-}
data OccEnv
= OccEnv { occ_encl :: !OccEncl -- Enclosing context information
, occ_one_shots :: !OneShots -- Tells about linearity
, occ_gbl_scrut :: GlobalScruts
, occ_rule_act :: Activation -> Bool -- Which rules are active
-- See Note [Finding rule RHS free vars]
, occ_binder_swap :: !Bool -- enable the binder_swap
-- See CorePrep Note [Dead code in CorePrep]
}
type GlobalScruts = IdSet -- See Note [Binder swap on GlobalId scrutinees]
-----------------------------
-- OccEncl is used to control whether to inline into constructor arguments
-- For example:
-- x = (p,q) -- Don't inline p or q
-- y = /\a -> (p a, q a) -- Still don't inline p or q
-- z = f (p,q) -- Do inline p,q; it may make a rule fire
-- So OccEncl tells enough about the context to know what to do when
-- we encounter a constructor application or PAP.
data OccEncl
= OccRhs -- RHS of let(rec), albeit perhaps inside a type lambda
-- Don't inline into constructor args here
| OccVanilla -- Argument of function, body of lambda, scrutinee of case etc.
-- Do inline into constructor args here
instance Outputable OccEncl where
ppr OccRhs = ptext (sLit "occRhs")
ppr OccVanilla = ptext (sLit "occVanilla")
type OneShots = [OneShotInfo]
-- [] No info
--
-- one_shot_info:ctxt Analysing a function-valued expression that
-- will be applied as described by one_shot_info
initOccEnv :: (Activation -> Bool) -> OccEnv
initOccEnv active_rule
= OccEnv { occ_encl = OccVanilla
, occ_one_shots = []
, occ_gbl_scrut = emptyVarSet -- PE emptyVarEnv emptyVarSet
, occ_rule_act = active_rule
, occ_binder_swap = True }
vanillaCtxt :: OccEnv -> OccEnv
vanillaCtxt env = env { occ_encl = OccVanilla, occ_one_shots = [] }
rhsCtxt :: OccEnv -> OccEnv
rhsCtxt env = env { occ_encl = OccRhs, occ_one_shots = [] }
argCtxt :: OccEnv -> [OneShots] -> (OccEnv, [OneShots])
argCtxt env []
= (env { occ_encl = OccVanilla, occ_one_shots = [] }, [])
argCtxt env (one_shots:one_shots_s)
= (env { occ_encl = OccVanilla, occ_one_shots = one_shots }, one_shots_s)
isRhsEnv :: OccEnv -> Bool
isRhsEnv (OccEnv { occ_encl = OccRhs }) = True
isRhsEnv (OccEnv { occ_encl = OccVanilla }) = False
oneShotGroup :: OccEnv -> [SeqCoreBndr]
-> ( OccEnv
, [SeqCoreBndr] )
-- The result binders have one-shot-ness set that they might not have had originally.
-- This happens in (build (\cn -> e)). Here the occurrence analyser
-- linearity context knows that c,n are one-shot, and it records that fact in
-- the binder. This is useful to guide subsequent float-in/float-out transformations
oneShotGroup env@(OccEnv { occ_one_shots = ctxt }) bndrs
= go ctxt bndrs []
where
go ctxt [] rev_bndrs
= ( env { occ_one_shots = ctxt, occ_encl = OccVanilla }
, reverse rev_bndrs )
go [] bndrs rev_bndrs
= ( env { occ_one_shots = [], occ_encl = OccVanilla }
, reverse rev_bndrs ++ bndrs )
go ctxt (bndr:bndrs) rev_bndrs
| isId bndr
= case ctxt of
[] -> go [] bndrs (bndr : rev_bndrs)
(one_shot : ctxt) -> go ctxt bndrs (bndr': rev_bndrs)
where
bndr' = updOneShotInfo bndr one_shot
| otherwise
= go ctxt bndrs (bndr:rev_bndrs)
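-- A small illustrative case (hypothetical, not from the original source):
-- with occ_one_shots = [OneShotLam] and binders [c, n], 'go' tags c via
-- updOneShotInfo, runs out of context, and returns n untouched; in the
-- build example above both c and n would typically get one-shot info,
-- because the context supplies two entries.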
addAppCtxt :: OccEnv -> [SeqCoreTerm] -> OccEnv
addAppCtxt env@(OccEnv { occ_one_shots = ctxt }) args
= env { occ_one_shots = replicate (length (filter isValueArg args)) OneShotLam ++ ctxt }
transClosureFV :: UniqFM VarSet -> UniqFM VarSet
-- If (f,g), (g,h) are in the input, then (f,h) is in the output
-- as well as (f,g), (g,h)
transClosureFV env
| no_change = env
| otherwise = transClosureFV (listToUFM new_fv_list)
where
(no_change, new_fv_list) = mapAccumL bump True (ufmToList env)
bump no_change (b,fvs)
| no_change_here = (no_change, (b,fvs))
| otherwise = (False, (b,new_fvs))
where
(new_fvs, no_change_here) = extendFvs env fvs
-------------
extendFvs_ :: UniqFM VarSet -> VarSet -> VarSet
extendFvs_ env s = fst (extendFvs env s) -- Discard the Bool flag
extendFvs :: UniqFM VarSet -> VarSet -> (VarSet, Bool)
-- (extendFVs env s) returns
-- (s `union` env(s), env(s) `subset` s)
extendFvs env s
| isNullUFM env
= (s, True)
| otherwise
= (s `unionVarSet` extras, extras `subVarSet` s)
where
extras :: VarSet -- env(s)
extras = foldUFM unionVarSet emptyVarSet $
intersectUFM_C (\x _ -> x) env s
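-- Worked example (illustrative only): starting from [f :-> {g}, g :-> {h}],
-- the first pass of transClosureFV grows f's set to {g,h} (because g maps to
-- {h}) and leaves g's set alone; the second pass finds nothing new, so the
-- fixpoint returned is [f :-> {g,h}, g :-> {h}].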
{-
%************************************************************************
%* *
Binder swap
%* *
%************************************************************************
Note [Binder swap]
~~~~~~~~~~~~~~~~~~
We do these two transformations right here:
(1) case x of b { pi -> ri }
==>
case x of b { pi -> let x=b in ri }
(2) case (x |> co) of b { pi -> ri }
==>
case (x |> co) of b { pi -> let x = b |> sym co in ri }
Why (2)? See Note [Case of cast]
In both cases, in a particular alternative (pi -> ri), we only
add the binding if
(a) x occurs free in (pi -> ri)
(ie it occurs in ri, but is not bound in pi)
(b) the pi does not bind b (or the free vars of co)
We need (a) and (b) for the inserted binding to be correct.
For the alternatives where we inject the binding, we can transfer
all x's OccInfo to b. And that is the point.
Notice that
* The deliberate shadowing of 'x'.
* That (a) rapidly becomes false, so no bindings are injected.
The reason for doing these transformations here is because it allows
us to adjust the OccInfo for 'x' and 'b' as we go.
* Suppose the only occurrences of 'x' are the scrutinee and in the
ri; then this transformation makes it occur just once, and hence
get inlined right away.
* If we do this in the Simplifier, we don't know whether 'x' is used
in ri, so we are forced to pessimistically zap b's OccInfo even
though it is typically dead (ie neither it nor x appear in the
ri). There's nothing actually wrong with zapping it, except that
it's kind of nice to know which variables are dead. My nose
tells me to keep this information as robustly as possible.
The Maybe (Id, SeqCoreTerm) passed to occAnalAlt is the extra let-binding
{x=b}; it's Nothing if the binder-swap doesn't happen.
There is a danger though. Consider
let v = x +# y
in case (f v) of w -> ...v...v...
And suppose that (f v) expands to just v. Then we'd like to
use 'w' instead of 'v' in the alternative. But it may be too
late; we may have substituted the (cheap) x+#y for v in the
same simplifier pass that reduced (f v) to v.
I think this is just too bad. CSE will recover some of it.
Note [Case of cast]
~~~~~~~~~~~~~~~~~~~
Consider case (x `cast` co) of b { I# ->
... (case (x `cast` co) of {...}) ...
We'd like to eliminate the inner case. That is the motivation for
equation (2) in Note [Binder swap]. When we get to the inner case, we
inline x, cancel the casts, and away we go.
Note [Binder swap on GlobalId scrutinees]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the scrutinee is a GlobalId we must take care in two ways
i) In order to *know* whether 'x' occurs free in the RHS, we need its
occurrence info. BUT, we don't gather occurrence info for
GlobalIds. That's the reason for the (small) occ_gbl_scrut env in
OccEnv is for: it says "gather occurrence info for these".
ii) We must call localiseId on 'x' first, in case it's a GlobalId, or
has an External Name. See, for example, SimplEnv Note [Global Ids in
the substitution].
Note [Zap case binders in proxy bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From the original
case x of cb(dead) { p -> ...x... }
we will get
case x of cb(live) { p -> let x = cb in ...x... }
Core Lint never expects to find an *occurrence* of an Id marked
as Dead, so we must zap the OccInfo on cb before making the
binding x = cb. See Trac #5028.
Historical note [no-case-of-case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We *used* to suppress the binder-swap in case expressions when
-fno-case-of-case is on. Old remarks:
"This happens in the first simplifier pass,
and enhances full laziness. Here's the bad case:
f = \ y -> ...(case x of I# v -> ...(case x of ...) ... )
If we eliminate the inner case, we trap it inside the I# v -> arm,
which might prevent some full laziness happening. I've seen this
in action in spectral/cichelli/Prog.hs:
[(m,n) | m <- [1..max], n <- [1..max]]
Hence the check for NoCaseOfCase."
However, now the full-laziness pass itself reverses the binder-swap, so this
check is no longer necessary.
Historical note [Suppressing the case binder-swap]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This old note describes a problem that is also fixed by doing the
binder-swap in OccAnal:
There is another situation when it might make sense to suppress the
case-expression binder-swap. If we have
case x of w1 { DEFAULT -> case x of w2 { A -> e1; B -> e2 }
...other cases .... }
We'll perform the binder-swap for the outer case, giving
case x of w1 { DEFAULT -> case w1 of w2 { A -> e1; B -> e2 }
...other cases .... }
But there is no point in doing it for the inner case, because w1 can't
be inlined anyway. Furthermore, doing the case-swapping involves
zapping w2's occurrence info (see paragraphs that follow), and that
forces us to bind w2 when doing case merging. So we get
case x of w1 { A -> let w2 = w1 in e1
B -> let w2 = w1 in e2
...other cases .... }
This is plain silly in the common case where w2 is dead.
Even so, I can't see a good way to implement this idea. I tried
not doing the binder-swap if the scrutinee was already evaluated
but that failed big-time:
data T = MkT !Int
case v of w { MkT x ->
case x of x1 { I# y1 ->
case x of x2 { I# y2 -> ...
Notice that because MkT is strict, x is marked "evaluated". But to
eliminate the last case, we must either make sure that x (as well as
x1) has unfolding MkT y1. The straightforward thing to do is to do
the binder-swap. So this whole note is a no-op.
It's fixed by doing the binder-swap in OccAnal because we can do the
binder-swap unconditionally and still get occurrence analysis
information right.
-}
mkAltEnv :: OccEnv -> Maybe (Id, Maybe Coercion) -> Id -> (OccEnv, Maybe (Id, SeqCoreTerm))
-- Does three things: a) makes the occ_encl = OccVanilla
-- b) extends the GlobalScruts if possible
-- c) returns a proxy mapping, binding the scrutinee
-- to the case binder, if possible
mkAltEnv env@(OccEnv { occ_gbl_scrut = pe }) scrut case_bndr
= case scrut of
Just (v, Nothing) -> add_scrut v case_bndr'
Just (v, Just co) -> add_scrut v (mkCast case_bndr' (mkSymCo co))
-- See Note [Case of cast]
_ -> (env { occ_encl = OccVanilla }, Nothing)
where
add_scrut v rhs = ( env { occ_encl = OccVanilla, occ_gbl_scrut = pe `extendVarSet` v }
, Just (localise v, rhs) )
case_bndr' = Var (zapIdOccInfo case_bndr) -- See Note [Zap case binders in proxy bindings]
localise scrut_var = mkLocalId (localiseName (idName scrut_var)) (idType scrut_var)
-- Localise the scrut_var before shadowing it; we're making a
-- new binding for it, and it might have an External Name, or
-- even be a GlobalId; Note [Binder swap on GlobalId scrutinees]
-- Also we don't want any INLINE or NOINLINE pragmas!
{-
%************************************************************************
%* *
\subsection[OccurAnal-types]{UsageDetails and OccInfo}
%* *
%************************************************************************
-}
type UsageDetails = IdEnv OccInfo -- A finite map from ids to their usage
-- INVARIANT: never IAmDead
-- (Deadness is signalled by not being in the map at all)
(+++), combineAltsUsageDetails
:: UsageDetails -> UsageDetails -> UsageDetails
(+++) usage1 usage2
= plusVarEnv_C addOccInfo usage1 usage2
combineAltsUsageDetails usage1 usage2
= plusVarEnv_C orOccInfo usage1 usage2
addOneOcc :: UsageDetails -> Id -> OccInfo -> UsageDetails
addOneOcc usage id info
= plusVarEnv_C addOccInfo usage (unitVarEnv id info)
-- ToDo: make this more efficient
emptyDetails :: UsageDetails
emptyDetails = (emptyVarEnv :: UsageDetails)
usedIn :: Id -> UsageDetails -> Bool
v `usedIn` details = isExportedId v || v `elemVarEnv` details
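-- Illustrative example (not part of the original source): with
--   o = OneOcc False True False
-- combining two occurrences of the same Id sequentially,
--   unitVarEnv x o +++ unitVarEnv x o
-- maps x to NoOccInfo (addOccInfo: two certain occurrences count as "many"),
-- while
--   combineAltsUsageDetails (unitVarEnv x o) (unitVarEnv x o)
-- maps x to OneOcc False False False (orOccInfo: still at most one occurrence
-- per execution path, but no longer confined to a single branch).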
type IdWithOccInfo = Id
tagLamBinders :: UsageDetails -- Of scope
-> [Id] -- Binders
-> (UsageDetails, -- Details with binders removed
[IdWithOccInfo]) -- Tagged binders
-- Used for lambda and case binders
-- It copes with the fact that lambda bindings can have InlineRule
-- unfoldings, used for join points
tagLamBinders usage binders = usage' `seq` (usage', bndrs')
where
(usage', bndrs') = mapAccumR tag_lam usage binders
tag_lam usage bndr = (usage2, setBinderOcc usage bndr)
where
usage1 = usage `delVarEnv` bndr
usage2 | isId bndr = addIdOccs usage1 (Core.idUnfoldingVars bndr)
| otherwise = usage1
tagBinder :: UsageDetails -- Of scope
-> Id -- Binder
-> (UsageDetails, -- Details with binder removed
IdWithOccInfo) -- Tagged binder
tagBinder usage binder
= let
usage' = usage `delVarEnv` binder
binder' = setBinderOcc usage binder
in
usage' `seq` (usage', binder')
setBinderOcc :: UsageDetails -> SeqCoreBndr -> SeqCoreBndr
setBinderOcc usage bndr
| isTyVar bndr = bndr
| isExportedId bndr = case idOccInfo bndr of
NoOccInfo -> bndr
_ -> setIdOccInfo bndr NoOccInfo
-- Don't use local usage info for visible-elsewhere things
-- BUT *do* erase any IAmALoopBreaker annotation, because we're
-- about to re-generate it and it shouldn't be "sticky"
| otherwise = setIdOccInfo bndr occ_info
where
occ_info = lookupVarEnv usage bndr `orElse` IAmDead
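-- For instance (illustrative, assuming x is a local, non-exported Id):
--   tagBinder (unitVarEnv x (OneOcc False True False)) x
-- deletes x from the usage and returns x tagged with that OneOcc info, while
--   tagBinder emptyVarEnv x
-- returns x tagged IAmDead, via the `orElse` default in setBinderOcc.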
{-
%************************************************************************
%* *
\subsection{Operations over OccInfo}
%* *
%************************************************************************
-}
mkOneOcc :: OccEnv -> Id -> InterestingCxt -> UsageDetails
mkOneOcc env id int_cxt
| isLocalId id
= unitVarEnv id (OneOcc False True int_cxt)
| id `elemVarEnv` occ_gbl_scrut env
= unitVarEnv id NoOccInfo
| otherwise
= emptyDetails
markMany, markInsideLam, markInsideSCC :: OccInfo -> OccInfo
markMany _ = NoOccInfo
markInsideSCC occ = markInsideLam occ
-- inside an SCC, we can inline lambdas only.
markInsideLam (OneOcc _ one_br int_cxt) = OneOcc True one_br int_cxt
markInsideLam occ = occ
addOccInfo, orOccInfo :: OccInfo -> OccInfo -> OccInfo
addOccInfo a1 a2 = assert( not (isDeadOcc a1 || isDeadOcc a2) )
NoOccInfo -- Both branches are at least One
-- (Argument is never IAmDead)
-- (orOccInfo orig new) is used
-- when combining occurrence info from branches of a case
orOccInfo (OneOcc in_lam1 _ int_cxt1)
(OneOcc in_lam2 _ int_cxt2)
= OneOcc (in_lam1 || in_lam2)
False -- False, because it occurs in both branches
(int_cxt1 && int_cxt2)
orOccInfo a1 a2 = assert( not (isDeadOcc a1 || isDeadOcc a2) )
NoOccInfo
| lukemaurer/sequent-core | src/Language/SequentCore/OccurAnal.hs | bsd-3-clause | 81,219 | 0 | 18 | 22,403 | 9,345 | 5,097 | 4,248 | 632 | 7 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
module Fragment.TyArr.Ast.Error (
ErrExpectedTyArr(..)
, AsExpectedTyArr(..)
, expectTyArr
) where
import Control.Monad.Except (MonadError)
import Control.Monad.Error.Lens (throwing)
import Control.Lens (preview)
import Control.Lens.Prism (Prism')
import Control.Lens.TH (makePrisms)
import Ast.Type
import Ast.Error
import Fragment.TyArr.Ast.Type
data ErrExpectedTyArr ki ty a = ErrExpectedTyArr (Type ki ty a)
deriving (Eq, Ord, Show)
makePrisms ''ErrExpectedTyArr
class AsExpectedTyArr e ki ty a where -- | e -> ty, e -> a where
_ExpectedTyArr :: Prism' e (Type ki ty a)
instance AsExpectedTyArr (ErrExpectedTyArr ki ty a) ki ty a where
_ExpectedTyArr = _ErrExpectedTyArr
instance {-# OVERLAPPABLE #-} AsExpectedTyArr (ErrSum xs) ki ty a => AsExpectedTyArr (ErrSum (x ': xs)) ki ty a where
_ExpectedTyArr = _ErrNext . _ExpectedTyArr
instance {-# OVERLAPPING #-} AsExpectedTyArr (ErrSum (ErrExpectedTyArr ki ty a ': xs)) ki ty a where
_ExpectedTyArr = _ErrNow . _ExpectedTyArr
expectTyArr :: (MonadError e m, AsExpectedTyArr e ki ty a, AsTyArr ki ty) => Type ki ty a -> m (Type ki ty a, Type ki ty a)
expectTyArr ty =
case preview _TyArr ty of
Just (tyArg, tyRet) -> return (tyArg, tyRet)
_ -> throwing _ExpectedTyArr ty
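-- A usage sketch (illustrative only; it assumes a concrete error type with an
-- 'AsExpectedTyArr' instance and that '_TyArr' from 'AsTyArr' is a lawful
-- prism; 'tyA', 'tyB' and 'someNonArrowTy' are hypothetical 'Type ki ty a'
-- values):
--
-- > expectTyArr (review _TyArr (tyA, tyB))  -- returns (tyA, tyB)
-- > expectTyArr someNonArrowTy              -- reports via _ExpectedTyArr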
| dalaing/type-systems | src/Fragment/TyArr/Ast/Error.hs | bsd-3-clause | 1,687 | 0 | 10 | 280 | 444 | 246 | 198 | 36 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module LiteralWithEnvVar ( script ) where
import Prelude
import Shell
-- This test ensures that we escape string literals properly if they
-- have an embedded variable expansion.
script :: ShellM ()
script = do
run $ command "echo" ["${SHELL}"]
return ()
| travitch/shellDSL | tests/inputs/LiteralWithEnvVar.hs | bsd-3-clause | 296 | 0 | 9 | 52 | 56 | 31 | 25 | 8 | 1 |
{-
<TEST>
{- MISSING HASH #-} -- {-# MISSING HASH #-}
<COMMENT> {- INLINE X -}
{- INLINE Y -} -- {-# INLINE Y #-}
{- INLINE[~k] f -} -- {-# INLINE[~k] f #-}
{- NOINLINE Y -} -- {-# NOINLINE Y #-}
{- UNKNOWN Y -}
<COMMENT> INLINE X
</TEST>
-}
module Hint.Comment(commentHint) where
import Hint.Type
import Data.Char
import Data.List
import Util
pragmas = words $
"LANGUAGE OPTIONS_GHC INCLUDE WARNING DEPRECATED MINIMAL INLINE NOINLINE INLINABLE " ++
"CONLIKE LINE SPECIALIZE SPECIALISE UNPACK NOUNPACK SOURCE"
commentHint :: Comment -> [Idea]
commentHint c@(Comment True span s)
| "#" `isSuffixOf` s && not ("#" `isPrefixOf` s) = [suggest "Fix pragma markup" c $ '#':s]
| name `elem` pragmas = [suggest "Use pragma syntax" c $ "# " ++ trim s ++ " #"]
where name = takeWhile (\x -> isAlphaNum x || x == '_') $ dropWhile isSpace s
commentHint _ = []
suggest :: String -> Comment -> String -> Idea
suggest msg (Comment typ pos s1) s2 = rawIdea Warning msg pos (f s1) (Just $ f s2) []
where f s = if typ then "{-" ++ s ++ "-}" else "--" ++ s
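-- Worked trace (illustrative; 'sp' stands for an arbitrary source span):
-- for the block comment {- INLINE foo -}, i.e. Comment True sp " INLINE foo ",
-- the guard extracts the name "INLINE", which is in 'pragmas', so commentHint
-- yields suggest "Use pragma syntax" applied to "# INLINE foo #"; 'suggest'
-- then re-wraps that as "{-# INLINE foo #-}" (typ is True), matching the
-- <TEST> examples at the top of this module.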
| fpco/hlint | src/Hint/Comment.hs | bsd-3-clause | 1,075 | 0 | 13 | 231 | 306 | 161 | 145 | 17 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Provers.SExpr
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Parsing of S-expressions (mainly used for parsing SMT-Lib get-value output)
-----------------------------------------------------------------------------
module Data.SBV.Provers.SExpr where
import Control.Monad.Error () -- for Monad (Either String) instance
import Data.Char (isDigit, ord)
import Data.List (isPrefixOf)
import Numeric (readInt, readDec, readHex, fromRat)
import Data.SBV.BitVectors.AlgReals
import Data.SBV.BitVectors.Data (nan, infinity)
-- | ADT S-Expression format, suitable for representing get-model output of SMT-Lib
data SExpr = ECon String
| ENum Integer
| EReal AlgReal
| EFloat Float
| EDouble Double
| EApp [SExpr]
deriving Show
-- | Parse a string into an SExpr, potentially failing with an error message
parseSExpr :: String -> Either String SExpr
parseSExpr inp = do (sexp, extras) <- parse inpToks
if null extras
then return sexp
else die "Extra tokens after valid input"
where inpToks = let cln "" sofar = sofar
cln ('(':r) sofar = cln r (" ( " ++ sofar)
cln (')':r) sofar = cln r (" ) " ++ sofar)
cln (':':':':r) sofar = cln r (" :: " ++ sofar)
cln (c:r) sofar = cln r (c:sofar)
in reverse (map reverse (words (cln inp "")))
die w = fail $ "SBV.Provers.SExpr: Failed to parse S-Expr: " ++ w
++ "\n*** Input : <" ++ inp ++ ">"
parse [] = die "ran out of tokens"
parse ("(":toks) = do (f, r) <- parseApp toks []
f' <- cvt (EApp f)
return (f', r)
parse (")":_) = die "extra tokens after close paren"
parse [tok] = do t <- pTok tok
return (t, [])
parse _ = die "ill-formed s-expr"
parseApp [] _ = die "failed to grab s-expr application"
parseApp (")":toks) sofar = return (reverse sofar, toks)
parseApp ("(":toks) sofar = do (f, r) <- parse ("(":toks)
parseApp r (f : sofar)
parseApp (tok:toks) sofar = do t <- pTok tok
parseApp toks (t : sofar)
pTok "false" = return $ ENum 0
pTok "true" = return $ ENum 1
pTok ('0':'b':r) = mkNum $ readInt 2 (`elem` "01") (\c -> ord c - ord '0') r
pTok ('b':'v':r) = mkNum $ readDec (takeWhile (/= '[') r)
pTok ('#':'b':r) = mkNum $ readInt 2 (`elem` "01") (\c -> ord c - ord '0') r
pTok ('#':'x':r) = mkNum $ readHex r
pTok n
| not (null n) && isDigit (head n)
= if '.' `elem` n then getReal n
else mkNum $ readDec n
pTok n = return $ ECon n
mkNum [(n, "")] = return $ ENum n
mkNum _ = die "cannot read number"
getReal n = return $ EReal $ mkPolyReal (Left (exact, n'))
where exact = not ("?" `isPrefixOf` reverse n)
n' | exact = n
| True = init n
-- simplify numbers and root-obj values
cvt (EApp [ECon "/", EReal a, EReal b]) = return $ EReal (a / b)
cvt (EApp [ECon "/", EReal a, ENum b]) = return $ EReal (a / fromInteger b)
cvt (EApp [ECon "/", ENum a, EReal b]) = return $ EReal (fromInteger a / b)
cvt (EApp [ECon "/", ENum a, ENum b]) = return $ EReal (fromInteger a / fromInteger b)
cvt (EApp [ECon "-", EReal a]) = return $ EReal (-a)
cvt (EApp [ECon "-", ENum a]) = return $ ENum (-a)
-- bit-vector value as CVC4 prints: (_ bv0 16) for instance
cvt (EApp [ECon "_", ENum a, ENum _b]) = return $ ENum a
cvt (EApp [ECon "root-obj", EApp (ECon "+":trms), ENum k]) = do ts <- mapM getCoeff trms
return $ EReal $ mkPolyReal (Right (k, ts))
cvt (EApp [ECon "as", n, EApp [ECon "_", ECon "FP", ENum 11, ENum 53]]) = getDouble n
cvt (EApp [ECon "as", n, EApp [ECon "_", ECon "FP", ENum 8, ENum 24]]) = getFloat n
cvt x = return x
getCoeff (EApp [ECon "*", ENum k, EApp [ECon "^", ECon "x", ENum p]]) = return (k, p) -- kx^p
getCoeff (EApp [ECon "*", ENum k, ECon "x" ] ) = return (k, 1) -- kx
getCoeff ( EApp [ECon "^", ECon "x", ENum p] ) = return (1, p) -- x^p
getCoeff ( ECon "x" ) = return (1, 1) -- x
getCoeff ( ENum k ) = return (k, 0) -- k
getCoeff x = die $ "Cannot parse a root-obj,\nProcessing term: " ++ show x
getDouble (ECon s) = case (s, rdFP (dropWhile (== '+') s)) of
("plusInfinity", _ ) -> return $ EDouble infinity
("minusInfinity", _ ) -> return $ EDouble (-infinity)
("NaN", _ ) -> return $ EDouble nan
(_, Just v) -> return $ EDouble v
_ -> die $ "Cannot parse a double value from: " ++ s
getDouble (EReal r) = return $ EDouble $ fromRat $ toRational r
getDouble x = die $ "Cannot parse a double value from: " ++ show x
getFloat (ECon s) = case (s, rdFP (dropWhile (== '+') s)) of
("plusInfinity", _ ) -> return $ EFloat infinity
("minusInfinity", _ ) -> return $ EFloat (-infinity)
("NaN", _ ) -> return $ EFloat nan
(_, Just v) -> return $ EFloat v
_ -> die $ "Cannot parse a float value from: " ++ s
getFloat (EReal r) = return $ EFloat $ fromRat $ toRational r
getFloat x = die $ "Cannot parse a float value from: " ++ show x
        -- | Parses Z3-style floating point literals such as 1.321p5 (a 'p' exponent scales by 2^5) or 1.2123e9 (an 'e' exponent scales by 10^9).
rdFP :: (Read a, RealFloat a) => String -> Maybe a
rdFP s = case break (`elem` "pe") s of
(m, 'p':e) -> rd m >>= \m' -> rd e >>= \e' -> return $ m' * ( 2 ** e')
(m, 'e':e) -> rd m >>= \m' -> rd e >>= \e' -> return $ m' * (10 ** e')
(m, "") -> rd m
_ -> Nothing
where rd v = case reads v of
[(n, "")] -> Just n
_ -> Nothing
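-- Illustrative sketch (added, not part of the original module): a tiny
-- get-value style answer goes through 'parseSExpr' as
--   parseSExpr "((x #x1A))"  -->  Right (EApp [EApp [ECon "x", ENum 26]])
-- because "#x1A" is read as the hexadecimal literal 26.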
| dylanmc/cryptol | sbv/Data/SBV/Provers/SExpr.hs | bsd-3-clause | 7,288 | 0 | 15 | 3,120 | 2,379 | 1,226 | 1,153 | 101 | 49 |
{-# LANGUAGE StandaloneDeriving, DeriveGeneric #-}
module Fragnix.Core.Slice where
import Data.Aeson (FromJSON, ToJSON)
import Data.Hashable (Hashable)
import Data.Text (Text)
import GHC.Generics (Generic)
-- Slices
type SliceID = Text
data Slice = Slice
{ sliceID :: SliceID
-- ^ The hash by which this slice is identified.
, language :: Language
-- ^ The language extensions which have to be enabled in order to
-- be able to compile this slice.
, fragment :: Fragment
-- ^ The actual source code contained in this slice.
, uses :: [Use]
-- ^ The dependencies of this slice.
, instances :: [Instance]
    -- ^ The typeclass instances that this slice provides.
}
deriving instance Show Slice
deriving instance Eq Slice
deriving instance Ord Slice
deriving instance Generic Slice
instance ToJSON Slice
instance FromJSON Slice
-- Language
type GHCExtension = Text
-- | A list of GHC language extensions.
data Language = Language { extensions :: [GHCExtension] }
deriving instance Show Language
deriving instance Eq Language
deriving instance Ord Language
deriving instance Generic Language
instance ToJSON Language
instance FromJSON Language
instance Hashable Language
-- Fragment
type SourceCode = Text
-- | Haskell source code fragments.
data Fragment = Fragment [SourceCode]
deriving instance Show Fragment
deriving instance Eq Fragment
deriving instance Ord Fragment
deriving instance Generic Fragment
instance ToJSON Fragment
instance FromJSON Fragment
instance Hashable Fragment
-- Use
type Qualification = Text
data Use = Use
{ qualification :: (Maybe Qualification)
, usedName :: UsedName
, reference :: Reference
}
deriving instance Show Use
deriving instance Eq Use
deriving instance Ord Use
deriving instance Generic Use
instance ToJSON Use
instance FromJSON Use
instance Hashable Use
-- Instance
type InstanceID = SliceID
data Instance = Instance
{ instancePart :: InstancePart
, instanceID :: InstanceID
}
deriving instance Show Instance
deriving instance Eq Instance
deriving instance Ord Instance
deriving instance Generic Instance
instance ToJSON Instance
instance FromJSON Instance
instance Hashable Instance
-- Instance Part
data InstancePart =
OfThisClass |
OfThisClassForUnknownType |
ForThisType |
ForThisTypeOfUnknownClass
deriving instance Show InstancePart
deriving instance Eq InstancePart
deriving instance Ord InstancePart
deriving instance Generic InstancePart
instance ToJSON InstancePart
instance FromJSON InstancePart
instance Hashable InstancePart
-- UsedName
type TypeName = Name
data UsedName =
ValueName { valueName :: Name } |
TypeName { typeName :: Name } |
ConstructorName { constructorTypeName :: TypeName, constructorName :: Name }
deriving instance Show UsedName
deriving instance Eq UsedName
deriving instance Ord UsedName
deriving instance Generic UsedName
instance ToJSON UsedName
instance FromJSON UsedName
instance Hashable UsedName
-- Reference
type OriginalModule = Text
data Reference = OtherSlice SliceID | Builtin OriginalModule
deriving instance Show Reference
deriving instance Eq Reference
deriving instance Ord Reference
deriving instance Generic Reference
instance ToJSON Reference
instance FromJSON Reference
instance Hashable Reference
-- Name
data Name = Identifier Text | Operator Text
deriving instance Show Name
deriving instance Eq Name
deriving instance Ord Name
deriving instance Generic Name
instance ToJSON Name
instance FromJSON Name
instance Hashable Name
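-- Illustrative sketch (added, not part of the original module): a minimal
-- slice with no dependencies and no instances would be built as
--
-- > Slice "f0e1d2c3" (Language ["NoImplicitPrelude"])
-- >       (Fragment ["inc :: Int -> Int", "inc = (+1)"]) [] []
--
-- where the slice ID is a made-up hash and the Text fields are written as
-- string literals for readability (OverloadedStrings is assumed).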
| phischu/fragnix | src/Fragnix/Core/Slice.hs | bsd-3-clause | 3,541 | 0 | 10 | 603 | 796 | 432 | 364 | 101 | 0 |
import Control.Arrow ( second )
import Control.Concurrent ( forkIO, threadDelay )
import Control.Exception ( mask_ )
import Control.Monad ( when )
import qualified Data.ByteString as B ( ByteString )
import qualified Data.ByteString.Char8 as C ( ByteString, putStrLn )
import Data.Char ( isDigit )
import Network.HaskellNet.IMAP.Connection as I ( IMAPConnection )
import Network.HaskellNet.IMAP as I ( list, select, search, SearchQuery(..), fetch
, connectIMAPPort, login , capability, close, logout
)
import Network.Socket as S ( HostName, PortNumber )
import System.Directory ( canonicalizePath )
import System.Environment ( getArgs )
import System.Exit ( exitFailure )
import System.IO ( Handle )
import System.IO.Error ( isDoesNotExistError )
import SSLWrap ( mapSSL, myForkIO )
import qualified Codec.MIME.String.QuotedPrintable as QP
import qualified Codec.Text.IConv as Iconv
import qualified Data.ByteString.Lazy.Char8 as BL
import Control.Exception (bracket)
main :: IO ()
main = do
let config_file = "config.txt"
parseConfig = map (second (drop 1) . break (=='=')) . lines
defaultConfig = unlines
[ "hostname=imap.gmail.com"
, "port=993"
, "[email protected]"
, "passwd=something"
, "ssl_wrap_port=3004"
, "cafile=/etc/ssl/certs/ca-certificates.crt"
]
opts <- catch (fmap parseConfig $ readFile config_file)$ \e -> do
when (isDoesNotExistError e)$ do
writeFile config_file defaultConfig
putStrLn$ unlines
[ ""
, "Thanks for using imapget!"
, ""
, "You need to edit a configuration file to specify where to connect"
, "and which username and password to use."
, ""
, "I just created a default ./"++config_file++" file."
, "Please edit it to set your options."
]
exitFailure
{- TODO Where did this repetition come from? Clean up -}
let readConfig name action = maybe (putStrLn$ "error: missing "++name++" option from "++config_file) action$ lookup name opts
readConfig "hostname"$ \hostname ->
readConfig "port"$ \port ->
readConfig "username"$ \username ->
readConfig "passwd"$ \passwd ->
readConfig "ssl_wrap_port"$ \ssl_wrap_port ->
readConfig "cafile"$ \cafile -> do
args <- getArgs
case args of
["list"] | all isDigit port ->
main' IMAPConf
{ icHostname = hostname
, icPort = fromIntegral (read port :: Int)
, icUsername = username
, icPasswd = passwd
, icSSLWrapPort = fromIntegral (read ssl_wrap_port :: Int)
, cafile = cafile
}
Nothing
["fetch",label] | all isDigit port ->
main' IMAPConf
{ icHostname =hostname
, icPort = fromIntegral (read port :: Int)
, icUsername = username
, icPasswd = passwd
, icSSLWrapPort = fromIntegral (read ssl_wrap_port :: Int)
, cafile = cafile
}
(Just label)
_ -> putStrLn "USAGE: imapget [list|fetch label]"
type UserName = String
type Password = String
type Label = String
data IMAPConf = IMAPConf
{ icHostname :: S.HostName
, icPort :: S.PortNumber
, icUsername :: UserName
, icPasswd :: Password
, icSSLWrapPort :: S.PortNumber
, cafile :: String
} deriving (Show)
decodeUtf7 :: String -> String
decodeUtf7 =
BL.unpack . (Iconv.convert "UTF-7" "UTF-8") . BL.pack
-- bad way to do different commands
main' :: IMAPConf -> Maybe Label -> IO ()
main' conf mlabel = do
case mlabel of
Nothing -> do
putStrLn$ "Fetching mailboxes ..."
withIMAP conf $ \ic -> do
I.list ic >>= mapM_ (putStrLn . show . decodeUtf7 . snd) -- snd
Just label -> do
putStrLn $ "Getting label " ++ label
getEmails conf label C.putStrLn
getEmails :: IMAPConf -> Label -> (B.ByteString -> IO a) -> IO ()
getEmails c label f = withIMAP c$ \ic -> do
putStrLn$ "Selecting "++label++" ..."
I.select ic label
putStrLn$ "Retrieving "++label++" ..."
I.search ic [ALLs]
-- >>= mapM_ (\uid -> I.fetch ic uid >>= f)
-- TODO parameterize this fetch stuff to work with range of Vmail style command
>>= mapM_ (\uid -> putStrLn $ show uid)
withIMAP :: IMAPConf -> (I.IMAPConnection -> IO a) -> IO a
withIMAP c action = do
-- launch thread for wrapping tcp with SSL
cafilePath <- canonicalizePath (cafile c)
putStrLn $ "Using cafile: "++cafilePath
-- _ <- mask_ $ forkIO $ mapSSL cafilePath (icSSLWrapPort c) (icHostname c) (icPort c)
forkIO $ mapSSL cafilePath (icSSLWrapPort c) (icHostname c) (icPort c)
-- start imap communication
threadDelay$ 500*1000
putStrLn$ "Connecting to "++icHostname c++":"++show (icPort c)++" (wrapping with ssl through localhost:"++show (icSSLWrapPort c)++") ..."
bracket
(connectIMAPPort "localhost" (icSSLWrapPort c))
I.logout
(\ic -> do
putStrLn$ "Authenticating user "++icUsername c++" ..."
I.login ic (icUsername c) (icPasswd c)
action ic
)
| danchoi/imapget | src/Main.hs | bsd-3-clause | 5,715 | 0 | 30 | 1,904 | 1,397 | 748 | 649 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Strict #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | @futhark dataset@
module Futhark.CLI.Dataset (main) where
import Control.Monad
import Control.Monad.ST
import qualified Data.Binary as Bin
import qualified Data.ByteString.Lazy.Char8 as BS
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Vector.Generic (freeze)
import qualified Data.Vector.Storable as SVec
import qualified Data.Vector.Storable.Mutable as USVec
import Data.Word
import qualified Futhark.Data as V
import Futhark.Data.Reader (readValues)
import Futhark.Util (convFloat)
import Futhark.Util.Options
import Language.Futhark.Parser
import Language.Futhark.Pretty ()
import Language.Futhark.Prop (UncheckedTypeExp)
import Language.Futhark.Syntax hiding
( FloatValue (..),
IntValue (..),
PrimValue (..),
Value,
ValueType,
)
import System.Exit
import System.IO
import System.Random (mkStdGen, uniformR)
import System.Random.Stateful (UniformRange (..))
-- | Run @futhark dataset@.
main :: String -> [String] -> IO ()
main = mainWithOptions initialDataOptions commandLineOptions "options..." f
where
f [] config
| null $ optOrders config = Just $ do
maybe_vs <- readValues <$> BS.getContents
case maybe_vs of
Nothing -> do
hPutStrLn stderr "Malformed data on standard input."
exitFailure
Just vs ->
case format config of
Text -> mapM_ (T.putStrLn . V.valueText) vs
Binary -> mapM_ (BS.putStr . Bin.encode) vs
Type -> mapM_ (T.putStrLn . V.valueTypeText . V.valueType) vs
| otherwise =
Just $
zipWithM_
($)
(optOrders config)
[fromIntegral (optSeed config) ..]
f _ _ =
Nothing
data OutputFormat
= Text
| Binary
| Type
deriving (Eq, Ord, Show)
data DataOptions = DataOptions
{ optSeed :: Int,
optRange :: RandomConfiguration,
optOrders :: [Word64 -> IO ()],
format :: OutputFormat
}
initialDataOptions :: DataOptions
initialDataOptions = DataOptions 1 initialRandomConfiguration [] Text
commandLineOptions :: [FunOptDescr DataOptions]
commandLineOptions =
[ Option
"s"
["seed"]
( ReqArg
( \n ->
case reads n of
[(n', "")] ->
Right $ \config -> config {optSeed = n'}
_ ->
Left $ do
hPutStrLn stderr $ "'" ++ n ++ "' is not an integer."
exitFailure
)
"SEED"
)
"The seed to use when initialising the RNG.",
Option
"g"
["generate"]
( ReqArg
( \t ->
case tryMakeGenerator t of
Right g ->
Right $ \config ->
config
{ optOrders =
optOrders config
++ [g (optRange config) (format config)]
}
Left err ->
Left $ do
hPutStrLn stderr err
exitFailure
)
"TYPE"
)
"Generate a random value of this type.",
Option
[]
["text"]
(NoArg $ Right $ \opts -> opts {format = Text})
"Output data in text format (must precede --generate).",
Option
"b"
["binary"]
(NoArg $ Right $ \opts -> opts {format = Binary})
"Output data in binary Futhark format (must precede --generate).",
Option
"t"
["type"]
(NoArg $ Right $ \opts -> opts {format = Type})
"Output the type (textually) rather than the value (must precede --generate).",
setRangeOption "i8" seti8Range,
setRangeOption "i16" seti16Range,
setRangeOption "i32" seti32Range,
setRangeOption "i64" seti64Range,
setRangeOption "u8" setu8Range,
setRangeOption "u16" setu16Range,
setRangeOption "u32" setu32Range,
setRangeOption "u64" setu64Range,
setRangeOption "f16" setf16Range,
setRangeOption "f32" setf32Range,
setRangeOption "f64" setf64Range
]
setRangeOption ::
Read a =>
String ->
(Range a -> RandomConfiguration -> RandomConfiguration) ->
FunOptDescr DataOptions
setRangeOption tname set =
Option
""
[name]
( ReqArg
( \b ->
let (lower, rest) = span (/= ':') b
upper = drop 1 rest
in case (reads lower, reads upper) of
([(lower', "")], [(upper', "")]) ->
Right $ \config ->
config {optRange = set (lower', upper') $ optRange config}
_ ->
Left $ do
hPutStrLn stderr $ "Invalid bounds for " ++ tname ++ ": " ++ b
exitFailure
)
"MIN:MAX"
)
$ "Range of " ++ tname ++ " values."
where
name = tname ++ "-bounds"
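-- Illustrative note (added): with the option built above, a flag such as
-- --i32-bounds=0:100 splits its argument at the first ':' and, via 'reads',
-- sets the i32 range to (0, 100); negative bounds such as -10:10 parse the
-- same way.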
tryMakeGenerator ::
String ->
Either String (RandomConfiguration -> OutputFormat -> Word64 -> IO ())
tryMakeGenerator t
| Just vs <- readValues $ BS.pack t =
return $ \_ fmt _ -> mapM_ (outValue fmt) vs
| otherwise = do
t' <- toValueType =<< either (Left . show) Right (parseType name (T.pack t))
return $ \conf fmt seed -> do
let v = randomValue conf t' seed
outValue fmt v
where
name = "option " ++ t
outValue Text = T.putStrLn . V.valueText
outValue Binary = BS.putStr . Bin.encode
outValue Type = T.putStrLn . V.valueTypeText . V.valueType
toValueType :: UncheckedTypeExp -> Either String V.ValueType
toValueType TETuple {} = Left "Cannot handle tuples yet."
toValueType TERecord {} = Left "Cannot handle records yet."
toValueType TEApply {} = Left "Cannot handle type applications yet."
toValueType TEArrow {} = Left "Cannot generate functions."
toValueType TESum {} = Left "Cannot handle sumtypes yet."
toValueType TEDim {} = Left "Cannot handle existential sizes."
toValueType (TEUnique t _) = toValueType t
toValueType (TEArray t d _) = do
d' <- constantDim d
V.ValueType ds t' <- toValueType t
return $ V.ValueType (d' : ds) t'
where
constantDim (DimExpConst k _) = Right k
constantDim _ = Left "Array has non-constant dimension declaration."
toValueType (TEVar (QualName [] v) _)
| Just t <- lookup v m = Right $ V.ValueType [] t
where
m = map f [minBound .. maxBound]
f t = (nameFromText (V.primTypeText t), t)
toValueType (TEVar v _) =
Left $ "Unknown type " ++ pretty v
-- | Closed interval, as in @System.Random@.
type Range a = (a, a)
data RandomConfiguration = RandomConfiguration
{ i8Range :: Range Int8,
i16Range :: Range Int16,
i32Range :: Range Int32,
i64Range :: Range Int64,
u8Range :: Range Word8,
u16Range :: Range Word16,
u32Range :: Range Word32,
u64Range :: Range Word64,
f16Range :: Range Half,
f32Range :: Range Float,
f64Range :: Range Double
}
-- The following lines provide evidence about how Haskell's record
-- system sucks.
seti8Range :: Range Int8 -> RandomConfiguration -> RandomConfiguration
seti8Range bounds config = config {i8Range = bounds}
seti16Range :: Range Int16 -> RandomConfiguration -> RandomConfiguration
seti16Range bounds config = config {i16Range = bounds}
seti32Range :: Range Int32 -> RandomConfiguration -> RandomConfiguration
seti32Range bounds config = config {i32Range = bounds}
seti64Range :: Range Int64 -> RandomConfiguration -> RandomConfiguration
seti64Range bounds config = config {i64Range = bounds}
setu8Range :: Range Word8 -> RandomConfiguration -> RandomConfiguration
setu8Range bounds config = config {u8Range = bounds}
setu16Range :: Range Word16 -> RandomConfiguration -> RandomConfiguration
setu16Range bounds config = config {u16Range = bounds}
setu32Range :: Range Word32 -> RandomConfiguration -> RandomConfiguration
setu32Range bounds config = config {u32Range = bounds}
setu64Range :: Range Word64 -> RandomConfiguration -> RandomConfiguration
setu64Range bounds config = config {u64Range = bounds}
setf16Range :: Range Half -> RandomConfiguration -> RandomConfiguration
setf16Range bounds config = config {f16Range = bounds}
setf32Range :: Range Float -> RandomConfiguration -> RandomConfiguration
setf32Range bounds config = config {f32Range = bounds}
setf64Range :: Range Double -> RandomConfiguration -> RandomConfiguration
setf64Range bounds config = config {f64Range = bounds}
initialRandomConfiguration :: RandomConfiguration
initialRandomConfiguration =
RandomConfiguration
(minBound, maxBound)
(minBound, maxBound)
(minBound, maxBound)
(minBound, maxBound)
(minBound, maxBound)
(minBound, maxBound)
(minBound, maxBound)
(minBound, maxBound)
(0.0, 1.0)
(0.0, 1.0)
(0.0, 1.0)
randomValue :: RandomConfiguration -> V.ValueType -> Word64 -> V.Value
randomValue conf (V.ValueType ds t) seed =
case t of
V.I8 -> gen i8Range V.I8Value
V.I16 -> gen i16Range V.I16Value
V.I32 -> gen i32Range V.I32Value
V.I64 -> gen i64Range V.I64Value
V.U8 -> gen u8Range V.U8Value
V.U16 -> gen u16Range V.U16Value
V.U32 -> gen u32Range V.U32Value
V.U64 -> gen u64Range V.U64Value
V.F16 -> gen f16Range V.F16Value
V.F32 -> gen f32Range V.F32Value
V.F64 -> gen f64Range V.F64Value
V.Bool -> gen (const (False, True)) V.BoolValue
where
gen range final = randomVector (range conf) final ds seed
randomVector ::
(SVec.Storable v, UniformRange v) =>
Range v ->
(SVec.Vector Int -> SVec.Vector v -> V.Value) ->
[Int] ->
Word64 ->
V.Value
randomVector range final ds seed = runST $ do
  -- Use some nice impure computation where we can preallocate a
  -- vector of the desired size, populate it via the random number
  -- generator, and then finally return a frozen binary vector.
arr <- USVec.new n
let fill g i
| i < n = do
let (v, g') = uniformR range g
USVec.write arr i v
g' `seq` fill g' $! i + 1
| otherwise =
pure ()
fill (mkStdGen $ fromIntegral seed) 0
final (SVec.fromList ds) . SVec.convert <$> freeze arr
where
n = product ds
-- XXX: The following instance is an orphan. Maybe it could be
-- avoided with some newtype trickery or refactoring, but it's so
-- convenient this way.
instance UniformRange Half where
uniformRM (a, b) g =
(convFloat :: Float -> Half) <$> uniformRM (convFloat a, convFloat b) g
| diku-dk/futhark | src/Futhark/CLI/Dataset.hs | isc | 10,492 | 0 | 25 | 2,807 | 3,018 | 1,584 | 1,434 | 271 | 12 |
{-# LANGUAGE OverloadedStrings #-}
module Config.Internal where
import qualified Data.Yaml as Y
import Data.Yaml ((.:))
import Control.Applicative ((<$>),(<*>),empty)
loadConfigFromFile :: String -> IO (Either Y.ParseException VanguardConfig)
loadConfigFromFile file = Y.decodeFileEither file
data VanguardConfig = VanguardConfig { net :: NetConfig
, control :: ControlConfig } deriving (Eq, Show)
data NetConfig = NetConfig { device :: String
, ip :: String } deriving (Eq, Show)
data ControlConfig = ControlConfig { socket :: String } deriving (Eq, Show)
instance Y.FromJSON VanguardConfig where
parseJSON (Y.Object v) = VanguardConfig <$> v .: "net" <*> v .: "control"
parseJSON _ = empty
instance Y.FromJSON NetConfig where
parseJSON (Y.Object v) = NetConfig <$> v .: "device" <*> v .: "address"
parseJSON _ = empty
instance Y.FromJSON ControlConfig where
parseJSON (Y.Object v) = ControlConfig <$> v .: "socket"
parseJSON _ = empty
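-- Illustrative sketch (added, not part of the original module): a YAML file
-- accepted by the FromJSON instances above would look like
--
-- > net:
-- >   device: tun0
-- >   address: 10.0.0.1
-- > control:
-- >   socket: /var/run/vanguard.sock
--
-- where the device, address and socket values are made-up examples.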
| rlupton20/vanguard-dataplane | app/Config/Internal.hs | gpl-3.0 | 1,079 | 0 | 9 | 276 | 314 | 174 | 140 | 21 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>Server-Sent Events | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Konten</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Telusuri</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorit</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/sse/src/main/javahelp/org/zaproxy/zap/extension/sse/resources/help_id_ID/helpset_id_ID.hs | apache-2.0 | 979 | 80 | 67 | 160 | 419 | 212 | 207 | -1 | -1 |
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MonoLocalBinds #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Test.StateMachine.Types
-- Copyright : (C) 2017, ATS Advanced Telematic Systems GmbH
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Stevan Andjelkovic <[email protected]>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-----------------------------------------------------------------------------
module Test.StateMachine.Types
( StateMachine(..)
, Command(..)
, getCommand
, Commands(..)
, NParallelCommands
, lengthCommands
, ParallelCommandsF(..)
, ParallelCommands
, Pair(..)
, fromPair
, toPair
, fromPair'
, toPairUnsafe'
, Reason(..)
, isOK
, noCleanup
, module Test.StateMachine.Types.Environment
, module Test.StateMachine.Types.GenSym
, module Test.StateMachine.Types.History
, module Test.StateMachine.Types.References
) where
import Data.Functor.Classes
(Ord1, Show1)
import Data.Semigroup
import Prelude
import Test.QuickCheck
(Gen)
import Test.StateMachine.Logic
import Test.StateMachine.Types.Environment
import Test.StateMachine.Types.GenSym
import Test.StateMachine.Types.History
import Test.StateMachine.Types.References
------------------------------------------------------------------------
data StateMachine model cmd m resp = StateMachine
{ initModel :: forall r. model r
, transition :: forall r. (Show1 r, Ord1 r) => model r -> cmd r -> resp r -> model r
, precondition :: model Symbolic -> cmd Symbolic -> Logic
, postcondition :: model Concrete -> cmd Concrete -> resp Concrete -> Logic
, invariant :: Maybe (model Concrete -> Logic)
, generator :: model Symbolic -> Maybe (Gen (cmd Symbolic))
, shrinker :: model Symbolic -> cmd Symbolic -> [cmd Symbolic]
, semantics :: cmd Concrete -> m (resp Concrete)
, mock :: model Symbolic -> cmd Symbolic -> GenSym (resp Symbolic)
, cleanup :: model Concrete -> m ()
}
noCleanup :: Monad m => model Concrete -> m ()
noCleanup _ = return ()
-- | Previously symbolically executed command
--
-- Invariant: the variables must be the variables in the response.
data Command cmd resp = Command !(cmd Symbolic) !(resp Symbolic) ![Var]
getCommand :: Command cmd resp -> cmd Symbolic
getCommand (Command cmd _resp _vars) = cmd
deriving
stock
instance (Show (cmd Symbolic), Show (resp Symbolic)) => Show (Command cmd resp)
deriving
stock
instance (Read (cmd Symbolic), Read (resp Symbolic)) => Read (Command cmd resp)
deriving
stock
instance ((Eq (cmd Symbolic)), (Eq (resp Symbolic))) => Eq (Command cmd resp)
newtype Commands cmd resp = Commands
{ unCommands :: [Command cmd resp] }
deriving newtype (Semigroup, Monoid)
deriving
stock
instance (Show (cmd Symbolic), Show (resp Symbolic)) => Show (Commands cmd resp)
deriving
stock
instance (Read (cmd Symbolic), Read (resp Symbolic)) => Read (Commands cmd resp)
deriving
stock
instance ((Eq (cmd Symbolic)), (Eq (resp Symbolic))) => Eq (Commands cmd resp)
lengthCommands :: Commands cmd resp -> Int
lengthCommands = length . unCommands
data Reason
= Ok
| PreconditionFailed String
| PostconditionFailed String
| InvariantBroken String
| ExceptionThrown String
| MockSemanticsMismatch
deriving stock (Eq, Show)
isOK :: Reason -> Bool
isOK Ok = True
isOK _ = False
data ParallelCommandsF t cmd resp = ParallelCommands
{ prefix :: !(Commands cmd resp)
, suffixes :: [t (Commands cmd resp)]
}
deriving
stock
instance (Eq (cmd Symbolic), Eq (resp Symbolic), Eq (t (Commands cmd resp)))
=> Eq (ParallelCommandsF t cmd resp)
deriving
stock
instance (Show (cmd Symbolic), Show (resp Symbolic), Show (t (Commands cmd resp)))
=> Show (ParallelCommandsF t cmd resp)
data Pair a = Pair
{ proj1 :: !a
, proj2 :: !a
}
deriving stock (Eq, Ord, Show, Functor, Foldable, Traversable)
fromPair :: Pair a -> (a, a)
fromPair (Pair x y) = (x, y)
toPair :: (a, a) -> Pair a
toPair (x, y) = Pair x y
type ParallelCommands = ParallelCommandsF Pair
type NParallelCommands = ParallelCommandsF []
fromPair' :: ParallelCommandsF Pair cmd resp -> ParallelCommandsF [] cmd resp
fromPair' p = p { suffixes = (\(Pair l r) -> [l, r]) <$> suffixes p}
toPairUnsafe' :: ParallelCommandsF [] cmd resp -> ParallelCommandsF Pair cmd resp
toPairUnsafe' p = p { suffixes = unsafePair <$> suffixes p}
where
unsafePair [a,b] = Pair a b
unsafePair _ = error "invariant violation! Shrunk list should always have 2 elements."
| advancedtelematic/quickcheck-state-machine-model | src/Test/StateMachine/Types.hs | bsd-3-clause | 5,165 | 0 | 14 | 1,141 | 1,461 | 808 | 653 | 131 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Sensor_msgs.Range where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Data.Word as Word
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
data Range = Range { _header :: Header.Header
, _radiation_type :: Word.Word8
, _field_of_view :: P.Float
, _min_range :: P.Float
, _max_range :: P.Float
, _range :: P.Float
} deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
$(makeLenses ''Range)
instance RosBinary Range where
put obj' = put (_header obj') *> put (_radiation_type obj') *> put (_field_of_view obj') *> put (_min_range obj') *> put (_max_range obj') *> put (_range obj')
get = Range <$> get <*> get <*> get <*> get <*> get <*> get
putMsg = putStampedMsg
instance HasHeader Range where
getSequence = view (header . Header.seq)
getFrame = view (header . Header.frame_id)
getStamp = view (header . Header.stamp)
setSequence = set (header . Header.seq)
instance MsgInfo Range where
sourceMD5 _ = "c005c34273dc426c67a020a87bc24148"
msgTypeName _ = "sensor_msgs/Range"
instance D.Default Range
ultrasound :: Word.Word8
ultrasound = 0
infrared :: Word.Word8
infrared = 1
| acowley/roshask | msgs/Sensor_msgs/Ros/Sensor_msgs/Range.hs | bsd-3-clause | 1,705 | 1 | 13 | 357 | 487 | 280 | 207 | 43 | 1 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude, AutoDeriveTypeable, RoleAnnotations #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IOArray
-- Copyright : (c) The University of Glasgow 2008
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- The IOArray type
--
-----------------------------------------------------------------------------
module GHC.IOArray (
IOArray(..),
newIOArray, unsafeReadIOArray, unsafeWriteIOArray,
readIOArray, writeIOArray,
boundsIOArray
) where
import GHC.Base
import GHC.IO
import GHC.Arr
import Data.Typeable.Internal
-- ---------------------------------------------------------------------------
-- | An 'IOArray' is a mutable, boxed, non-strict array in the 'IO' monad.
-- The type arguments are as follows:
--
-- * @i@: the index type of the array (should be an instance of 'Ix')
--
-- * @e@: the element type of the array.
--
--
newtype IOArray i e = IOArray (STArray RealWorld i e) deriving( Typeable )
-- index type should have a nominal role due to Ix class. See also #9220.
type role IOArray nominal representational
-- explicit instance because Haddock can't figure out a derived one
instance Eq (IOArray i e) where
IOArray x == IOArray y = x == y
-- |Build a new 'IOArray'
newIOArray :: Ix i => (i,i) -> e -> IO (IOArray i e)
{-# INLINE newIOArray #-}
newIOArray lu initial = stToIO $ do {marr <- newSTArray lu initial; return (IOArray marr)}
-- | Read a value from an 'IOArray'
unsafeReadIOArray :: Ix i => IOArray i e -> Int -> IO e
{-# INLINE unsafeReadIOArray #-}
unsafeReadIOArray (IOArray marr) i = stToIO (unsafeReadSTArray marr i)
-- | Write a new value into an 'IOArray'
unsafeWriteIOArray :: Ix i => IOArray i e -> Int -> e -> IO ()
{-# INLINE unsafeWriteIOArray #-}
unsafeWriteIOArray (IOArray marr) i e = stToIO (unsafeWriteSTArray marr i e)
-- | Read a value from an 'IOArray'
readIOArray :: Ix i => IOArray i e -> i -> IO e
readIOArray (IOArray marr) i = stToIO (readSTArray marr i)
-- | Write a new value into an 'IOArray'
writeIOArray :: Ix i => IOArray i e -> i -> e -> IO ()
writeIOArray (IOArray marr) i e = stToIO (writeSTArray marr i e)
{-# INLINE boundsIOArray #-}
boundsIOArray :: IOArray i e -> (i,i)
boundsIOArray (IOArray marr) = boundsSTArray marr
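-- Illustrative usage sketch (added, not part of the original module):
--
-- > do arr <- newIOArray (0, 9) 'x'
-- >    writeIOArray arr 3 'y'
-- >    readIOArray arr 3        -- yields 'y'; boundsIOArray arr is (0, 9)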
| alexander-at-github/eta | libraries/base/GHC/IOArray.hs | bsd-3-clause | 2,530 | 0 | 10 | 459 | 518 | 282 | 236 | 33 | 1 |
import System.Environment
import System.IO
import Text.ParserCombinators.Parsec
import Control.Monad
import Data.ByteString.Lazy.Char8 as BS hiding (length,take,drop,filter,head,concat)
import Control.Applicative hiding ((<|>), many)
import Data.List as DL
import Data.Char
import Text.Printf
{--
han(Open usp Tukubai)
designed by Nobuaki Tounaka
written by Ryuichi Ueda
The MIT License
Copyright (C) 2012 Universal Shell Programming Laboratory
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
--}
showUsage :: IO ()
showUsage = do System.IO.hPutStr stderr ("Usage : han <f1> <f2> ... <file>\n" ++
"Thu Aug 15 21:44:50 JST 2013\n" ++
"Open usp Tukubai (LINUX+FREEBSD), Haskell ver.\n")
main :: IO ()
main = do args <- getArgs
case args of
["-h"] -> showUsage
["--help"] -> showUsage
_ -> mainProc (setOpts args)
{--
[a] -> readF a >>= simpleHan
[a,b] -> readF "-" >>= mainProc (read a) (read b)
[a,b,c] -> readF c >>= mainProc (read a) (read b)
--}
mainProc :: Opts -> IO ()
mainProc (Opts fs file) = readF file >>= mainProc' fs
mainProc' :: [Int] -> BS.ByteString -> IO ()
mainProc' fs cs = mainProc'' fs (BS.lines cs)
mainProc'' :: [Int] -> [BS.ByteString] -> IO ()
mainProc'' fs [] = do return ()
mainProc'' fs (ln:lns) = han fs (myWords ln) >> mainProc'' fs lns
type Words = [BS.ByteString]
type Word = BS.ByteString
han :: [Int] -> Words -> IO ()
han [] wds = BS.putStrLn $ BS.unwords $ DL.map han' wds
han fs wds = BS.putStrLn $ BS.unwords $ hanFs fs (DL.zip [1..] wds)
hanFs :: [Int] -> [(Int,Word)] -> Words
hanFs fs [] = []
hanFs fs ((n,w):ws) = (if x == [] then w else han' w) : hanFs fs ws
where x = filter (== n) fs
han' :: Word -> Word
han' w = BS.pack $ han'' $ BS.unpack w
han'' :: String -> String
han'' [] = []
han'' cs = cng ++ han'' str
where x = takeChar cs
c = fst x
str = snd x
cng = if length c == 3 then (fromUnicode $ toHancode $ toUnicode c) else c
toUnicode :: String -> Int
toUnicode (a:b:c:[]) = upper*256 + under
where upper = (oa `mod` 16)*16 + ((ob `div` 4) `mod` 16)
under = (ob `mod` 4)*64 + (oc `mod` 64)
oa = ord a
ob = ord b
oc = ord c
{--
toUnicode (a:b:[]) = upper*256 + under
where upper = (oa `div` 4) `mod` 8
under = (oa `mod` 4)*64 + (ob `mod` 64)
oa = ord a
ob = ord b
--}
fromUnicode :: Int -> String
fromUnicode n
| n < 128 = [chr n]
| n < 0x800 = from2Byte n
| n > 0x200000 = (fromUnicode (n - 0x200000)) ++ fromUnicode 0x309A
| n > 0x100000 = (fromUnicode (n - 0x100000)) ++ fromUnicode 0x3099
| otherwise = from3Byte n
from2Byte :: Int -> String
from2Byte n = DL.map chr [a,b]
where upper = n `div` 256
under = n `mod` 256
a = 0xC0 + (upper `mod` 8)*4 + (under `div` 64)
b = 128 + (under `mod` 64)
from3Byte :: Int -> String
from3Byte n = DL.map chr [a,b,c]
where upper = n `div` 256
under = n `mod` 256
a = 0xE0 + (upper `div` 16)
b = 128 + (upper `mod` 16)*4 + (under `div` 64)
c = 128 + (under `mod` 64)
punclist = [(0x3001,0xFF64),(0x3002,0xFF61),(0x300C,0xFF62),(0x300D,0xFF63)]
panalist = [(0x30D1,0xFF8A), (0x30D4,0xFF8B),(0x30D7,0xFF8C), (0x30DA,0xFF8D), (0x30DD,0xFF8E)]
ganalist = [(0x30AC,0xFF76), (0x30AE,0xFF77), (0x30B0,0xFF78),
(0x30B2,0xFF79), (0x30B4,0xFF7A), (0x30B6,0xFF7B), (0x30B8,0xFF7C), (0x30BA,0xFF7D),
(0x30BC,0xFF7E), (0x30BE,0xFF7F), (0x30C0,0xFF80), (0x30C2,0xFF81), (0x30C4,0xFF6F),
(0x30C5,0xFF82), (0x30C7,0xFF83), (0x30C9,0xFF84), (0x30D0,0xFF8A), (0x30D3,0xFF8B),
(0x30D6,0xFF8C), (0x30D9,0xFF8D), (0x30DC,0xFF8E), (0x30F4,0xFF73), (0x30F7,0xFF9C)]
kanalist = [(0x3099,0xFF9E), (0x309A,0xFF9F), (0x30A1,0xFF67), (0x30A2,0xFF71), (0x30A3,0xFF68),
(0x30A4,0xFF72), (0x30A5,0xFF69), (0x30A6,0xFF73), (0x30A7,0xFF6A), (0x30A8,0xFF74),
(0x30A9,0xFF6B), (0x30AA,0xFF75), (0x30AB,0xFF76), (0x30AD,0xFF77), (0x30AF,0xFF78),
(0x30B1,0xFF79), (0x30B3,0xFF7A), (0x30B5,0xFF7B), (0x30B7,0xFF7C), (0x30B9,0xFF7D),
(0x30BB,0xFF7E), (0x30BD,0xFF7F), (0x30BF,0xFF80), (0x30C1,0xFF81), (0x30C3,0xFF6F),
(0x30C4,0xFF82), (0x30C6,0xFF83), (0x30C8,0xFF84), (0x30CA,0xFF85), (0x30CB,0xFF86),
(0x30CC,0xFF87), (0x30CD,0xFF88), (0x30CE,0xFF89), (0x30CF,0xFF8A), (0x30D2,0xFF8B),
(0x30D5,0xFF8C), (0x30D8,0xFF8D), (0x30DB,0xFF8E), (0x30DE,0xFF8F), (0x30DF,0xFF90),
(0x30E0,0xFF91), (0x30E1,0xFF92), (0x30E2,0xFF93), (0x30E3,0xFF6C), (0x30E4,0xFF94),
(0x30E5,0xFF6D), (0x30E6,0xFF95), (0x30E7,0xFF6E), (0x30E8,0xFF96), (0x30E9,0xFF97),
(0x30EA,0xFF98), (0x30EB,0xFF99), (0x30EC,0xFF9A), (0x30ED,0xFF9B), (0x30EF,0xFF9C),
(0x30F2,0xFF66), (0x30F3,0xFF9D), (0x30FB,0xFF65), (0x30FC,0xFF70)]
symblist = [(0xFFE0,0x00A2), (0xFFE1,0x00A3), (0xFFE2,0x00AC), (0xFFE3,0x00AF), (0xFFE4,0x00A6),
(0xFFE5,0x00A5), (0xFFE6,0x20A9)]
toHancode :: Int -> Int
toHancode c
| c < 128 = c
| c == 0x02DC = 0x7E --tilde
| c == 0x223C = 0x7E --tilde
| c == 0x3000 = 0x20 --space
| c >= 0x3001 && c <= 0x300D = getList c punclist
| c == 0x301C = 0x7E --tilde
| c >= 0x3099 && c <= 0x30FC = if z == c then (if y == c then x + 0x100000 else y + 0x200000) else z
| c >= 0xFF01 && c <= 0xFF5E = c + 0x21 - 0xFF01
| c == 0xFF5F = 0x2985 --Fullwidth brackets
| c == 0xFF60 = 0x2986 --Fullwidth brackets
| c >= 0xFFE0 && c <= 0xFFE6 = getList c symblist
| otherwise = c
where x = getList c ganalist
y = getList c panalist
z = getList c kanalist
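-- Illustrative worked example (added): the katakana letter A (U+30A2) appears
-- in kanalist, so toHancode 0x30A2 == 0xFF71 (its halfwidth form), and
-- fromUnicode 0xFF71 re-encodes that code point via from3Byte as the UTF-8
-- bytes 0xEF 0xBD 0xB1.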
getList :: Int -> [(Int,Int)] -> Int
getList n list = if ans == [] then n else snd $ head $ ans
where f v (a,b) = v == a
ans = filter (f n) list
takeChar :: String -> (String,String)
takeChar [] = error "wrong cut pos"
takeChar (c:[]) = ([c],[])
takeChar (c:cs)
| ord c < 128 = ([c],cs)
| (ord c) `div` 32 == 6 = take2Char (c:cs)
| (ord c) `div` 16 == 14 = take3Char (c:cs)
| (ord c) `div` 8 == 30 = take4Char (c:cs)
take2Char (a:b:c) = ((a:b:[]),c)
take3Char (a:b:c:d) = ((a:b:c:[]),d)
take4Char (a:b:c:d:e) = ((a:b:c:d:[]),e)
readF :: String -> IO BS.ByteString
readF "-" = BS.getContents
readF f = BS.readFile f
myWords :: BS.ByteString -> [BS.ByteString]
myWords line = filter (/= BS.pack "") $ BS.split ' ' line
data Opts = Opts [Int] String | Error String deriving Show
setOpts :: [String] -> Opts
setOpts as = case parse args "" ((Prelude.unwords as) ++ " ") of
Right opt -> opt
Left err -> Error ( show err )
args = Opts <$> many num <*> (try(filename) <|> return "-")
num = do a <- many1 digit
char ' '
return (read a)
filename = many1 ( try(letter) <|> try(digit) <|> symbol ) >>= return
symbol = oneOf "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
| ShellShoccar-jpn/Open-usp-Tukubai | COMMANDS.HS/han.hs | mit | 8,489 | 11 | 12 | 2,356 | 3,124 | 1,777 | 1,347 | 140 | 3 |
{-# OPTIONS_GHC -cpp #-}
{-+
Instance declarations in the source code are assigned names and
added to the instance database, which gets used during context reduction.
-}
module TiInstanceDB(
IDB,Instance,InstEntry(..),emptyIdb,extendIdb,classInstances,findInst,findInst',
addInstKind,instType)
where
import TiTypes(Type,Pred,Subst(..),Types(..),funT,HsIdentI(..),Typing(..),
Kinded,kinded,unQual,forall')
import TiSolve()
import Unification(match,unify)
import Data.Maybe(mapMaybe,isJust)
--import HsIdent -- hmm
import PrettyPrint
import SpecialNames
import TiDefinedNames(definedTypeName,optDefinedTypeName)
import MUtils(( # ),mapPartition)
--import Debug.Trace(trace) -- debug
import Map60204
#if __GLASGOW_HASKELL__ >= 604
import qualified Data.Map as M (Map)
newtype IDB i = Idb (M.Map i [Instance i]) --deriving Show
#else
import qualified Data.FiniteMap as M (FiniteMap)
newtype IDB i = Idb (M.FiniteMap i [Instance i]) --deriving Show
#endif
{-+
The instance database is simply a list of instances. An instance like
#instance (Show a,Show b) => Show (Either a b)#
might be represented in the database as
#(Show (Either a b),(inst_Show_Either,[Show a,Show b]))#
-}
type Instance i = (Pred i,InstEntry i)
data InstEntry i = IE i [Kinded i] [Pred i] deriving (Eq,Show,Read)
instClass (hd,_) = definedTypeName hd
instHead (ih,_) = ih
--instName (_,(n,_)) = n
instType (c,IE v gs ctx) = HsVar v:>:forall' gs (unQual (funT (ctx++[c])))
addInstKind ks (c,(i,ctx)) = (c,IE i (kinded ks (tv (c,ctx))) ctx)
emptyIdb = Idb emptyM
--extendIdb1 inst (Idb idb) = Idb (inst:idb)
extendIdb insts (Idb idb) = Idb (addListTo_CM (++) idb cinsts)
where
cinsts = [(instClass i,[i])|i<-insts]
--namesIdb (Idb idb) = [dn|(_,(dn,_))<-idb]
classInstances (Idb idb) k = findWithDefaultM [] k idb
findInst idb = findInst' True idb
findInst' delayIfOverlap (Idb idb) pred =
--trace (pp debugmsg)
pick
where
{-
debugmsg =
"findInst "<+>pred $$
nest 4 (vcat [
"Applicable now: "<+>some (map (fst.fst) nowInsts),
"Applicable later:"<+>some (map fst laterInsts),
"Pick: "<+>some pick,
"Other: "<+> if null laterInsts
then some (map fst otherInsts)
else ppi (length otherInsts - length laterInsts)])
where
some xs = length xs <+> vcat (take 5 xs)
-}
pick = map instantiate (if delayIfOverlap
then handleOverlapping
else nowInsts)
(otherInsts,nowInsts) = mapPartition matchInst insts
where
matchInst inst =
maybe (Left inst) (Right . (,) inst)
$ match [(instHead inst,pred)]
-- Instances in the same class, or...
insts = maybe allInsts ( \k-> findWithDefaultM [] k idb) $
optPredClass pred
where
-- Used when looking for an instance in an unknown class!
allInsts = concat (elemsM idb)
laterInsts = mapMaybe laterInst otherInsts
where laterInst inst = const inst # unify [(pred,instHead inst)]
instantiate ((ip,IE dn gs ips),s) =
((dn,su ips),((gs,su ip),S s))
where su x = apply (S s) x
-- Support for overlapping instances:
handleOverlapping =
if null laterInsts
then findMostSpecific nowInsts
else []
findMostSpecific is = filter isMostSpecific is
where
isMostSpecific i = all (i `mst`) is
(i1,_) `mst` (i2,_) = i1 `moreSpecificThan` i2
i1 `moreSpecificThan` i2 = isJust (match [(instHead i2,instHead i1)])
optPredClass p = optDefinedTypeName p
----
instance Show i => Show (IDB i) where
showsPrec _ (Idb insts) = shows (toListM insts)
instance (IsSpecialName i,Printable i) => Printable (IDB i) where
ppi (Idb insts) = vcat [pinst i|is<- elemsM insts,i<-is]
pinst (t,IE dn gs ctx) = dn<+>"= instance"<+>ctx<+>"=>"<+>t
instance Printable i => Printable (InstEntry i)
| kmate/HaRe | old/tools/base/TI/TiInstanceDB.hs | bsd-3-clause | 3,912 | 13 | 15 | 869 | 1,125 | 619 | 506 | 59 | 3 |
module LiftToToplevel.WhereIn7 where
--A definition can be lifted from a where or let to the top level binding group.
--Lifting a definition widens the scope of the definition.
--In this example, lift 'addthree' defined in 'fun'.
--This example aims to test adding parentheses.
fun x y z =inc addthree
where inc a =a +1
addthree=x+y+z
| kmate/HaRe | test/testdata/LiftToToplevel/WhereIn7.hs | bsd-3-clause | 358 | 0 | 8 | 79 | 52 | 29 | 23 | 4 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Wincomplete-patterns #-}
module T15385 where
import Data.Type.Equality
data T a where
TInt :: T Int
TBool :: T Bool
f1, f2 :: a :~: Int -> T a -> ()
f1 eq t
| Refl <- eq
= case t of
TInt -> ()
f2 eq t
= if | Refl <- eq
-> case t of
TInt -> ()
| sdiehl/ghc | testsuite/tests/pmcheck/should_compile/T15385.hs | bsd-3-clause | 382 | 0 | 10 | 114 | 126 | 68 | 58 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
module Thrift.Protocol
( Protocol(..)
, StatelessProtocol(..)
, ProtocolExn(..)
, ProtocolExnType(..)
, getTypeOf
, runParser
, bsToDouble
, bsToDoubleLE
) where
import Control.Exception
import Data.Attoparsec.ByteString
import Data.Bits
import Data.ByteString.Unsafe
import Data.Functor ((<$>))
import Data.Int
import Data.Monoid (mempty)
import Data.Text.Lazy (Text)
import Data.Typeable (Typeable)
import Data.Word
import Foreign.Ptr (castPtr)
import Foreign.Storable (peek, poke)
import System.IO.Unsafe
import qualified Data.ByteString as BS
import qualified Data.HashMap.Strict as Map
import qualified Data.ByteString.Lazy as LBS
import Thrift.Transport
import Thrift.Types
class Protocol a where
readByte :: a -> IO LBS.ByteString
readVal :: a -> ThriftType -> IO ThriftVal
readMessage :: a -> ((Text, MessageType, Int32) -> IO b) -> IO b
writeVal :: a -> ThriftVal -> IO ()
writeMessage :: a -> (Text, MessageType, Int32) -> IO () -> IO ()
class Protocol a => StatelessProtocol a where
serializeVal :: a -> ThriftVal -> LBS.ByteString
deserializeVal :: a -> ThriftType -> LBS.ByteString -> ThriftVal
data ProtocolExnType
= PE_UNKNOWN
| PE_INVALID_DATA
| PE_NEGATIVE_SIZE
| PE_SIZE_LIMIT
| PE_BAD_VERSION
| PE_NOT_IMPLEMENTED
| PE_MISSING_REQUIRED_FIELD
deriving ( Eq, Show, Typeable )
data ProtocolExn = ProtocolExn ProtocolExnType String
deriving ( Show, Typeable )
instance Exception ProtocolExn
getTypeOf :: ThriftVal -> ThriftType
getTypeOf v = case v of
TStruct{} -> T_STRUCT Map.empty
TMap{} -> T_MAP T_VOID T_VOID
TList{} -> T_LIST T_VOID
TSet{} -> T_SET T_VOID
TBool{} -> T_BOOL
TByte{} -> T_BYTE
TI16{} -> T_I16
TI32{} -> T_I32
TI64{} -> T_I64
TString{} -> T_STRING
TBinary{} -> T_BINARY
TDouble{} -> T_DOUBLE
runParser :: (Protocol p, Show a) => p -> Parser a -> IO a
runParser prot p = refill >>= getResult . parse p
where
refill = handle handleEOF $ LBS.toStrict <$> readByte prot
getResult (Done _ a) = return a
getResult (Partial k) = refill >>= getResult . k
getResult f = throw $ ProtocolExn PE_INVALID_DATA (show f)
handleEOF :: SomeException -> IO BS.ByteString
handleEOF = const $ return mempty
-- | Converts a ByteString to a floating point number.
-- The ByteString is assumed to be encoded in network order (big endian);
-- therefore the behavior of this function varies based on whether the local
-- machine is big endian or little endian.
bsToDouble :: BS.ByteString -> Double
bsToDoubleLE :: BS.ByteString -> Double
#if __BYTE_ORDER == __LITTLE_ENDIAN
bsToDouble bs = unsafeDupablePerformIO $ unsafeUseAsCString bs castBsSwapped
bsToDoubleLE bs = unsafeDupablePerformIO $ unsafeUseAsCString bs castBs
#else
bsToDouble bs = unsafeDupablePerformIO $ unsafeUseAsCString bs castBs
bsToDoubleLE bs = unsafeDupablePerformIO $ unsafeUseAsCString bs castBsSwapped
#endif
castBsSwapped chrPtr = do
w <- peek (castPtr chrPtr)
poke (castPtr chrPtr) (byteSwap w)
peek (castPtr chrPtr)
castBs = peek . castPtr
-- | Swap endianness of a 64-bit word
byteSwap :: Word64 -> Word64
byteSwap w = (w `shiftL` 56 .&. 0xFF00000000000000) .|.
(w `shiftL` 40 .&. 0x00FF000000000000) .|.
(w `shiftL` 24 .&. 0x0000FF0000000000) .|.
(w `shiftL` 8 .&. 0x000000FF00000000) .|.
(w `shiftR` 8 .&. 0x00000000FF000000) .|.
(w `shiftR` 24 .&. 0x0000000000FF0000) .|.
(w `shiftR` 40 .&. 0x000000000000FF00) .|.
(w `shiftR` 56 .&. 0x00000000000000FF)
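-- Illustrative note (added): byteSwap reverses the byte order of a 64-bit
-- word, e.g. byteSwap 0x0102030405060708 == 0x0807060504030201, which is
-- what the castBsSwapped path above relies on when reading network-order
-- bytes on a little-endian host.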
| johnbelamaric/themis | vendor/github.com/apache/thrift/lib/hs/src/Thrift/Protocol.hs | apache-2.0 | 4,484 | 0 | 14 | 870 | 1,059 | 587 | 472 | 91 | 12 |
-- This one should succeed; M.x is unambiguous
module ShouldCompile (module M) where
import Rn043_A as M -- x, M.x
import Rn043_B -- x, Rn043_A.x
| olsner/ghc | testsuite/tests/rename/should_compile/rn043.hs | bsd-3-clause | 161 | 0 | 4 | 40 | 21 | 16 | 5 | 3 | 0 |
-- Check overlap in n+k patterns
{-# LANGUAGE NPlusKPatterns #-}
module Foo where
g :: Int -> Int
g (x+1) = x
g y = y
g _ = 0 -- Overlapped
h :: Int -> Int
h (x+1) = x
h _ = 0 -- Not overlapped
| olsner/ghc | testsuite/tests/deSugar/should_compile/ds056.hs | bsd-3-clause | 218 | 0 | 9 | 72 | 84 | 45 | 39 | 9 | 1 |
{-# LANGUAGE ConstrainedClassMethods #-}
module Main where
class C a where
op :: (Show a, Show b) => a -> b -> String
-- This class op adds a constraint on 'a'
-- In GHC 7.0 this is fine, and it's a royal
-- pain to reject it when in H98 mode, so
-- I'm just allowing it
instance C Int where
op x y = show x ++ " " ++ show y
main = print (op (1::Int) 2)
| urbanslug/ghc | testsuite/tests/typecheck/should_fail/tcfail149.hs | bsd-3-clause | 368 | 0 | 9 | 94 | 100 | 54 | 46 | 7 | 1 |
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses, TypeFamilies,
FlexibleInstances #-}
module T4135a where
import Control.Monad
import Language.Haskell.TH
class Foo a where
type FooType a
createInstance' :: Q Type -> Q Dec
createInstance' t = liftM head [d|
instance Foo $t where
type FooType $t = String |]
| urbanslug/ghc | testsuite/tests/th/T4135a.hs | bsd-3-clause | 341 | 0 | 6 | 75 | 61 | 35 | 26 | 11 | 1 |
module Print1 where
main :: IO ()
main = putStrLn "Hello world!"
| nkbt/haskell-book | 3.3.hsproj/Print1.hs | mit | 68 | 0 | 6 | 15 | 22 | 12 | 10 | 3 | 1 |
type Church a = (a -> a) -> a -> a
zero :: Church a
zero = \s z -> z
one :: Church a
one = \s z -> s z
two :: Church a
two = \s z -> (s . s) z
-- lambda eta reduction, gets rid of z
three :: Church a
three = \s -> (s . s . s)
church2string :: Church String -> String
church2string x = x ('*':) ""
church2int :: Church Integer -> Integer
church2int x = x (+1) 0
-- operators
-- implement addition starting from y as the new z for x
cadd :: Church a -> Church a -> Church a
cadd x y = \s z -> x s (y s z)
-- implement with y times s as the super-succ function
-- \s z -> (x . y) s z :: \s z -> x (y s) z
cmul :: Church a -> Church a -> Church a
cmul x y = x . y
-- does not compile: y x forces y to be used at type Church (a -> a),
-- which the signature Church a -> Church a -> Church a cannot express
--cexp :: Church a -> Church a -> Church a
--cexp x y = y x
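-- Illustrative checks (added): with the converters above,
--   church2int (cadd two three) == 5
--   church2int (cmul two three) == 6
--   church2string two           == "**"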
| anwb/fp-one-on-one | church-numerals.hs | mit | 810 | 0 | 8 | 260 | 279 | 148 | 131 | 17 | 1 |
{- |
module: $Header$
description: OpenTheory packages
license: MIT
maintainer: Joe Leslie-Hurd <[email protected]>
stability: provisional
portability: portable
-}
module HOL.OpenTheory.Package
where
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (newEmptyMVar,putMVar,readMVar)
import Control.Monad (foldM,guard)
import qualified Data.Char as Char
import qualified Data.List as List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import System.FilePath ((</>),(<.>),takeDirectory)
import qualified System.Process
import Text.Parsec ((<|>))
import qualified Text.Parsec as Parsec
import Text.Parsec.Text.Lazy ()
import Text.PrettyPrint ((<>),(<+>),($+$))
import qualified Text.PrettyPrint as PP
import HOL.OpenTheory.Article (readArticle)
import HOL.OpenTheory.Interpret (Interpret)
import qualified HOL.OpenTheory.Interpret as Interpret
import HOL.Parse
import HOL.Print
import HOL.Theory (Theory)
import qualified HOL.Theory as Theory
-------------------------------------------------------------------------------
-- Package names
-------------------------------------------------------------------------------
newtype Name = Name {destName :: String}
deriving (Eq,Ord,Show)
instance Printable Name where
toDoc = PP.text . destName
instance Parsable Name where
parser = do
c <- component
cs <- Parsec.many (Parsec.try (Parsec.char sep >> component))
return $ Name (List.intercalate [sep] (c : cs))
where
component = do
h <- Parsec.lower
t <- Parsec.many (Parsec.lower <|> Parsec.digit)
return (h : t)
sep = '-'
-------------------------------------------------------------------------------
-- Package versions
-------------------------------------------------------------------------------
newtype Version = Version {destVersion :: String}
deriving (Eq,Ord,Show)
instance Printable Version where
toDoc = PP.text . destVersion
instance Parsable Version where
parser = do
cs <- Parsec.sepBy1 component (Parsec.char sep)
return $ Version (List.intercalate [sep] cs)
where
component = Parsec.many1 Parsec.digit
sep = '.'
-------------------------------------------------------------------------------
-- Packages are stored in repos as name-version
-------------------------------------------------------------------------------
data NameVersion = NameVersion {name :: Name, version :: Version}
deriving (Eq,Ord,Show)
instance Printable NameVersion where
toDoc (NameVersion n v) = toDoc n <> PP.char '-' <> toDoc v
instance Parsable NameVersion where
parser = do
n <- parser
_ <- Parsec.char '-'
v <- parser
return $ NameVersion n v
-------------------------------------------------------------------------------
-- Information formatted as key: value
-------------------------------------------------------------------------------
data KeyValue = KeyValue Name String
deriving (Eq,Ord,Show)
instance Printable KeyValue where
toDoc (KeyValue k v) = toDoc k <> PP.char ':' <+> PP.text v
instance Parsable KeyValue where
parser = do
Parsec.skipMany spaceParser
n <- parser
Parsec.skipMany spaceParser
_ <- Parsec.char ':'
Parsec.skipMany spaceParser
v <- Parsec.many lineParser
return $ KeyValue n (List.dropWhileEnd Char.isSpace v)
printKeyValue :: Printable a => Name -> a -> KeyValue
printKeyValue n a = KeyValue n (toString a)
matchKeyValue :: Name -> KeyValue -> Maybe String
matchKeyValue n (KeyValue k v) = if k == n then Just v else Nothing
parseKeyValue :: Parsable a => Name -> KeyValue -> Maybe a
parseKeyValue n v = do
s <- matchKeyValue n v
a <- fromString s
return a
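-- Illustrative sketch (added, not part of the original module): a metadata
-- line such as "author: Joe Leslie-Hurd" parses to
-- KeyValue (Name "author") "Joe Leslie-Hurd", and applying
-- matchKeyValue (Name "author") to that value gives back Just the string
-- (assuming 'fromString' from HOL.Parse runs the 'parser' above on the
-- whole input).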
-------------------------------------------------------------------------------
-- Package information
-------------------------------------------------------------------------------
newtype Info = Info {destInfo :: [KeyValue]}
deriving (Eq,Ord,Show)
nullInfo :: Info -> Bool
nullInfo = null . destInfo
appendInfo :: Info -> Info -> Info
appendInfo (Info l) (Info l') = Info (l ++ l')
concatInfo :: [Info] -> Info
concatInfo = Info . concat . map destInfo
firstInfo :: (KeyValue -> Maybe a) -> Info -> Maybe (a,Info)
firstInfo f = g [] . destInfo
where
g _ [] = Nothing
g l (h : t) = case f h of
Just a -> Just (a, Info (foldl (flip (:)) t l))
Nothing -> g (h : l) t
firstGetInfo :: [Info -> Maybe (a,Info)] -> Info -> Maybe (a,Info)
firstGetInfo [] _ = Nothing
firstGetInfo (f : fs) i =
case f i of
Nothing -> firstGetInfo fs i
x -> x
mapGetInfo :: (a -> b) -> (Info -> Maybe (a,Info)) -> Info -> Maybe (b,Info)
mapGetInfo f g = let f0 (a,i) = (f a, i) in fmap f0 . g
maybeGetInfo :: (Info -> Maybe (a,Info)) -> Info -> (Maybe a, Info)
maybeGetInfo f i =
case f i of
Just (a,i') -> (Just a, i')
Nothing -> (Nothing,i)
listGetInfo :: (Info -> Maybe (a,Info)) -> Info -> ([a], Info)
listGetInfo f = g []
where
g l i = case f i of
Just (a,i') -> g (a : l) i'
Nothing -> (l,i)
instance Printable Info where
toDoc = PP.vcat . map toDoc . destInfo
instance Parsable Info where
parser = fmap Info $ Parsec.endBy parser eolParser
class Informative a where
toInfo :: a -> Info
getInfo :: Info -> Maybe (a,Info)
fromInfo :: Info -> Maybe a
fromInfo i = do
(a,i') <- getInfo i
guard $ nullInfo i'
return a
instance Informative a => Informative [a] where
toInfo = concatInfo . map toInfo
getInfo = Just . listGetInfo getInfo
instance (Informative a, Informative b) => Informative (a,b) where
toInfo (a,b) = appendInfo (toInfo a) (toInfo b)
getInfo i = do
(a,i') <- getInfo i
(b,i'') <- getInfo i'
return ((a,b),i'')
-------------------------------------------------------------------------------
-- Package files
-------------------------------------------------------------------------------
newtype File = File {destFile :: FilePath}
deriving (Eq,Ord,Show)
instance Printable File where
toDoc = PP.doubleQuotes . PP.text . destFile
instance Parsable File where
parser = do
_ <- Parsec.char '"'
f <- Parsec.many (Parsec.noneOf "\"\n")
_ <- Parsec.char '"'
return $ File f
-------------------------------------------------------------------------------
-- Interpretations
-------------------------------------------------------------------------------
data Interpretation =
Interpret Interpret.Rename
| Interpretation File
deriving (Eq,Ord,Show)
instance Informative Interpretation where
toInfo (Interpret i) = Info [printKeyValue (Name "interpret") i]
toInfo (Interpretation f) = Info [printKeyValue (Name "interpretation") f]
getInfo = firstGetInfo [getInterpret,getInterpretation]
where
getInterpret =
mapGetInfo Interpret
(firstInfo (parseKeyValue (Name "interpret")))
getInterpretation =
mapGetInfo Interpretation
(firstInfo (parseKeyValue (Name "interpretation")))
readInterpretation :: FilePath -> [Interpretation] -> IO Interpret
readInterpretation dir ints = do
rs <- mapM readRen ints
return $ Interpret.fromRenamesUnsafe (Interpret.concatRenames rs)
where
readRen (Interpret r) = return $ Interpret.Renames [r]
readRen (Interpretation f) = fromTextFile (dir </> destFile f)
-------------------------------------------------------------------------------
-- Theory blocks
-------------------------------------------------------------------------------
data Operation =
Article File
| Include {package :: NameVersion, checksum :: Maybe String}
| Union
deriving (Eq,Ord,Show)
data Block =
Block
{block :: Name,
imports :: [Name],
interpret :: [Interpretation],
operation :: Operation}
deriving (Eq,Ord,Show)
instance Informative Operation where
toInfo (Article f) = Info [printKeyValue (Name "article") f]
toInfo (Include p c) = Info (pv : cv)
where
pv = printKeyValue (Name "package") p
cv = case c of
Just s -> [KeyValue (Name "checksum") s]
Nothing -> []
toInfo Union = Info []
getInfo =
firstGetInfo [getArticle,getInclude,getUnion]
where
getArticle = mapGetInfo Article getArticleFile
getInclude i = do
(p,i') <- getPackage i
let (c,i'') = maybeGetInfo getChecksum i'
return (Include p c, i'')
getUnion i = Just (Union,i)
getArticleFile = firstInfo (parseKeyValue (Name "article"))
getPackage = firstInfo (parseKeyValue (Name "package"))
getChecksum = firstInfo (matchKeyValue (Name "checksum"))
mkBlock :: Name -> Info -> Maybe Block
mkBlock n info = do
let (imp,info') = listGetInfo getImport info
(int,op) <- fromInfo info'
guard (op /= Union || null int)
return $ Block n imp int op
where
getImport = firstInfo (parseKeyValue (Name "import"))
destBlock :: Block -> (Name,Info)
destBlock (Block n imp int op) =
(n, appendInfo impInfo (toInfo (int,op)))
where
impInfo = Info (map (printKeyValue (Name "import")) imp)
instance Printable Block where
toDoc b =
(toDoc n <+> PP.lbrace) $+$
PP.nest 2 (toDoc i) $+$
PP.rbrace
where
(n,i) = destBlock b
instance Parsable Block where
parser = do
n <- opener
i <- parser
closer
case mkBlock n i of
Just b -> return b
Nothing -> Parsec.parserFail "couldn't parse block"
where
opener = do
Parsec.skipMany spaceParser
n <- parser
Parsec.skipMany spaceParser
_ <- Parsec.char '{'
Parsec.skipMany spaceParser
eolParser
return n
closer = do
Parsec.skipMany spaceParser
_ <- Parsec.char '}'
Parsec.skipMany spaceParser
-------------------------------------------------------------------------------
-- Packages
-------------------------------------------------------------------------------
data Package =
Package
{information :: Info,
source :: [Block]}
deriving (Eq,Ord,Show)
instance Printable Package where
toDoc (Package i bs) =
PP.vcat (List.intersperse (PP.text "") (toDoc i : map toDoc bs))
instance Parsable Package where
parser = do
Parsec.skipMany eolParser
i <- parser
Parsec.skipMany eolParser
bs <- Parsec.sepEndBy parser (Parsec.skipMany1 eolParser)
return $ Package i bs
requires :: Package -> [Name]
requires = fst . listGetInfo getRequires . information
where
getRequires = firstInfo (parseKeyValue (Name "requires"))
-------------------------------------------------------------------------------
-- Interface to the OpenTheory package repository
-------------------------------------------------------------------------------
packageFile :: FilePath -> Name -> FilePath
packageFile d (Name n) = d </> n <.> "thy"
opentheory :: [String] -> IO String
opentheory args = System.Process.readProcess "opentheory" args []
opentheoryDirectory :: String -> IO FilePath
opentheoryDirectory s = do
dir <- opentheory ["info","--directory",s]
return $ List.dropWhileEnd Char.isSpace dir
directory :: Name -> IO FilePath
directory = opentheoryDirectory . toString
directoryVersion :: NameVersion -> IO FilePath
directoryVersion = opentheoryDirectory . toString
-------------------------------------------------------------------------------
-- Dependencies between theory blocks
-------------------------------------------------------------------------------
newtype Blocks = Blocks {destBlocks :: [Block]}
deriving (Eq,Ord,Show)
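-- | Collect the theory blocks reachable from the block named main (which
-- ends up at the head of the list), checking for import cycles, missing
-- blocks and duplicate block names.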
mkBlocks :: [Block] -> Blocks
mkBlocks bl = Blocks (check [] [] (Name "main"))
where
check st bs n =
if any ((==) n . block) bs then bs
else if notElem n st then add (n : st) bs (get n)
else error $ "import cycle including theory block " ++ toString n
add st bs b = b : foldl (check st) bs (imports b)
get n =
case filter ((==) n . block) bl of
[] -> error $ "missing theory block " ++ toString n
[b] -> b
_ : _ : _ -> error $ "multiple theory blocks named " ++ toString n
-------------------------------------------------------------------------------
-- Reading one package
-------------------------------------------------------------------------------
readVersion :: Theory -> Interpret -> NameVersion -> IO Theory
readVersion thy int nv = do
dir <- directoryVersion nv
readPackageFile thy int (packageFile dir (name nv))
readPackageFile :: Theory -> Interpret -> FilePath -> IO Theory
readPackageFile thy int file = do
pkg <- fromTextFile file
readPackage thy int (takeDirectory file) pkg
readPackage :: Theory -> Interpret -> FilePath -> Package -> IO Theory
readPackage thy int dir pkg = readBlocks thy int dir (mkBlocks (source pkg))
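-- | Read a package's theory blocks concurrently: each block gets an MVar,
-- a worker thread per block waits on the blocks it imports, and the result
-- of the main block (the head of the list) is returned.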
readBlocks :: Theory -> Interpret -> FilePath -> Blocks -> IO Theory
readBlocks thy int dir (Blocks bs) = do
vs <- foldM initT Map.empty bs
mapM_ (forkIO . mkT vs) bs
readMVar (getT vs (block (head bs)))
where
mkT vs b = do
ts <- mapM (readMVar . getT vs) (imports b)
t <- readBlock thy int dir ts b
putMVar (getT vs (block b)) t
getT vs n =
case Map.lookup n vs of
Just v -> v
Nothing -> error $ "bad theory block " ++ show n
initT vs b = do
v <- newEmptyMVar
return $ Map.insert (block b) v vs
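-- | Read a single theory block: its interpretation is composed with the
-- enclosing one, and the operation determines whether an article file is
-- read, another package version is included, or the imports are unioned.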
readBlock :: Theory -> Interpret -> FilePath -> [Theory] -> Block -> IO Theory
readBlock envThy envInt dir impThys b = do
blockInt <- readInterpretation dir (interpret b)
let int = Interpret.compose blockInt envInt
case operation b of
Article f -> do
ths <- readArticle thy int (dir </> destFile f)
return $ Theory.fromThmSet ths
Include {package = nv} -> readVersion thy int nv
Union -> return impThy
where
thy = Theory.union envThy impThy
impThy = Theory.unionList impThys
-------------------------------------------------------------------------------
-- Dependencies between packages
-------------------------------------------------------------------------------
newtype Requires = Requires (Map Name ([Name],FilePath,Blocks))
emptyRequires :: Requires
emptyRequires = Requires Map.empty
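-- | Add a package and, transitively, everything it requires, checking for
-- requires cycles along the way.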
addRequires :: Requires -> Name -> IO Requires
addRequires = add
where
add (Requires m) n = fmap Requires $ check [] m n
check st m n =
if Map.member n m then return m
else if notElem n st then pkg (n : st) m n
else error $ "requires cycle including package " ++ toString n
pkg st m n = do
d <- directory n
p <- fromTextFile (packageFile d n)
let r = requires p
let s = mkBlocks (source p)
foldM (check st) (Map.insert n (r,d,s) m) r
fromListRequires :: [Name] -> IO Requires
fromListRequires = foldM addRequires emptyRequires
-------------------------------------------------------------------------------
-- Reading packages
-------------------------------------------------------------------------------
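-- | Read a list of packages concurrently, one worker thread per required
-- package, returning the theories for the requested package names.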
readList :: [Name] -> IO [Theory]
readList ns = do
req <- mkReq
vs <- foldM initT Map.empty req
mapM_ (forkIO . mkT vs) req
mapM (readMVar . getT vs) ns
where
mkReq = do
Requires m <- fromListRequires ns
return $ Map.toList m
mkT vs (n,(r,d,s)) = do
ts <- mapM (readMVar . getT vs) r
let thy = Theory.unionList (Theory.standard : ts)
t <- readBlocks thy Interpret.empty d s
putMVar (getT vs n) t
getT vs n =
case Map.lookup n vs of
Just v -> v
Nothing -> error $ "bad package " ++ show n
initT vs (n,_) = do
v <- newEmptyMVar
return $ Map.insert n v vs
| gilith/hol | src/HOL/OpenTheory/Package.hs | mit | 15,844 | 4 | 17 | 3,453 | 5,144 | 2,625 | 2,519 | 350 | 5 |
module TicTacToe.PlayerSpec where
import Test.Hspec
import TicTacToe.Player (Player(..))
import qualified TicTacToe.Player as Player
spec :: Spec
spec = describe "TicTacToe.Player" $ do
describe "show" $
it "shows the player" $ do
show O `shouldBe` "Naughts"
show X `shouldBe` "Crosses"
describe "switch" $
it "switch players" $ do
Player.switch O `shouldBe` X
Player.switch X `shouldBe` O
| tomphp/haskell-tictactoe | test/TicTacToe/PlayerSpec.hs | mit | 440 | 0 | 13 | 105 | 136 | 71 | 65 | 14 | 1 |
module QualifiedDot where
twoDots = (Prelude..) | Pnom/haskell-ast-pretty | Test/examples/QualifiedDot.hs | mit | 48 | 0 | 5 | 6 | 13 | 9 | 4 | 2 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import System.Console.ANSI
import System.IO (hFlush, stdout)
import System.Directory
import System.FilePath.Posix (takeDirectory, (</>))
import Data.List (intercalate)
import Data.List.Split (splitOneOf)
import Data.Char (toLower, toUpper, isNumber, isLetter)
import Data.Data
import Text.Hastache
import Text.Hastache.Context
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy.Char8 as LZ
import Paths_holyhaskell
data Project = Project {
projectName :: String
, moduleName :: String
, author :: String
, mail :: String
, ghaccount :: String
, synopsis :: String
, year :: String
} deriving (Data, Typeable)
main :: IO ()
main = do
pkgFilePath <- getDataFileName "scaffold/LICENSE"
templateContent <- readFile pkgFilePath
intro
project <- ask "project name"
ioassert (checkProjectName project)
"Use only letters, numbers, spaces and dashes please"
let projectname = projectNameFromString project
modulename = capitalize project
in_author <- ask "name"
in_email <- ask "email"
in_ghaccount <- ask "github account"
in_synopsis <- ask "project in less than a dozen words"
current_year <- getCurrentYear
createProject $ Project projectname modulename in_author in_email
in_ghaccount in_synopsis current_year
end
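-- | Create the project directory and generate its files from the scaffold
-- templates.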
createProject :: Project -> IO ()
createProject p = do
let context = mkGenericContext p
createDirectory (projectName p)
setCurrentDirectory (projectName p)
genFile context "gitignore" $ ".gitignore"
genFile context "project.cabal" $ (projectName p) ++ ".cabal"
genFile context "src/Main.hs" $ "src" </> "Main.hs"
genFile :: MuContext IO -> FilePath -> FilePath -> IO ()
genFile context filename outputFileName = do
pkgfileName <- getDataFileName ("scaffold/"++filename)
template <- BS.readFile pkgfileName
transformedFile <- hastacheStr defaultConfig template context
createDirectoryIfMissing True (takeDirectory outputFileName)
LZ.writeFile outputFileName transformedFile
ioassert :: Bool -> String -> IO ()
ioassert True _ = return ()
ioassert False str = error str
checkProjectName :: String -> Bool
checkProjectName [] = False
checkProjectName str =
all (\c -> isLetter c || isNumber c || c=='-' || c== ' ') str
colorPutStr :: Color -> String -> IO ()
colorPutStr color str = do
setSGR [ SetColor Foreground Dull color
, SetConsoleIntensity NormalIntensity
]
putStr str
setSGR []
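-- | Lower-case the string and join its words with dashes to make a
-- cabal-friendly project name.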
projectNameFromString :: String -> String
projectNameFromString str = intercalate "-" (splitOneOf " -" (map toLower str))
bk :: String -> IO ()
bk str = colorPutStr Green ("Bridgekeeper: " ++ str ++ "\n")
bkn :: String -> IO ()
bkn str = colorPutStr Green ("Bridgekeeper: " ++ str)
you :: String -> IO ()
you str = colorPutStr Yellow ("You: " ++ str ++ "\n")
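-- | Capitalize each space- or dash-separated word and concatenate them to
-- form the module name.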
capitalize :: String -> String
capitalize str = concatMap capitalizeWord (splitOneOf " -" str)
where
capitalizeWord :: String -> String
capitalizeWord (x:xs) = toUpper x:map toLower xs
capitalizeWord _ = []
ask :: String -> IO String
ask info = do
bk $ "What is your " ++ info ++ "?"
putStr "> "
hFlush stdout
getLine
intro :: IO ()
intro = do
bk "Stop!"
bk "Who would cross the Bridge of Death"
bk "must answer me these questions three,"
bk "ere the other side he see."
you "Ask me the questions, bridgekeeper, I am not afraid.\n"
end :: IO ()
end = do
putStrLn "\n\n"
bk "What... is the air-speed velocity of an unladen swallow?"
you "What do you mean? An African or European swallow?"
bk "Huh? I... I don't know that."
putStrLn "[the bridgekeeper is thrown over]"
bk "Auuuuuuuuuuuugh"
putStrLn "Sir Bedevere: How do you know so much about swallows?"
you "Well, you have to know these things when you're a king, you know."
| duikboot/holyhaskell | src/Main.hs | mit | 4,063 | 0 | 13 | 945 | 1,110 | 542 | 568 | 106 | 2 |
module Simple where
import Prelude (Bool(..), otherwise, Maybe(..), Show)
data Weekday = Monday | Tuesday | Wednesday | Thursday | Friday
| Saturday | Sunday
deriving Show
isWeekend :: Weekday -> Bool
isWeekend Saturday = True
isWeekend Sunday = True
isWeekend _ = False
lazyApply :: Weekday -> (a -> b) -> a -> Maybe b
lazyApply day f x | isWeekend day = Nothing -- nah
| otherwise = Just (f x)
not :: Bool -> Bool
not True = False
not False = True
map :: (a -> b) -> [a] -> [b]
map _ [] = []
map f (x:xs) = f x : map f xs
| antalsz/hs-to-coq | examples/simple/Simple.hs | mit | 594 | 0 | 8 | 180 | 251 | 136 | 115 | 18 | 1 |
module Parse.Post.Internal
(
PostBodyHtml (PostBodyHtml)
, PostHeaderHtml (PostHeaderHtml)
, PostHtml ( .. )
) where
import Data.ByteString.Char8 (ByteString)
import Text.HTML.TagSoup (Tag)
data PostHeaderHtml = PostHeaderHtml [Tag ByteString] deriving (Show)
data PostBodyHtml = PostBodyHtml [Tag ByteString] deriving (Show)
data PostHtml = PostHtml { header :: PostHeaderHtml, body :: PostBodyHtml } deriving (Show)
| JacobLeach/xen-parsing | app/Parse/Post/Internal.hs | mit | 433 | 0 | 8 | 66 | 124 | 76 | 48 | 14 | 0 |
module Lexer where
import Text.Parsec.String (Parser)
import Text.Parsec.Language (emptyDef)
import qualified Text.Parsec.Token as Tok
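-- Token parser configured with the language's comment syntax, reserved
-- operators and reserved names.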
lexer :: Tok.TokenParser ()
lexer = Tok.makeTokenParser styles
where ops = ["+", "*", "-", "/"]
names = ["def", "extern"]
styles = emptyDef {
Tok.commentLine = "#",
Tok.reservedOpNames = ops,
Tok.reservedNames = names
}
integer :: Parser Integer
integer = Tok.integer lexer
float :: Parser Double
float = Tok.float lexer
identifier :: Parser String
identifier = Tok.identifier lexer
parens :: Parser a -> Parser a
parens = Tok.parens lexer
semiSep :: Parser a -> Parser [a]
semiSep = Tok.semiSep lexer
commaSep :: Parser a -> Parser [a]
commaSep = Tok.commaSep lexer
reserved :: String -> Parser ()
reserved = Tok.reserved lexer
reservedOp :: String -> Parser ()
reservedOp = Tok.reservedOp lexer
| tgkokk/kaleidoscope | src/Lexer.hs | mit | 949 | 0 | 8 | 241 | 301 | 164 | 137 | 28 | 1 |