code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64 5 to 1.03M) | n_ast_errors (int64 0 to 53.9k) | ast_max_depth (int64 2 to 4.17k) | n_whitespaces (int64 0 to 365k) | n_ast_nodes (int64 3 to 317k) | n_ast_terminals (int64 1 to 171k) | n_ast_nonterminals (int64 1 to 146k) | loc (int64 -1 to 37.3k) | cycloplexity (int64 -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
import Test.Hspec (Spec, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import School (add, empty, grade, sorted)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = do
let fromList = foldr (uncurry add) empty
let fromGrade g = fromList . zip (repeat g)
it "add student" $
sorted (add 2 "Aimee" empty) `shouldBe` [(2, ["Aimee"])]
it "add more students in same class" $
sorted (fromGrade 2 ["James", "Blair", "Paul"])
`shouldBe` [(2, ["Blair", "James", "Paul"])]
it "add students to different grades" $
sorted (fromList [(3, "Chelsea"), (7, "Logan")])
`shouldBe` [(3, ["Chelsea"]), (7, ["Logan"])]
it "empty list if no students" $
sorted empty `shouldBe` []
it "get students in a grade" $
grade 5 (fromList [(5, "Franklin"), (5, "Bradley"), (1, "Jeff")])
`shouldBe` ["Bradley", "Franklin"]
it "get students in a non-existent grade" $
grade 1 empty `shouldBe` []
it "sorted school" $
sorted (fromList [ (4, "Jennifer" )
, (6, "Kareem" )
, (4, "Christopher")
, (3, "Kyle" ) ] )
`shouldBe` [ (3, ["Kyle" ] )
, (4, ["Christopher", "Jennifer"] )
, (6, ["Kareem" ] ) ]
-- 5199c155fa7ecd50642e38cbe14205fcb21cdd6b
| exercism/xhaskell | exercises/practice/grade-school/test/Tests.hs | mit | 1,651 | 0 | 13 | 609 | 502 | 286 | 216 | 33 | 1 |
{-# LANGUAGE QuasiQuotes, TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE Rank2Types #-}
-- | A Yesod plugin for Authentication via e-mail
--
-- This plugin works out of the box once you implement a few methods on the type class
-- that tell the plugin how to interoperate with your user data storage (your database).
-- However, almost everything is customizable by overriding more methods on the type class.
-- In addition, you can send all form submissions via JSON and completely control the user's flow.
-- The standard registration e-mail flow is:
--
-- 1) A user registers a new e-mail address, and an e-mail is sent there.
-- 2) The user clicks on the registration link in the e-mail.
-- Note that at this point they are actually logged in (without a password).
-- That means that when they log out they will need to reset their password.
-- 3) The user sets their password and is redirected to the site.
-- 4) The user can now
-- * logout and sign in
-- * reset their password
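--
-- A minimal sketch of wiring the plugin into a site (the @App@ type, @UserId@,
-- @HomeR@, and the @runDB@, @sendRegistrationMail@ and @lookup*@\/@store*@ helpers
-- below are hypothetical placeholders, not part of this module):
--
-- > instance YesodAuthEmail App where
-- >     type AuthEmailId App = UserId
-- >
-- >     afterPasswordRoute _        = HomeR
-- >     addUnverified email verkey  = runDB $ insertUnverifiedUser email verkey
-- >     sendVerifyEmail email _ url = sendRegistrationMail email url
-- >     getVerifyKey uid            = runDB $ lookupVerkey uid
-- >     setVerifyKey uid key        = runDB $ storeVerkey uid key
-- >     verifyAccount uid           = runDB $ markVerified uid
-- >     getPassword uid             = runDB $ lookupSaltedPassword uid
-- >     setPassword uid pass        = runDB $ storeSaltedPassword uid pass
-- >     getEmailCreds identifier    = runDB $ lookupEmailCreds identifier
-- >     getEmail uid                = runDB $ lookupEmail uid
--
-- The plugin is then registered in the site's 'YesodAuth' instance,
-- e.g. @authPlugins _ = [authEmail]@.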
module Yesod.Auth.Email
( -- * Plugin
authEmail
, YesodAuthEmail (..)
, EmailCreds (..)
, saltPass
-- * Routes
, loginR
, registerR
, forgotPasswordR
, setpassR
, isValidPass
-- * Types
, Email
, VerKey
, VerUrl
, SaltedPass
, VerStatus
, Identifier
-- * Misc
, loginLinkKey
, setLoginLinkKey
-- * Default handlers
, defaultRegisterHandler
, defaultForgotPasswordHandler
, defaultSetPasswordHandler
) where
import Network.Mail.Mime (randomString)
import Yesod.Auth
import System.Random
import qualified Data.Text as TS
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Crypto.Hash.MD5 as H
import Data.ByteString.Base16 as B16
import Data.Text.Encoding (encodeUtf8, decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Data.Text (Text)
import Yesod.Core
import qualified Yesod.PasswordStore as PS
import qualified Text.Email.Validate
import qualified Yesod.Auth.Message as Msg
import Control.Applicative ((<$>), (<*>))
import Control.Monad (void)
import Yesod.Form
import Data.Time (getCurrentTime, addUTCTime)
import Safe (readMay)
loginR, registerR, forgotPasswordR, setpassR :: AuthRoute
loginR = PluginR "email" ["login"]
registerR = PluginR "email" ["register"]
forgotPasswordR = PluginR "email" ["forgot-password"]
setpassR = PluginR "email" ["set-password"]
verify :: Text -> Text -> AuthRoute -- FIXME
verify eid verkey = PluginR "email" ["verify", eid, verkey]
type Email = Text
type VerKey = Text
type VerUrl = Text
type SaltedPass = Text
type VerStatus = Bool
-- | An Identifier generalizes an email address to allow users to log in with
-- some other form of credentials (e.g., username).
--
-- Note that any of these other identifiers must not be valid email addresses.
--
-- Since 1.2.0
type Identifier = Text
-- | Data stored in a database for each e-mail address.
data EmailCreds site = EmailCreds
{ emailCredsId :: AuthEmailId site
, emailCredsAuthId :: Maybe (AuthId site)
, emailCredsStatus :: VerStatus
, emailCredsVerkey :: Maybe VerKey
, emailCredsEmail :: Email
}
class ( YesodAuth site
, PathPiece (AuthEmailId site)
, (RenderMessage site Msg.AuthMessage)
)
=> YesodAuthEmail site where
type AuthEmailId site
-- | Add a new email address to the database, but indicate that the address
-- has not yet been verified.
--
-- Since 1.1.0
addUnverified :: Email -> VerKey -> HandlerT site IO (AuthEmailId site)
-- | Send an email to the given address to verify ownership.
--
-- Since 1.1.0
sendVerifyEmail :: Email -> VerKey -> VerUrl -> HandlerT site IO ()
-- | Get the verification key for the given email ID.
--
-- Since 1.1.0
getVerifyKey :: AuthEmailId site -> HandlerT site IO (Maybe VerKey)
-- | Set the verification key for the given email ID.
--
-- Since 1.1.0
setVerifyKey :: AuthEmailId site -> VerKey -> HandlerT site IO ()
-- | Verify the email address on the given account.
--
-- Since 1.1.0
verifyAccount :: AuthEmailId site -> HandlerT site IO (Maybe (AuthId site))
-- | Get the salted password for the given account.
--
-- Since 1.1.0
getPassword :: AuthId site -> HandlerT site IO (Maybe SaltedPass)
-- | Set the salted password for the given account.
--
-- Since 1.1.0
setPassword :: AuthId site -> SaltedPass -> HandlerT site IO ()
-- | Get the credentials for the given @Identifier@, which may be either an
-- email address or some other identification (e.g., username).
--
-- Since 1.2.0
getEmailCreds :: Identifier -> HandlerT site IO (Maybe (EmailCreds site))
-- | Get the email address for the given email ID.
--
-- Since 1.1.0
getEmail :: AuthEmailId site -> HandlerT site IO (Maybe Email)
-- | Generate a random alphanumeric string.
--
-- Since 1.1.0
randomKey :: site -> IO Text
randomKey _ = do
stdgen <- newStdGen
return $ TS.pack $ fst $ randomString 10 stdgen
-- | Route to send user to after password has been set correctly.
--
-- Since 1.2.0
afterPasswordRoute :: site -> Route site
-- | Does the user need to provide the current password in order to set a
-- new password?
--
-- Default: if the user logged in via an email link do not require a password.
--
-- Since 1.2.1
needOldPassword :: AuthId site -> HandlerT site IO Bool
needOldPassword aid' = do
mkey <- lookupSession loginLinkKey
case mkey >>= readMay . TS.unpack of
Just (aidT, time) | Just aid <- fromPathPiece aidT, toPathPiece (aid `asTypeOf` aid') == toPathPiece aid' -> do
now <- liftIO getCurrentTime
return $ addUTCTime (60 * 30) time <= now
_ -> return True
-- | Check that the given plain-text password meets minimum security standards.
--
-- Default: password is at least three characters.
checkPasswordSecurity :: AuthId site -> Text -> HandlerT site IO (Either Text ())
checkPasswordSecurity _ x
| TS.length x >= 3 = return $ Right ()
| otherwise = return $ Left "Password must be at least three characters"
-- | Response after sending a confirmation email.
--
-- Since 1.2.2
confirmationEmailSentResponse :: Text -> HandlerT site IO TypedContent
confirmationEmailSentResponse identifier = do
mr <- getMessageRender
selectRep $ do
provideJsonMessage (mr msg)
provideRep $ authLayout $ do
setTitleI Msg.ConfirmationEmailSentTitle
[whamlet|<p>_{msg}|]
where
msg = Msg.ConfirmationEmailSent identifier
-- | Additional normalization of email addresses, besides standard canonicalization.
--
-- Default: Lower case the email address.
--
-- Since 1.2.3
normalizeEmailAddress :: site -> Text -> Text
normalizeEmailAddress _ = TS.toLower
-- | Handler called to render the registration page. The
-- default works fine, but you may want to override it in
-- order to have a different DOM.
--
-- Default: 'defaultRegisterHandler'.
--
-- Since: 1.2.6.
registerHandler :: AuthHandler site Html
registerHandler = defaultRegisterHandler
-- | Handler called to render the \"forgot password\" page.
-- The default works fine, but you may want to override it in
-- order to have a different DOM.
--
-- Default: 'defaultForgotPasswordHandler'.
--
-- Since: 1.2.6.
forgotPasswordHandler :: AuthHandler site Html
forgotPasswordHandler = defaultForgotPasswordHandler
-- | Handler called to render the \"set password\" page. The
-- default works fine, but you may want to override it in
-- order to have a different DOM.
--
-- Default: 'defaultSetPasswordHandler'.
--
-- Since: 1.2.6.
setPasswordHandler ::
Bool
-- ^ Whether the old password is needed. If @True@, a
-- field for the old password should be presented.
-- Otherwise, just two fields for the new password are
-- needed.
-> AuthHandler site TypedContent
setPasswordHandler = defaultSetPasswordHandler
authEmail :: YesodAuthEmail m => AuthPlugin m
authEmail =
AuthPlugin "email" dispatch $ \tm ->
[whamlet|
$newline never
<form method="post" action="@{tm loginR}">
<table>
<tr>
<th>_{Msg.Email}
<td>
<input type="email" name="email" required>
<tr>
<th>_{Msg.Password}
<td>
<input type="password" name="password" required>
<tr>
<td colspan="2">
<button type=submit .btn .btn-success>
_{Msg.LoginViaEmail}
<a href="@{tm registerR}" .btn .btn-default>
_{Msg.RegisterLong}
|]
where
dispatch "GET" ["register"] = getRegisterR >>= sendResponse
dispatch "POST" ["register"] = postRegisterR >>= sendResponse
dispatch "GET" ["forgot-password"] = getForgotPasswordR >>= sendResponse
dispatch "POST" ["forgot-password"] = postForgotPasswordR >>= sendResponse
dispatch "GET" ["verify", eid, verkey] =
case fromPathPiece eid of
Nothing -> notFound
Just eid' -> getVerifyR eid' verkey >>= sendResponse
dispatch "POST" ["login"] = postLoginR >>= sendResponse
dispatch "GET" ["set-password"] = getPasswordR >>= sendResponse
dispatch "POST" ["set-password"] = postPasswordR >>= sendResponse
dispatch _ _ = notFound
getRegisterR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) Html
getRegisterR = registerHandler
-- | Default implementation of 'registerHandler'.
--
-- Since: 1.2.6
defaultRegisterHandler :: YesodAuthEmail master => AuthHandler master Html
defaultRegisterHandler = do
email <- newIdent
tp <- getRouteToParent
lift $ authLayout $ do
setTitleI Msg.RegisterLong
[whamlet|
<p>_{Msg.EnterEmail}
<form method="post" action="@{tp registerR}">
<div id="registerForm">
<label for=#{email}>_{Msg.Email}:
<input ##{email} type="email" name="email" width="150" autofocus>
<button .btn>_{Msg.Register}
|]
registerHelper :: YesodAuthEmail master
=> Bool -- ^ allow usernames?
-> Route Auth
-> HandlerT Auth (HandlerT master IO) TypedContent
registerHelper allowUsername dest = do
y <- lift getYesod
midentifier <- lookupPostParam "email"
let eidentifier = case midentifier of
Nothing -> Left Msg.NoIdentifierProvided
Just x
| Just x' <- Text.Email.Validate.canonicalizeEmail (encodeUtf8 x) ->
Right $ normalizeEmailAddress y $ decodeUtf8With lenientDecode x'
| allowUsername -> Right $ TS.strip x
| otherwise -> Left Msg.InvalidEmailAddress
case eidentifier of
Left route -> loginErrorMessageI dest route
Right identifier -> do
mecreds <- lift $ getEmailCreds identifier
registerCreds <-
case mecreds of
Just (EmailCreds lid _ _ (Just key) email) -> return $ Just (lid, key, email)
Just (EmailCreds lid _ _ Nothing email) -> do
key <- liftIO $ randomKey y
lift $ setVerifyKey lid key
return $ Just (lid, key, email)
Nothing
| allowUsername -> return Nothing
| otherwise -> do
key <- liftIO $ randomKey y
lid <- lift $ addUnverified identifier key
return $ Just (lid, key, identifier)
case registerCreds of
Nothing -> loginErrorMessageI dest (Msg.IdentifierNotFound identifier)
Just (lid, verKey, email) -> do
render <- getUrlRender
let verUrl = render $ verify (toPathPiece lid) verKey
lift $ sendVerifyEmail email verKey verUrl
lift $ confirmationEmailSentResponse identifier
postRegisterR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) TypedContent
postRegisterR = registerHelper False registerR
getForgotPasswordR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) Html
getForgotPasswordR = forgotPasswordHandler
-- | Default implementation of 'forgotPasswordHandler'.
--
-- Since: 1.2.6
defaultForgotPasswordHandler :: YesodAuthEmail master => AuthHandler master Html
defaultForgotPasswordHandler = do
tp <- getRouteToParent
email <- newIdent
lift $ authLayout $ do
setTitleI Msg.PasswordResetTitle
[whamlet|
<p>_{Msg.PasswordResetPrompt}
<form method="post" action="@{tp forgotPasswordR}">
<div id="registerForm">
<label for=#{email}>_{Msg.ProvideIdentifier}
<input ##{email} type=text name="email" width="150" autofocus>
<button .btn>_{Msg.SendPasswordResetEmail}
|]
postForgotPasswordR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) TypedContent
postForgotPasswordR = registerHelper True forgotPasswordR
getVerifyR :: YesodAuthEmail site
=> AuthEmailId site
-> Text
-> HandlerT Auth (HandlerT site IO) TypedContent
getVerifyR lid key = do
realKey <- lift $ getVerifyKey lid
memail <- lift $ getEmail lid
mr <- lift getMessageRender
case (realKey == Just key, memail) of
(True, Just email) -> do
muid <- lift $ verifyAccount lid
case muid of
Nothing -> invalidKey mr
Just uid -> do
lift $ setCreds False $ Creds "email-verify" email [("verifiedEmail", email)] -- FIXME uid?
lift $ setLoginLinkKey uid
let msgAv = Msg.AddressVerified
selectRep $ do
provideRep $ do
lift $ setMessageI msgAv
fmap asHtml $ redirect setpassR
provideJsonMessage $ mr msgAv
_ -> invalidKey mr
where
msgIk = Msg.InvalidKey
invalidKey mr = messageJson401 (mr msgIk) $ lift $ authLayout $ do
setTitleI msgIk
[whamlet|
$newline never
<p>_{msgIk}
|]
postLoginR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) TypedContent
postLoginR = do
(identifier, pass) <- lift $ runInputPost $ (,)
<$> ireq textField "email"
<*> ireq textField "password"
mecreds <- lift $ getEmailCreds identifier
maid <-
case ( mecreds >>= emailCredsAuthId
, emailCredsEmail <$> mecreds
, emailCredsStatus <$> mecreds
) of
(Just aid, Just email, Just True) -> do
mrealpass <- lift $ getPassword aid
case mrealpass of
Nothing -> return Nothing
Just realpass -> return $
if isValidPass pass realpass
then Just email
else Nothing
_ -> return Nothing
let isEmail = Text.Email.Validate.isValid $ encodeUtf8 identifier
case maid of
Just email ->
lift $ setCredsRedirect $ Creds
(if isEmail then "email" else "username")
email
[("verifiedEmail", email)]
Nothing ->
loginErrorMessageI LoginR $
if isEmail
then Msg.InvalidEmailPass
else Msg.InvalidUsernamePass
getPasswordR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) TypedContent
getPasswordR = do
maid <- lift maybeAuthId
case maid of
Nothing -> loginErrorMessageI LoginR Msg.BadSetPass
Just _ -> do
needOld <- maybe (return True) (lift . needOldPassword) maid
setPasswordHandler needOld
-- | Default implementation of 'setPasswordHandler'.
--
-- Since: 1.2.6
defaultSetPasswordHandler :: YesodAuthEmail master => Bool -> AuthHandler master TypedContent
defaultSetPasswordHandler needOld = do
tp <- getRouteToParent
pass0 <- newIdent
pass1 <- newIdent
pass2 <- newIdent
mr <- lift getMessageRender
selectRep $ do
provideJsonMessage $ mr Msg.SetPass
provideRep $ lift $ authLayout $ do
setTitleI Msg.SetPassTitle
[whamlet|
$newline never
<h3>_{Msg.SetPass}
<form method="post" action="@{tp setpassR}">
<table>
$if needOld
<tr>
<th>
<label for=#{pass0}>Current Password
<td>
<input ##{pass0} type="password" name="current" autofocus>
<tr>
<th>
<label for=#{pass1}>_{Msg.NewPass}
<td>
<input ##{pass1} type="password" name="new" :not needOld:autofocus>
<tr>
<th>
<label for=#{pass2}>_{Msg.ConfirmPass}
<td>
<input ##{pass2} type="password" name="confirm">
<tr>
<td colspan="2">
<input type="submit" value=_{Msg.SetPassTitle}>
|]
postPasswordR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) TypedContent
postPasswordR = do
maid <- lift maybeAuthId
case maid of
Nothing -> loginErrorMessageI LoginR Msg.BadSetPass
Just aid -> do
tm <- getRouteToParent
needOld <- lift $ needOldPassword aid
if not needOld then confirmPassword aid tm else do
current <- lift $ runInputPost $ ireq textField "current"
mrealpass <- lift $ getPassword aid
case mrealpass of
Nothing ->
lift $ loginErrorMessage (tm setpassR) "You do not currently have a password set on your account"
Just realpass
| isValidPass current realpass -> confirmPassword aid tm
| otherwise ->
lift $ loginErrorMessage (tm setpassR) "Invalid current password, please try again"
where
msgOk = Msg.PassUpdated
confirmPassword aid tm = do
(new, confirm) <- lift $ runInputPost $ (,)
<$> ireq textField "new"
<*> ireq textField "confirm"
if new /= confirm
then loginErrorMessageI setpassR Msg.PassMismatch
else do
isSecure <- lift $ checkPasswordSecurity aid new
case isSecure of
Left e -> lift $ loginErrorMessage (tm setpassR) e
Right () -> do
salted <- liftIO $ saltPass new
y <- lift $ do
setPassword aid salted
deleteSession loginLinkKey
setMessageI msgOk
getYesod
mr <- lift getMessageRender
selectRep $ do
provideRep $
fmap asHtml $ lift $ redirect $ afterPasswordRoute y
provideJsonMessage (mr msgOk)
saltLength :: Int
saltLength = 5
-- | Salt a password with a randomly generated salt.
saltPass :: Text -> IO Text
saltPass = fmap (decodeUtf8With lenientDecode)
. flip PS.makePassword 14
. encodeUtf8
saltPass' :: String -> String -> String
saltPass' salt pass =
salt ++ T.unpack (TE.decodeUtf8 $ B16.encode $ H.hash $ TE.encodeUtf8 $ T.pack $ salt ++ pass)
isValidPass :: Text -- ^ cleartext password
-> SaltedPass -- ^ salted password
-> Bool
isValidPass ct salted =
PS.verifyPassword (encodeUtf8 ct) (encodeUtf8 salted) || isValidPass' ct salted
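-- 'isValidPass'' below checks the older scheme used by this module (a 5-character
-- salt prefix followed by the hex-encoded MD5 of salt ++ password), presumably so
-- hashes created before the switch to 'Yesod.PasswordStore' keep validating.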
isValidPass' :: Text -- ^ cleartext password
-> SaltedPass -- ^ salted password
-> Bool
isValidPass' clear' salted' =
let salt = take saltLength salted
in salted == saltPass' salt clear
where
clear = TS.unpack clear'
salted = TS.unpack salted'
-- | Session variable set when user logged in via a login link. See
-- 'needOldPassword'.
--
-- Since 1.2.1
loginLinkKey :: Text
loginLinkKey = "_AUTH_EMAIL_LOGIN_LINK"
-- | Set 'loginLinkKey' to the current time.
--
-- Since 1.2.1
setLoginLinkKey :: (YesodAuthEmail site, MonadHandler m, HandlerSite m ~ site) => AuthId site -> m ()
setLoginLinkKey aid = do
now <- liftIO getCurrentTime
setSession loginLinkKey $ TS.pack $ show (toPathPiece aid, now)
| wujf/yesod | yesod-auth/Yesod/Auth/Email.hs | mit | 21,028 | 0 | 24 | 6,373 | 3,966 | 2,038 | 1,928 | 339 | 10 |
module ComputeDigest where
import Data.Digest.Pure.SHA
import qualified Data.ByteString.Lazy as L
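-- | Compute the SHA-1 digest of a file and return it as a hexadecimal string.
-- A usage sketch (the file name is arbitrary):
--
-- > computeDigest "input.log" >>= putStrLn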
computeDigest :: FilePath -> IO String
computeDigest fName = showDigest . sha1 <$> L.readFile fName
| mjansen/convert-log-helper | ComputeDigest.hs | mit | 201 | 0 | 7 | 28 | 53 | 31 | 22 | 5 | 1 |
{-# LANGUAGE TemplateHaskell #-}
import Control.Monad
import Data.Default
import Control.Concurrent(threadDelay)
import Control.Monad.Trans.Free
import Control.Monad.Trans.State
import Control.Lens hiding (Level)
import System.IO
import qualified Data.Map as M
import Types
import Game as G
import Random
import TextlunkyParser
makeLenses ''Player
makeLenses ''Room
makeLenses ''Level
makeLenses ''GameState
main = do
hSetBuffering stdout NoBuffering
gen <- newStdGen
let gs = debug .~ True $ evalRand (randGameState gen) gen -- Set debug flag
when (not $ gs^.debug) initialize
G.runGame gs
initialize = do
forM_ "The walls are shifting............\n" $ \c ->
do putChar c
threadDelay 100000
threadDelay 500000
putStrLn $
concat $
["You find yourself in some dark, wet mines with 4 ropes and 4 bombs",
" in your backpack. You must survive.",
" You may want to take a look around.",
" What do you do?"]
| 5outh/textlunky | src/textlunky.hs | mit | 968 | 0 | 13 | 197 | 243 | 125 | 118 | 34 | 1 |
-- Examples from chapter 4
-- http://learnyouahaskell.com/syntax-in-functions
lucky :: (Integral a) => a -> String
lucky 7 = "LUCKY NUMBER SEVEN!"
lucky x = "Sorry, you're out of luck, pal!"
factorial :: (Integral a) => a -> a
factorial 0 = 1
factorial x = x * factorial (x - 1)
addVectors :: (Num a) => (a, a) -> (a, a) -> (a, a)
addVectors (a1, a2) (b1, b2) = (a1 + b1, a2 + b2)
first :: (a, b, c) -> a
first (x, _, _) = x
head' :: [a] -> a
head' [] = error "Err"
head' (x:_) = x
tell :: (Show a) => [a] -> String
tell [] = "Empty"
tell (x:[]) = "One element: " ++ show x
tell (x:y:[]) = "Two elements: " ++ show x ++ " and " ++ show y
tell (x:y:_) = "More than two elements. First are " ++ show x ++ " and " ++ show y
tell' :: (Show a) => [a] -> String
tell' [] = "Empty"
tell' [x] = "One element: " ++ show x
tell' [x, y] = "Two elements: " ++ show x ++ " and " ++ show y
tell' (x:y:_) = "More than two elements. First are " ++ show x ++ " and " ++ show y
bmiTell :: (RealFloat a) => a -> a-> String
bmiTell weight height
| bmi <= 18.5 = "Underweight"
| bmi <= 25.0 = "Normal"
| bmi <= 30.0 = "Overweight"
| otherwise = "Obese"
where bmi = weight / height ^ 2
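-- For example, bmiTell 85 1.9 computes a bmi of roughly 23.5 in the where clause,
-- so the second guard fires and the result is "Normal".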
cylinder :: (RealFloat a) => a -> a -> a
cylinder r h =
let sideArea = 2 * pi * r * h
topArea = pi * r ^ 2
in sideArea + 2 * topArea
describeList1 :: [a] -> String
describeList1 xs = "The list is " ++ case xs of [] -> "empty"
[x] -> "a singleton"
xs -> "a longer list"
describeList2 :: [a] -> String
describeList2 xs = "The list is " ++ what xs
where what [] = "empty"
what [x] = "a singleton"
what xs = "a longer list"
| Sgoettschkes/learning | haskell/LearnYouAHaskell/04.hs | mit | 1,755 | 0 | 11 | 527 | 762 | 399 | 363 | 44 | 3 |
module GHCJS.DOM.DeviceOrientationEvent (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/DeviceOrientationEvent.hs | mit | 52 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
module Y2018.M01.D03.Exercise where
{--
Now.
For something COMPLETELY different:
From the wikipedia entry on Baron Munchhausen:
https://en.wikipedia.org/wiki/Baron_Munchausen
We have the following note on Munchausen numbers:
... the mathematical term "Munchausen number", coined by Daan van Berkel in
2009 to describe numbers whose digits, when raised to their own powers, can be
added together to form the number itself
for example, 3435 = 3^3 + 4^4 + 3^3 + 5^5
Write a munchausen number verifier.
--}
isMunchausen :: Integer -> Bool
isMunchausen n = undefined
{--
>>> isMunchausen 3435
True
>>> isMunchausen 4
False
--}
{-- BONUS -----------------------------------------------------------------
Here's the thing.
3435 is the only Munchausen number, base ten, but there are other Munchausen
numbers in other bases.
Write a base-n Munchausen number verifier. Then, with that, what is the first
Munchausen number base 9?
--}
isMunchausen' :: Int -> Integer -> Bool
isMunchausen' base n = undefined
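{-- One possible sketch, left as a comment so the exercise stays open: sum each
base-10 digit raised to its own power and compare against the original number.
Note that Haskell evaluates 0 ^ 0 to 1, which matters for inputs containing the
digit 0; some definitions of Munchausen numbers treat 0 ^ 0 as 0 instead.
import Data.Char (digitToInt)
isMunchausenSketch :: Integer -> Bool
isMunchausenSketch n = n == sum [ d ^ d | c <- show n, let d = toInteger (digitToInt c) ]
-- e.g. isMunchausenSketch 3435 == True, isMunchausenSketch 4 == False
--}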
| geophf/1HaskellADay | exercises/HAD/Y2018/M01/D03/Exercise.hs | mit | 1,017 | 0 | 6 | 166 | 51 | 30 | 21 | 5 | 1 |
{-# LANGUAGE Haskell2010
, TypeFamilies
, FlexibleContexts
, Trustworthy
, StandaloneDeriving
, DeriveDataTypeable
, ConstrainedClassMethods
#-}
{-# OPTIONS_GHC -Wall -fno-warn-name-shadowing #-}
-- | Efficient matrix operations in 100% pure Haskell.
--
-- This package uses miscellaneous implementations,
-- depending on the type of its components. Typically unboxed
-- arrays will perform best, while boxed arrays give you
-- certain features such as 'Rational' or 'Complex' components.
--
-- The following component types are supported by 'Matrix':
--
-- [@Int@] Uses unboxed arrays internally. 'inv' will always
-- return 'Nothing'.
--
-- [@Integer@] Uses boxed arrays internally. 'inv' will always
-- return 'Nothing'.
--
-- [@Double@ and @Float@] Uses unboxed arrays internally.
-- All matrix operations will work as expected.
-- @Matrix Double@ will probably yield the best performance.
--
-- [@Rational@] Best choice if precision is what you aim for.
-- Uses boxed arrays internally. All matrix operations will
-- work as expected.
--
-- [@Complex@] Experimental. Uses boxed arrays internally.
-- All matrix operations will work as expected, though
-- finding the inverse of a matrix is a tad less numerically
-- stable than with a @Double@ matrix.
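--
-- A short illustrative snippet, using only functions exported below:
--
-- > (fromList [[1,2],[3,4]] :: Matrix Double) * unit 2 == fromList [[1,2],[3,4]]  -- True
-- > dimensions (fromList [[1,2],[3,4]] :: Matrix Double)                          -- (2,2)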
module Numeric.Matrix (
Matrix,
MatrixElement (..),
-- * Matrix property and utility functions.
(<|>),
(<->),
scale,
-- ** Matrix properties
isUnit,
isZero,
isDiagonal,
isEmpty,
isSquare,
-- ** Conversions
toDoubleMatrix,
toComplexMatrix,
toRationalMatrix
) where
import Control.Applicative ((<$>))
import Control.DeepSeq
import Control.Monad
import Control.Monad.ST
import Data.Function (on)
import Data.Ratio
import Data.Complex
import Data.Maybe
import Data.Int
import Data.Word
import qualified Data.List as L
import Data.Array.IArray
import Data.Array.MArray
import Data.Array.Unboxed
import Data.Array.ST
import Data.STRef
import Data.Binary
import qualified Data.Array.Unsafe as U
import Data.Typeable
import Prelude (Show, Read, Num, Fractional, Eq, Bool (..), Integer, Integral,
Float, Double, RealFloat, Ord, Real,
(*), (/), (+), (-), (^), (.), (>=), (==), (/=), ($), (>), (!!),
(&&), (||),
undefined, null, head, zip, abs, flip, length, compare, drop,
negate, not, filter, fromIntegral, fst, snd, foldl1, min, max,
error, fromInteger, signum, lines, words, show, unwords,
unlines,
otherwise, id, const, uncurry, quot, toRational, fromRational)
import qualified Prelude as P
import Data.Monoid
-- | Matrices are represented by a type which fits best the component type.
-- For example a @Matrix Double@ is represented by unboxed arrays,
-- @Matrix Integer@ by boxed arrays.
--
-- Data instances exist for 'Int', 'Float', 'Double', 'Integer', 'Ratio',
-- and 'Complex'. Certain types do have certain disadvantages, like for
-- example you can not compute the inverse matrix of a @Matrix Int@.
--
-- Every matrix (regardless of the component type) has instances for
-- 'Show', 'Read', 'Num', 'Fractional', 'Eq', 'Typeable', 'Binary',
-- and 'NFData'. This means that you can use arithmetic operations like
-- '+', '*', and '/', as well as functions like 'show', 'read', or 'typeOf'.
--
-- [@Show (Matrix e)@]
-- Note that a Show instance for the component type @e@ must exist.
--
-- [@Read (Matrix e)@]
-- You can read a matrix like so:
--
-- > read "1 0\n0 1\n" :: Matrix Double
--
-- [@Num (Matrix e)@]
-- '+', '-', '*', 'negate', 'abs', 'signum', and 'fromInteger'.
--
-- 'signum' will compute the determinant and return the signum
-- of it.
--
-- 'abs' applies @map abs@ on the matrix (that is, it applies
-- @abs@ on every component in the matrix and returns a new
-- matrix without negative components).
--
-- @fromInteger@ yields a 1-x-1-matrix.
--
-- [@Fractional (Matrix e)@]
-- Only available if there exists an instance @Fractional e@
-- (the component type needs to have a @Fractional@ instance, too).
-- Note that while the 'Num' operations are safe, 'recip' and
-- '/' will fail (with an 'error') if the involved matrix is
-- not invertible or not a square matrix.
--
-- [@NFData (Matrix e)@]
-- Matrices have instances for NFData so that you can use a
-- matrix in parallel computations using the @Control.Monad.Par@
-- monad (see the @monad-par@ package for details).
--
-- [@Typeable (Matrix e)@]
-- Allows you to use matrices as 'Dynamic' values.
--
-- [@Binary (Matrix e)@]
-- Serialize and unserialize matrices using the @binary@ package.
-- See @encode@ and @decode@.
data family Matrix e
#if defined(__GLASGOW_HASKELL__) && (__GLASGOW_HASKELL__ >= 707)
deriving instance Typeable Matrix
#else
deriving instance Typeable1 Matrix
#endif
data instance Matrix Int
= IntMatrix !Int !Int (Array Int (UArray Int Int))
data instance Matrix Float
= FloatMatrix !Int !Int (Array Int (UArray Int Float))
data instance Matrix Double
= DoubleMatrix !Int !Int (Array Int (UArray Int Double))
data instance Matrix Integer
= IntegerMatrix !Int !Int (Array Int (Array Int Integer))
data instance Matrix (Ratio a)
= RatioMatrix !Int !Int (Array Int (Array Int (Ratio a)))
data instance Matrix (Complex a)
= ComplexMatrix !Int !Int (Array Int (Array Int (Complex a)))
instance (MatrixElement e, Show e) => Show (Matrix e) where
show = unlines . P.map showRow . toList
where
showRow = unwords . P.map ((' ':) . show)
instance (Read e, MatrixElement e) => Read (Matrix e) where
readsPrec _ = (\x -> [(x, "")]) . fromList . P.map (P.map P.read . words) . lines
instance (MatrixElement e) => Num (Matrix e) where
(+) = plus
(-) = minus
(*) = times
abs = map abs
signum = matrix (1,1) . const . signum . det
fromInteger = matrix (1,1) . const . fromInteger
instance (MatrixElement e, Fractional e) => Fractional (Matrix e) where
recip = fromJust . inv
fromRational = matrix (1,1) . const . fromRational
instance (MatrixElement e) => Eq (Matrix e) where
m == n
| dimensions m == dimensions n
= allWithIndex (\ix e -> m `at` ix == e) n
| otherwise = False
instance (MatrixElement e) => NFData (Matrix e) where
rnf matrix = matrix `deepseq` ()
instance (MatrixElement e, Binary e) => Binary (Matrix e) where
put m = do
let (rows, cols) = dimensions m
put rows >> put cols
forM_ [1..rows] $ \i -> do
forM_ [1..cols] $ \j -> do
put (m `at` (i,j))
get = do
rows <- get :: Get Int
cols <- get :: Get Int
forM [1..rows] (const (forM [1..cols] (const get)))
>>= return . fromList
(<|>) :: MatrixElement e => Matrix e -> Matrix e -> Matrix e
-- ^ Joins two matrices horizontally.
--
-- > 1 2 3 1 0 0 1 2 3 1 0 0
-- > 3 4 5 <|> 2 1 0 -> 3 4 5 2 1 0
-- > 5 6 7 3 2 1 5 6 7 3 2 1
m1 <|> m2 = let m = numCols m1
n1 = numRows m1
n2 = numRows m2
in matrix (max n1 n2, m + numCols m2)
$ \(i,j) -> if j > m
then (if i > n2 then 0 else m2 `at` (i,j-m))
else (if i > n1 then 0 else m1 `at` (i,j))
(<->) :: MatrixElement e => Matrix e -> Matrix e -> Matrix e
-- ^ Joins two matrices vertically.
--
-- > 1 2 3 1 0 0 1 2 3
-- > 3 4 5 <-> 2 1 0 -> 3 4 5
-- > 5 6 7 3 2 1 5 6 7
-- > 1 0 0
-- > 2 1 0
-- > 3 2 1
m1 <-> m2 = let m = numRows m1
n1 = numCols m1
n2 = numCols m2
in matrix (m + numRows m2, max n1 n2)
$ \(i,j) -> if i > m
then (if j > n2 then 0 else m2 `at` (i-m,j))
else (if j > n1 then 0 else m1 `at` (i,j))
scale :: MatrixElement e => Matrix e -> e -> Matrix e
-- ^ Scales a matrix by the given factor.
--
-- > scale s == map (*s)
scale m s = map (*s) m
isUnit, isDiagonal, isZero, isEmpty, isSquare :: MatrixElement e => Matrix e -> Bool
-- | Check whether the matrix consists of all zeros.
--
-- > isZero == all (== 0)
isZero = all (== 0)
-- | Check whether the matrix is an identity matrix.
--
-- > 1 0 0
-- > 0 1 0
-- > 0 0 1 (True)
isUnit m = isSquare m && allWithIndex (uncurry check) m
where check = \i j e -> if i == j then e == 1 else e == 0
-- | Checks whether the matrix is empty.
--
-- > isEmpty m = numCols == 0 || numRows == 0
isEmpty m = numRows m == 0 || numCols m == 0
-- | Checks whether the matrix is a diagonal matrix.
--
-- > 4 0 0 0
-- > 0 7 0 0
-- > 0 0 3 0
-- > 0 0 0 9 (True)
isDiagonal m = isSquare m && allWithIndex (uncurry check) m
where check = \i j e -> if i /= j then e == 0 else True
-- | Checks whether the matrix is a square matrix.
--
-- > isSquare == uncurry (==) . dimensions
isSquare m = let (a, b) = dimensions m in a == b
toDoubleMatrix :: (MatrixElement a, Integral a) => Matrix a -> Matrix Double
toDoubleMatrix = map fromIntegral
toRationalMatrix :: (MatrixElement a, Real a) => Matrix a -> Matrix Rational
toRationalMatrix = map toRational
toComplexMatrix :: (MatrixElement a, RealFloat a, Show a) => Matrix a -> Matrix (Complex a)
toComplexMatrix = map (:+ 0)
class Division e where
divide :: e -> e -> e
instance Division Int where divide = quot
instance Division Int8 where divide = quot
instance Division Int16 where divide = quot
instance Division Int32 where divide = quot
instance Division Int64 where divide = quot
instance Division Word8 where divide = quot
instance Division Word16 where divide = quot
instance Division Word32 where divide = quot
instance Division Word64 where divide = quot
instance Division Integer where divide = quot
instance Division Float where divide = (/)
instance Division Double where divide = (/)
instance Integral a => Division (Ratio a) where divide = (/)
instance RealFloat a => Division (Complex a) where divide = (/)
class (Eq e, Num e) => MatrixElement e where
-- | Creates a matrix of the given size using a generator
-- function for the value of each component.
matrix :: (Int, Int) -> ((Int, Int) -> e) -> Matrix e
-- | Builds a list from a matrix for the indices for which
-- the given predicate matches.
--
-- > trace == select (uncurry (==))
select :: ((Int, Int) -> Bool) -> Matrix e -> [e]
-- | Returns the component at the given position in the matrix.
-- Note that indices start at one, not at zero.
at :: Matrix e -> (Int, Int) -> e
-- | Returns the row at the given index in the matrix.
-- Note that indices start at one, not at zero.
row :: Int -> Matrix e -> [e]
-- | Returns the row at the given index in the matrix.
-- Note that indices start at one, not at zero.
col :: Int -> Matrix e -> [e]
-- | The dimensions of a given matrix.
dimensions :: Matrix e -> (Int, Int)
-- | The number of rows in the given matrix.
numRows :: Matrix e -> Int
-- | The number of columns in the given matrix.
numCols :: Matrix e -> Int
-- | Builds a matrix from a list of lists.
--
-- The innermost lists represent the rows. This function will create a m-n-matrix,
-- where m is the number of rows, which is the minimum length of the row lists
-- and n is the number of columns, i.e. the length of the outer list.
--
-- > fromList [[1,2,3],[2,1,3],[3,2,1]] :: Matrix Rational
fromList :: [[e]] -> Matrix e
-- | Turns a matrix into a list of lists.
--
-- > (toList . fromList) xs == xs
--
-- > (fromList . toList) mat == mat
toList :: Matrix e -> [[e]]
-- | An identity square matrix of the given size.
--
-- >>> unit 4
-- 1 0 0 0
-- 0 1 0 0
-- 0 0 1 0
-- 0 0 0 1
unit :: Int -> Matrix e
-- | A square matrix of the given size consisting of all zeros.
--
-- >>> zero 2
-- 0 0
-- 0 0
zero :: Int -> Matrix e
-- | A square matrix which trace is the given list, all other components
-- set to zero.
--
-- >>> diag [1,4,7,9]
-- 1 0 0 0
-- 0 4 0 0
-- 0 0 7 0
-- 0 0 0 9
diag :: [e] -> Matrix e
-- | Check whether the matrix is the empty matrix.
--
-- > dimensions empty == (0, 0)
empty :: Matrix e
-- | Subtract a matrix from another.
minus :: Matrix e -> Matrix e -> Matrix e
-- | Add two matrices.
--
-- You may also use the 'Num' instance for matrices,
-- i.e. @matrix1 + matrix2@ will work, too.
plus :: Matrix e -> Matrix e -> Matrix e
-- | Multiply two matrices /O(n^3)/.
--
-- You may also use the 'Num' instance for matrices,
-- i.e. @matrix1 * matrix2@ will work, too.
times :: Matrix e -> Matrix e -> Matrix e
-- | Compute the inverse of a matrix. Returns @Nothing@
-- if the matrix is not invertible.
inv :: Matrix e -> Maybe (Matrix e)
-- | Applies Bareiss multistep integer-preserving
-- algorithm for finding the determinant of a matrix.
-- Returns 0 if the matrix is not a square matrix.
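--
-- For example:
--
-- >>> det (fromList [[1,2],[3,4]] :: Matrix Int)
-- -2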
det :: Matrix e -> e
-- | Flip rows and columns.
--
-- > 1 8 9 1 2 3
-- > 2 1 8 --transpose-> 8 1 2
-- > 3 2 1 9 8 1
transpose :: Matrix e -> Matrix e
-- | Compute the rank of a matrix.
rank :: Matrix e -> e
-- | Select the diagonal elements of a matrix as a list.
--
-- > 1 8 3
-- > 3 6 5 --trace-> [1, 6, 2]
-- > 7 4 2
trace :: Matrix e -> [e]
-- | Select the minor of a matrix, that is the determinant
-- of the 'minorMatrix'.
--
-- > minor = det . minorMatrix
minor :: MatrixElement e => (Int, Int) -> Matrix e -> e
-- | Select the minor matrix of a matrix, a matrix that is obtained
-- by deleting the i-th row and j-th column.
--
-- > 10 9 95 45
-- > 8 7 3 27 8 3 27
-- > 13 17 19 23 --minorMatrix (1,2)-> 13 19 23
-- > 1 2 5 8 1 5 8
minorMatrix :: MatrixElement e => (Int, Int) -> Matrix e -> Matrix e
cofactors :: MatrixElement e => Matrix e -> Matrix e
adjugate :: MatrixElement e => Matrix e -> Matrix e
-- | Apply a function on every component in the matrix.
map :: MatrixElement f => (e -> f) -> Matrix e -> Matrix f
-- | Apply a predicate on every component in the matrix
-- and returns True iff all components satisfy it.
all :: (e -> Bool) -> Matrix e -> Bool
-- | Apply a predicate on every component in the matrix
-- and return True if one or more components satisfy it.
any :: (e -> Bool) -> Matrix e -> Bool
-- | Compute the sum of the components of the matrix.
sum :: Matrix e -> e
-- | Map each component of the matrix to a monoid, and combine the results.
foldMap :: Monoid m => (e -> m) -> Matrix e -> m
mapWithIndex :: MatrixElement f => ((Int, Int) -> e -> f) -> Matrix e -> Matrix f
allWithIndex :: ((Int, Int) -> e -> Bool) -> Matrix e -> Bool
anyWithIndex :: ((Int, Int) -> e -> Bool) -> Matrix e -> Bool
foldMapWithIndex :: Monoid m => ((Int, Int) -> e -> m) -> Matrix e -> m
unit n = fromList [[ if i == j then 1 else 0 | j <- [1..n]] | i <- [1..n] ]
zero n = matrix (n,n) (const 0)
empty = fromList []
diag xs = matrix (n,n) (\(i,j) -> if i == j then xs !! (i-1) else 0)
where n = length xs
select p m = [ at m (i,j) | i <- [1..numRows m]
, j <- [1..numCols m]
, p (i,j) ]
at mat (i, j) = ((!! (j-1)) . (!! (i-1)) . toList) mat
row i = (!! (i-1)) . toList
col i = row i . transpose
numRows = fst . dimensions
numCols = snd . dimensions
dimensions m = case toList m of [] -> (0, 0)
(x:xs) -> (length xs + 1, length x)
adjugate = transpose . cofactors
transpose mat = matrix (n, m) (\(i,j) -> mat `at` (j,i))
where (m, n) = dimensions mat
trace = select (uncurry (==))
inv _ = Nothing
minorMatrix (i,j) mat = matrix (numRows mat - 1, numCols mat - 1) $
\(i',j') -> mat `at` (if i' >= i then i' + 1 else i',
if j' >= j then j' + 1 else j')
minor ix = det . minorMatrix ix
cofactors mat = matrix (dimensions mat) $
\(i,j) -> fromIntegral ((-1 :: Int)^(i+j)) * minor (i,j) mat
map f = mapWithIndex (const f)
all f = allWithIndex (const f)
any f = anyWithIndex (const f)
sum = getSum . foldMap Sum
foldMap f = foldMapWithIndex (const f)
mapWithIndex f m = matrix (dimensions m) (\x -> f x (m `at` x))
allWithIndex f m = P.all id [ f (i, j) (m `at` (i,j))
| i <- [1..numRows m], j <- [1..numCols m]]
anyWithIndex f m = P.any id [ f (i, j) (m `at` (i,j))
| i <- [1..numRows m], j <- [1..numCols m]]
foldMapWithIndex f m = mconcat [ f (i, j) (m `at` (i,j))
| i <- [1..numRows m], j <- [1..numCols m]]
a `plus` b
| dimensions a /= dimensions b = error "Matrix.plus: dimensions don't match."
| otherwise = matrix (dimensions a) (\x -> a `at` x + b `at` x)
a `minus` b
| dimensions a /= dimensions b = error "Matrix.minus: dimensions don't match."
| otherwise = matrix (dimensions a) (\x -> a `at` x - b `at` x)
a `times` b
| numCols a /= numRows b = error "Matrix.times: `numRows a' and `numCols b' don't match."
| otherwise = _mult a b
instance MatrixElement Int where
matrix d g = runST (_matrix IntMatrix arrayST arraySTU d g)
fromList = _fromList IntMatrix
at (IntMatrix _ _ arr) = _at arr
dimensions (IntMatrix m n _) = (m, n)
row i (IntMatrix _ _ arr) = _row i arr
col j (IntMatrix _ _ arr) = _col j arr
toList (IntMatrix _ _ arr) = _toList arr
det (IntMatrix m n arr) = if m /= n then 0 else runST (_det thawsUnboxed arr)
rank (IntMatrix _ _ arr) = runST (_rank thawsBoxed arr)
instance MatrixElement Integer where
matrix d g = runST (_matrix IntegerMatrix arrayST arrayST d g)
fromList = _fromList IntegerMatrix
at (IntegerMatrix _ _ arr) = _at arr
dimensions (IntegerMatrix m n _) = (m, n)
row i (IntegerMatrix _ _ arr) = _row i arr
col j (IntegerMatrix _ _ arr) = _col j arr
toList (IntegerMatrix _ _ arr) = _toList arr
det (IntegerMatrix m n arr) = if m /= n then 0 else runST (_det thawsBoxed arr)
rank (IntegerMatrix _ _ arr) = runST (_rank thawsBoxed arr)
instance MatrixElement Float where
matrix d g = runST (_matrix FloatMatrix arrayST arraySTU d g)
fromList = _fromList FloatMatrix
at (FloatMatrix _ _ arr) = _at arr
dimensions (FloatMatrix m n _ ) = (m, n)
row i (FloatMatrix _ _ arr) = _row i arr
col j (FloatMatrix _ _ arr) = _col j arr
toList (FloatMatrix _ _ arr) = _toList arr
det (FloatMatrix m n arr) = if m /= n then 0 else runST (_det thawsUnboxed arr)
rank (FloatMatrix _ _ arr) = runST (_rank thawsBoxed arr)
inv (FloatMatrix m n arr) = if m /= n then Nothing else
let x = runST (_inv unboxedST pivotMax arr)
in maybe Nothing (Just . FloatMatrix m n) x
instance MatrixElement Double where
matrix d g = runST (_matrix DoubleMatrix arrayST arraySTU d g)
fromList = _fromList DoubleMatrix
at (DoubleMatrix _ _ arr) = _at arr
dimensions (DoubleMatrix m n _ ) = (m, n)
row i (DoubleMatrix _ _ arr) = _row i arr
col j (DoubleMatrix _ _ arr) = _col j arr
toList (DoubleMatrix _ _ arr) = _toList arr
det (DoubleMatrix m n arr) = if m /= n then 0 else runST (_det thawsUnboxed arr)
rank (DoubleMatrix _ _ arr) = runST (_rank thawsBoxed arr)
inv (DoubleMatrix m n arr) = if m /= n then Nothing else
let x = runST (_inv unboxedST pivotMax arr)
in maybe Nothing (Just . DoubleMatrix m n) x
instance (Show a, Integral a) => MatrixElement (Ratio a) where
matrix d g = runST (_matrix RatioMatrix arrayST arrayST d g)
fromList = _fromList RatioMatrix
at (RatioMatrix _ _ arr) = _at arr
dimensions (RatioMatrix m n _ ) = (m, n)
row i (RatioMatrix _ _ arr) = _row i arr
col j (RatioMatrix _ _ arr) = _col j arr
toList (RatioMatrix _ _ arr) = _toList arr
det (RatioMatrix m n arr) = if m /= n then 0 else runST (_det thawsBoxed arr)
rank (RatioMatrix _ _ arr) = runST (_rank thawsBoxed arr)
inv (RatioMatrix m n arr) = if m /= n then Nothing else
let x = runST (_inv boxedST pivotMax arr)
in maybe Nothing (Just . RatioMatrix m n) x
instance (Show a, RealFloat a) => MatrixElement (Complex a) where
matrix d g = runST (_matrix ComplexMatrix arrayST arrayST d g)
fromList = _fromList ComplexMatrix
at (ComplexMatrix _ _ arr) = _at arr
dimensions (ComplexMatrix m n _ ) = (m, n)
row i (ComplexMatrix _ _ arr) = _row i arr
col j (ComplexMatrix _ _ arr) = _col j arr
toList (ComplexMatrix _ _ arr) = _toList arr
det (ComplexMatrix m n arr) = if m /= n then 0 else runST (_det thawsBoxed arr)
rank (ComplexMatrix _ _ arr) = runST (_rank thawsBoxed arr)
inv (ComplexMatrix m n arr) = if m /= n then Nothing else
let x = runST (_inv boxedST pivotNonZero arr)
in maybe Nothing (Just . ComplexMatrix m n) x
_at :: (IArray a (u Int e), IArray u e)
=> a Int (u Int e) -> (Int, Int) -> e
_at arr (i,j) = arr ! i ! j
_row, _col :: (IArray a (u Int e), IArray u e) => Int -> a Int (u Int e) -> [e]
_row i arr = let row = arr ! i in [ row ! j | j <- [1..(snd (bounds row))] ]
_col j arr = [ arr ! i ! j | i <- [1..(snd (bounds arr))] ]
_toList :: (IArray a e) => Array Int (a Int e) -> [[e]]
_toList = P.map elems . elems
_fromList :: (IArray a (u Int e), IArray u e)
=> (Int -> Int -> a Int (u Int e) -> matrix e) -> [[e]] -> matrix e
_fromList c xs =
let lengths = P.map length xs
numCols = if null lengths then 0 else foldl1 min lengths
numRows = length lengths
in c numRows numCols
$ array (1, numRows)
$ zip [1..numRows]
$ P.map (array (1, numCols) . zip [1..numCols]) xs
thawsBoxed :: (IArray a e, MArray (STArray s) e (ST s))
=> Array Int (a Int e)
-> ST s [STArray s Int e]
thawsBoxed = mapM thaw . elems
thawsUnboxed :: (IArray a e, MArray (STUArray s) e (ST s))
=> Array Int (a Int e)
-> ST s [STUArray s Int e]
thawsUnboxed = mapM thaw . elems
arrays :: [(u s) Int e]
-> ST s ((STArray s) Int ((u s) Int e))
arrays list = newListArray (1, length list) list
augment :: (IArray a e, MArray (u s) e (ST s), Num e)
=> ((Int, Int) -> [e] -> ST s ((u s) Int e))
-> Array Int (a Int e)
-> ST s (STArray s Int (u s Int e))
augment _ arr = do
let (_, n) = bounds arr
row (a,i) = newListArray (1, 2*n)
[ if j > n then (if j == i + n then 1 else 0)
else a ! j
| j <- [1..2*n] ]
mapM row (zip (elems arr) [1..]) >>= newListArray (1, n)
boxedST :: MArray (STArray s) e (ST s)
=> (Int, Int) -> [e] -> ST s ((STArray s) Int e)
boxedST = newListArray
unboxedST :: MArray (STUArray s) e (ST s)
=> (Int, Int) -> [e] -> ST s ((STUArray s) Int e)
unboxedST = newListArray
arrayST :: MArray (STArray s) e (ST s)
=> (Int, Int) -> e -> ST s ((STArray s) Int e)
arrayST = newArray
arraySTU :: MArray (STUArray s) e (ST s)
=> (Int, Int) -> e -> ST s ((STUArray s) Int e)
arraySTU = newArray
tee :: Monad m => (b -> m a) -> b -> m b
tee f x = f x >> return x
read :: (MArray a1 b m, MArray a (a1 Int b) m) =>
a Int (a1 Int b) -> Int -> Int -> m b
read a i j = readArray a i >>= flip readArray j
pivotMax :: Ord v => [(i, v)] -> i
pivotMax = fst . L.maximumBy (compare `on` snd)
pivotNonZero :: (Num v, Eq v) => [(i, v)] -> i
pivotNonZero xs = maybe (fst $ head xs) fst $ L.find ((/= 0) . snd) xs
_inv :: (IArray a e, MArray (u s) e (ST s), Fractional e, Show e, Eq e)
=> ((Int, Int) -> [e] -> ST s ((u s) Int e))
-- ^ A function for building a new array
-> ([(Int, e)] -> Int)
-- ^ A function for selecting pivot elements
-> Array Int (a Int e)
-- ^ A matrix as arrays or arrays
-> ST s (Maybe (Array Int (a Int e)))
_inv mkArrayST selectPivot mat = do
let m = snd $ bounds mat
n = 2 * m
swap a i j = do
tmp <- readArray a i
readArray a j >>= writeArray a i
writeArray a j tmp
okay <- newSTRef True
a <- augment mkArrayST mat
forM_ [1..m] $ \k -> do
iPivot <- selectPivot <$> zip [k..m]
<$> mapM (\i -> abs <$> read a i k) [k..m]
p <- read a iPivot k
if p == 0 then writeSTRef okay False else do
swap a iPivot k
forM_ [k+1..m] $ \i -> do
a_i <- readArray a i
a_k <- readArray a k
forM_ [k+1..n] $ \j -> do
a_ij <- readArray a_i j
a_kj <- readArray a_k j
a_ik <- readArray a_i k
writeArray a_i j (a_ij - a_kj * (a_ik / p))
writeArray a_i k 0
invertible <- readSTRef okay
if invertible then
do
forM_ [ m - v | v <- [0..m-1] ] $ \i -> do
a_i <- readArray a i
p <- readArray a_i i
writeArray a_i i 1
forM_ [i+1..n] $ \j -> do
readArray a_i j >>= writeArray a_i j . (/ p)
unless (i == m) $ do
forM_ [i+1..m] $ \k -> do
a_k <- readArray a k
p <- readArray a_i k
forM_ [k..n] $ \j -> do
a_ij <- readArray a_i j
a_kj <- readArray a_k j
writeArray a_i j (a_ij - p * a_kj)
mapM (\i -> readArray a i >>= getElems
>>= return . listArray (1, m) . drop m) [1..m]
>>= return . Just . listArray (1, m)
else return Nothing
_rank :: (IArray a e, MArray (u s) e (ST s), Num e, Division e, Eq e)
=> (Array Int (a Int e) -> ST s [(u s) Int e])
-- ^ A function for thawing a boxed array
-> Array Int (a Int e)
-- ^ A matrix given as array of arrays
-> ST s e
_rank thaws mat = do
let m = snd $ bounds mat
n = snd $ bounds (mat ! 1)
swap a i j = do
tmp <- readArray a i
readArray a j >>= writeArray a i
writeArray a j tmp
a <- thaws mat >>= arrays
ixPivot <- newSTRef 1
prevR <- newSTRef 1
forM_ [1..n] $ \k -> do
pivotRow <- readSTRef ixPivot
switchRow <- mapM (\i -> read a i k) [pivotRow .. m]
>>= return . L.findIndex (/= 0)
when (isJust switchRow) $ do
let ix = fromJust switchRow + pivotRow
when (pivotRow /= ix) (swap a pivotRow ix)
a_p <- readArray a k
pivot <- readArray a_p k
prev <- readSTRef prevR
forM_ [pivotRow+1..m] $ \i -> do
a_i <- readArray a i
forM_ [k+1..n] $ \j -> do
a_ij <- readArray a_i j
a_ik <- readArray a_i k
a_pj <- readArray a_p j
writeArray a_i j ((pivot * a_ij - a_ik * a_pj)
`divide` prev)
writeSTRef ixPivot (pivotRow + 1)
writeSTRef prevR pivot
readSTRef ixPivot >>= return . (+ negate 1) . fromIntegral
_det :: (IArray a e, MArray (u s) e (ST s),
Num e, Eq e, Division e)
=> (Array Int (a Int e) -> ST s [(u s) Int e])
-> Array Int (a Int e) -> ST s e
_det thaws mat = do
let size = snd $ bounds mat
a <- thaws mat >>= arrays
signR <- newSTRef 1
pivotR <- newSTRef 1
forM_ [1..size] $ \k -> do
sign <- readSTRef signR
unless (sign == 0) $ do
prev <- readSTRef pivotR
pivot <- read a k k >>= tee (writeSTRef pivotR)
when (pivot == 0) $ do
s <- forM [(k+1)..size] $ \r -> do
a_rk <- read a r k
if a_rk == 0 then return 0 else return r
let sf = filter (>0) s
when (not $ null sf) $ do
let sw = head sf
row <- readArray a sw
readArray a k >>= writeArray a sw
writeArray a k row
read a k k >>= writeSTRef pivotR
readSTRef signR >>= writeSTRef signR . negate
when (null sf) (writeSTRef signR 0)
sign' <- readSTRef signR
unless (sign' == 0) $ do
pivot' <- readSTRef pivotR
forM_ [(k+1)..size] $ \i -> do
a_i <- readArray a i
forM [(k+1)..size] $ \j -> do
a_ij <- readArray a_i j
a_ik <- readArray a_i k
a_kj <- read a k j
writeArray a_i j ((pivot' * a_ij - a_ik * a_kj) `divide` prev)
liftM2 (*) (readSTRef pivotR) (readSTRef signR)
-- TODO: More efficient implementation (decrease the constant factors
-- a little bit by working in the ST monad)
-- [ remark: not sure if this will be faster than lists -> benchmark! ]
_mult :: MatrixElement e => Matrix e -> Matrix e -> Matrix e
_mult a b = let rowsA = numRows a
rowsB = numRows b
colsB = numCols b
in matrix (rowsA, colsB) (\(i,j) -> L.foldl' (+) 0 [a `at` (i, k) * b `at` (k, j) | k <- [1..rowsB]])
_matrix :: (IArray a1 (u Int e), IArray u e,
MArray a2 (u Int e) (ST s), MArray a3 e (ST s),
Num e)
=> (Int -> Int -> a1 Int (u Int e) -> matrix)
-> ((Int, Int) -> a -> ST s (a2 Int (u Int e)))
-> ((Int, Int) -> e -> ST s (a3 Int e))
-> (Int, Int)
-> ((Int, Int) -> e)
-> ST s matrix
_matrix c newArray newArrayU (m, n) g = do
rows <- newArray (1, m) undefined
forM_ [1..m] $ \i -> do
cols <- newArrayU (1, n) 0
forM_ [1..n] $ \j -> do
writeArray cols j (g (i,j))
U.unsafeFreeze cols >>= writeArray rows i
U.unsafeFreeze rows >>= return . c m n
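-- The rewrite rule below tells GHC that the determinant of a matrix power equals the
-- power of the determinant, so @det (a ^ k)@ can avoid performing the k matrix
-- multiplications first.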
{-# RULES
"det/pow"
forall a k. det (a ^ k) = (det a) ^ k
#-}
| scravy/bed-and-breakfast | src/Numeric/Matrix.hs | mit | 31,373 | 2 | 31 | 10,376 | 10,486 | 5,475 | 5,011 | 536 | 4 |
module Language.Haskell.Lexer.Tokens where
-- | Haskell token classifications:
data Token
= Varid -- ^ Variable
| Conid -- ^ Constructor
| Varsym -- ^ Variable operator
| Consym -- ^ Constructor operator
| Reservedid -- ^ Reserved keyword
| Reservedop -- ^ Reserved operator
| Specialid
| IntLit -- ^ Integral numeric literal
| FloatLit -- ^ Fractional numeric literal
| CharLit -- ^ Character literal
| StringLit -- ^ String literal
| QQuote -- ^ Quasi quote: @[|text|stuff|]@
| Qvarid -- ^ Qualified variable
| Qconid -- ^ Qualified constructor
| Qvarsym -- ^ Qualified variable operator
| Qconsym -- ^ Qualified constructor operator
| Special
| Whitespace -- ^ White space
| NestedCommentStart -- ^ Internal: causes a call to an external function
| NestedComment -- ^ A nested comment ({- ... -})
| LiterateComment -- ^ Not handled by the lexer
| Commentstart -- ^ Dashes
| Comment -- ^ The stuff after the dashes
| ErrorToken | GotEOF | TheRest
| ModuleName | ModuleAlias -- ^ recognized in a later pass
-- Inserted during layout processing (see Haskell 98, 9.3):
| Layout -- ^ for implicit braces
| Indent Int -- ^ \<n\>, to precede the first token on each line
| Open Int -- ^ \{n\}, after let, where, do or of, if not followed by a \"{\"
deriving (Show,Eq,Ord)
| yav/haskell-lexer | Language/Haskell/Lexer/Tokens.hs | mit | 1,432 | 0 | 6 | 392 | 151 | 107 | 44 | 31 | 0 |
{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving, FlexibleInstances #-}
module Syntax.Term where
import Data.Generics.Fixplate (Mu(..))
import qualified Data.Generics.Fixplate as Fix
import Data.Map (Map)
import Data.Scientific (Scientific)
import Data.Typeable
import Data.Data
import Syntax.Type (Type)
import qualified Syntax.Type as Type
newtype Identifier = Identifier String
deriving (Eq, Ord, Show, Typeable, Data)
newtype BitWidth = BitWidth Integer
deriving (Eq, Ord, Show, Typeable, Data)
data Numeric =
NumericFloat BitWidth Scientific
| NumericSigned BitWidth Integer
| NumericUnsigned BitWidth Integer
deriving (Eq, Ord, Show, Typeable, Data)
data Constant =
NumericConstant Numeric
| StringConstant String
| CharConstant Char
| BooleanConstant Bool
deriving (Eq, Ord, Show, Typeable, Data)
newtype Operator = Operator String
deriving (Eq, Ord, Show, Typeable, Data)
data Operation t =
BinaryOperation Operator t t
| UnaryOperation Operator t
deriving (Eq, Ord, Show, Functor, Foldable, Typeable, Data)
data TermF typeId id t =
Constant Constant
| Variable id
| Abstraction [id] t
| Application t [t]
| RecordIntroduction (Map String t)
| RecordElimination t String
| Operation (Operation t)
deriving (Eq, Ord, Show, Functor, Foldable, Typeable, Data)
deriving instance (Data typeId, Data id) => Data (Mu (TermF typeId id))
instance (Eq typeId, Eq id) => Fix.EqF (TermF typeId id) where equalF = (==)
instance (Ord typeId, Ord id) => Fix.OrdF (TermF typeId id) where compareF = compare
instance (Show typeId, Show id) => Fix.ShowF (TermF typeId id) where showsPrecF = showsPrec
type Term typeId id = Mu (TermF typeId id)
| fredun/compiler | src/Syntax/Term.hs | mit | 1,714 | 0 | 9 | 308 | 590 | 331 | 259 | 45 | 0 |
-- |
-- Module : FCH.Java
-- Copyright : Joe Jevnik
--
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : GHC
--
-- The language module for Java.
module FCH.Java (java) where
import Data.Char (toUpper)
import FCH.Data
-- | The Java programming language.
java :: Language
java = Language { mkComment = (++) "// "
, mkString = \fl cs -> "final String " ++ ufl fl
++ "_STR = " ++ mkStrLit cs ++ ";"
, mkLen = \fl cs -> "final int " ++ ufl fl ++ "_LEN = "
++ show (length cs) ++ ";"
, mdSetup = \_ md -> "public class " ++ md ++ "{"
, mdCleanup = \_ _ -> "}"
, checkFile = True
}
where
ufl = map toUpper
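-- A usage sketch (the identifier "foo" and the contents "bar" are arbitrary; the
-- exact string literal produced by mkString depends on mkStrLit from FCH.Data):
--
-- mkLen java "foo" "bar" == "final int FOO_LEN = 3;"
-- mkComment java "note"  == "// note"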
| llllllllll/fch | FCH/Java.hs | gpl-2.0 | 858 | 0 | 12 | 330 | 186 | 109 | 77 | 13 | 1 |
-- File: ch3/exB12.hs
-- A function that computes the convex hull of a finite list of points.
import Data.List (groupBy, nub, sort, sortBy)
-- | Type alias for points in the Cartesian plane.
type Point = (Double, Double)
slope :: Point -> Point -> Double
-- ^ Calculates the slope between two points.
slope (x1, y1) (x2, y2) = (y2 - y1) / (x2 - x1)
norm :: Point -> Point -> Double
-- ^ Calculates the taxicab distance between two points.
norm (x1, y1) (x2, y2) = abs (x2 - x1) + abs (y2 - y1)
-- | Type for encoding relative direction.
data Direction = GoLeft | GoStraight | GoRight | GoBackwards | GoNowhere
deriving (Eq, Read, Show)
direction :: Point -> Point -> Point -> Direction
-- ^ Given points p1, p2, and p3, returns the direction to turn at `p2`
-- when traveling from `p1` through `p2` to `p3`.
direction p1@(x1, y1) p2@(x2, y2) p3@(x3, y3) = do
-- Check a few easy edge cases.
if p1 == p2 || p2 == p3 -- direction is not well-defined
then GoNowhere
else if p1 == p3 -- direction is backwards
then GoBackwards
else do
-- Here, we do a coordinate transformation that moves `p1` to the
-- origin and moves `p2` to (1,0). This puts `p3'` somewhere in the
-- plane, and then we branch on the trichotomy of `y3'`.
let det = (x2 - x1) ** 2 + (y2 - y1) ** 2
a = (x2 - x1) / det
b = (y2 - y1) / det
c = - (y2 - y1)
d = x2 - x1
x3' = a * (x3 - x1) + b * (y3 - y1)
y3' = c * (x3 - x1) + d * (y3 - y1)
if y3' > 0 -- `p3'` is in the upper half plane
then GoLeft
else if y3' < 0 -- `p3'` is in the lower half plane
then GoRight
else if x3' > 1 -- `p3'` is on the x-axis, beyond (1,0)
then GoStraight
else GoBackwards -- `p3'` is on the x-axis behind (1, 0)
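-- Some concrete cases, following directly from the transformation above:
--
-- direction (0,0) (1,0) (1,1)  == GoLeft
-- direction (0,0) (1,0) (1,-1) == GoRight
-- direction (0,0) (1,0) (2,0)  == GoStraight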
grahamScan :: [Point] -> [Point]
-- ^ Takes a list of points in the plane and returns the vertices of
-- the smallest convex polygon containing the input points, ordered
-- counterclockwise around the perimeter.
grahamScan input = walkPerimeter . sorter $ input
where
sorter ps = (b :)
. map (\(p,_,_) -> p)
-- ^ forget slopes and norms, and push `b`
. map last
-- ^ take the furthest from each slope group
. map (sortBy (\(_,_,n1) (_,_,n2) -> compare n1 n2))
-- ^ sort each slope group by norm
. groupBy (\(_,s1,_) (_,s2,_) -> s1 == s2)
-- ^ group by slope
. sortBy (\(_,s1,_) (_,s2,_) -> compare s1 s2)
-- ^ sort by slope
. map (\p -> (p, slope b p, norm b p)) $ bs
-- ^ calculate slope and norm for remaining points
where
(b : bs) = sort . nub $ ps
-- ^ remove duplicates and find the lowest-leftmost point `b`
walkPerimeter :: [Point] -> [Point]
walkPerimeter ps = walkPerimeter' stack heap
where
stack = return . head $ ps
heap = tail ps
walkPerimeter' s@(s0 : _) h@(h0 : h1 : _) =
if direction s0 h0 h1 == GoLeft
then walkPerimeter' (h0 : s) (tail h)
else walkPerimeter' s (tail h)
walkPerimeter' s (h0 : []) = reverse (h0 : s)
| friedbrice/RealWorldHaskell | ch3/exB12.hs | gpl-2.0 | 3,165 | 0 | 18 | 950 | 912 | 518 | 394 | 48 | 6 |
module TypeCheckProblem(
fitness
, generateInput
, worstScore
, bestScore
, generateTestCases
, readTestCases
, writeTestCases
) where
import GRPCommon
import System.Random
import Control.Monad
import Data.List
import GRPMath
import Dictionary
import GhciEval
--TODO: This file will generate Haskell expressions as Strings,
--give information about used functions, typeclasses etc and then
--expect the candidate to tell whether the given expression typechecks.
--it seems like a good idea to weight test cases so that non-type-checking
--tests have less of an impact.
--The function should be applicable to expression first and foremost,
--though it would be beneficial to also enable checking of whole functions
--the returned fitness value should establish a viable, learnable slope
--something along the lines of "echo \input\ | ghci"
--also, it will be useful to use "echo ":browse Prelude" | ghci"
--example I/O: In = ["map :: (a->b) -> [a] -> [b]", "(+ 1) :: Num a -> a -> a", "[1..3] :: (Enum t, Num t) => [t]]
--Out: map (+1) [1..3] :: (Enum b, Num b) => [b]
--fitness: section in front of arrow is bonus, the closer the better. Low impact on fitness
--double-colon is required; highish impact.
--section after :: - every matching parameter adds points, every right constraint, correct result type; high total impact
--TODO: Generate a set of Inputs on/pre startup, load from File; to avoid huge runtime of hint.
--TODO: Look into starting up hint globally to prevent long startup time.
testset explength datalength = do
exps <- forM [1..datalength] (\_ -> generateInput explength) :: IO [Input]
let dataset = map (\(TCI [x]) -> x) exps
doesTypeCheck <- fmap (map (/= "")) $ mapM eval dataset
return (length $ filter id doesTypeCheck, length doesTypeCheck)
writeTestCases :: [(Input, Output)] -> IO ()
writeTestCases lst = writeFile "TypeCheckTests" $ show lst
readTestCases :: IO [(Input, Output)]
readTestCases = liftM read $ readFile "TypeCheckTests"
generateTestCases :: Int -> IO ()
generateTestCases n = do
lstOfInputs <- mapM generateInput [div x 300 + 1 | x <- [1..n]]
lstOfOutputs <- mapM (\(TCI inp) -> eval $ head inp) lstOfInputs
--TODO: wrap this in a lookup container, make that available to fitness... somehow.
--writeFile "TypeCheckTests" $ show (zip lstOfInputs map $ TCO lstOfOutputs)
writeTestCases $ zip lstOfInputs (map TCO lstOfOutputs)
putStrLn "Hello world 1"
print lstOfInputs
putStrLn "Hello world 2"
print lstOfOutputs
putStrLn "Hello world 3"
--results <- zipWithM fitness lstOfInputs $ map TCO lstOfOutputs
--print results
putStrLn "Hello world 4"
return ()
lookUpTestCase :: String -> IO Output
lookUpTestCase input =
do
assocs <- readTestCases
return $ snd $ head $ filter (\(TCI x, _) -> head x == input) $ assocs
fitness :: Input -> Output -> IO Float
fitness (TCI inp) (TCO out) = do
(TCO actualType) <- lookUpTestCase $ head inp
print actualType
return $ similarity actualType out
fitness (PPI _) (PPO _) = error "You called the wrong function, doofus."
--TODO: Satisfies minimally necessary conditions. Not very smooth, reacts harshly to smaller errors.
--TODO: Needs to ignore type variable naming.
similarity :: String -> String -> Float
similarity a b = fromIntegral correctWords / fromIntegral numWords
where
numWords = max (length $ words a) (length $ words b)
correctWords = length $ filter id $ zipWith (==) (words a) (words b)
--TODO: Needs to intentionally generate more complex input structures: Brackets, indentation, type annotations, etc.
generateInput :: Int -> IO Input
generateInput 1 = do
(word, _) <- getStdRandom (pickRandomly declarations)
return $ TCI [word]
generateInput i = do
(word, _) <- getStdRandom (pickRandomly declarations)
(TCI [rest]) <- generateInput (i-1)
return $ TCI [word ++ " " ++ rest]
worstScore :: Input -> Float
worstScore x = 0.0
bestScore :: Input -> Float
bestScore x = 1.0
|
vektordev/GP
|
src/TypeCheckProblem.hs
|
gpl-2.0
| 3,955 | 0 | 15 | 704 | 880 | 448 | 432 | 63 | 1 |
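The scoring helper similarity in TypeCheckProblem.hs above is pure, so its behaviour is easiest to see interactively. A hypothetical GHCi session (similarity is not in the export list, so load the file itself):
-- ghci> :load src/TypeCheckProblem.hs
-- ghci> similarity "Num a => a" "Num a => a"
-- 1.0
-- ghci> similarity "Num a => a" "Num a => Int"
-- 0.75
-- ghci> similarity "Int" "Maybe Int"
-- 0.0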
{-
This source file is a part of the noisefunge programming environment.
Copyright (C) 2015 Rev. Johnny Healey <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE TemplateHaskell #-}
module Language.NoiseFunge.Befunge.Process (ProgArray, makeProgArray,
Dir(..), PC(PC),
Event(..),
pos, dir, Pos,
Delta(..), oldpc, newpc,
change, events,
ProcessState, makeProcessState,
Stack, pop, (#+), stackLength,
mem, pc, stack, quote,
jump, progIn, progOut,
fnStack,
noteBuf, ticks,
OperatorParams(..),
Fungine, FungeVM,
FungeProcess, FungeProgram,
Deltas, ProcessStats,
psTicks, psStackSize,
psQuote,
Operator(..),
opCode, opChar,
opName, opDesc,
OpSet(..),
processStats,
tellDelta, tellMem
) where
import Control.Lens
import Control.Monad
import Control.Monad.RWS
import qualified Data.Array as Arr
import Data.Array.Unboxed
import qualified Data.Binary as B
import Data.Char
import Data.Default
import Data.Word
import Language.NoiseFunge.Befunge.VM
import Language.NoiseFunge.Note
-- This module builds upon the VM code to generate processes that are distinct
-- to the noisefunge engine. This provides the underlying functionality that
-- the operators are built around. It includes things such as the program
-- counter, stack, and process memory.
type Pos = (Word8, Word8)
type ProgArray = UArray Pos Word8
-- Convert a list of lines into a program array. The height will be the number
-- of lines and the width will be the length of the longest line.
makeProgArray :: [String] -> ProgArray
makeProgArray strs = arr where
rows = fromIntegral $ length strs
cols = fromIntegral $ maximum $ fmap length strs
bnds = ((0,0), (rows-1, cols-1))
arr = array bnds $ do
(r, row) <- zip [0..] strs
(c, char) <- zip [0..cols-1] (row ++ pad)
return ((r,c), fromIntegral $ ord char)
pad = repeat ' '
data Dir = U | D | L | R
deriving (Read, Show, Eq, Ord, Enum)
instance B.Binary Dir where
get = (toEnum . fromIntegral) <$> B.getWord8
put = B.putWord8 . fromIntegral . fromEnum
data PC = PC {
_pos :: !Pos,
_dir :: !Dir
} deriving (Read, Show, Eq, Ord)
instance Default PC where
def = PC (0,0) R
instance B.Binary PC where
get = PC <$> B.get <*> B.get
put (PC p d) = B.put p >> B.put d
data Event =
StringEvent String
| ErrorEvent String
| NoteEvent !Note
deriving (Read, Show, Eq, Ord)
instance B.Binary Event where
get = B.getWord8 >>= getEv where
getEv 0 = StringEvent <$> B.get
getEv 1 = ErrorEvent <$> B.get
getEv 2 = NoteEvent <$> B.get
getEv _ = error "Bad event"
put (StringEvent s) = B.putWord8 0 >> B.put s
put (ErrorEvent s) = B.putWord8 1 >> B.put s
put (NoteEvent n) = B.putWord8 2 >> B.put n
-- A delta expresses how a process has changed in the last tick.
data Delta = Delta {
_oldpc :: Maybe PC,
_newpc :: Maybe PC,
_change :: Maybe ProgArray,
_events :: [Event]
} deriving (Show, Eq, Ord)
instance Semigroup Delta where
(<>) = mappend
instance Monoid Delta where
mempty = Delta Nothing Nothing Nothing []
mappend (Delta op1 np1 c1 e1) (Delta op2 np2 c2 e2) =
Delta (op1 `mplus` op2) (np2 `mplus` np1)
(c2 `mplus` c1) (e1 `mappend` e2)
instance B.Binary Delta where
get = Delta <$> B.get <*> B.get <*> B.get <*> B.get
put (Delta a b c d) = B.put a >> B.put b >> B.put c >> B.put d
instance Default Delta where
def = mempty
type Deltas s = [(PID, s, Delta)] -> [(PID, s, Delta)]
data Stack a = Stack !Word32 [a]
deriving (Read, Show, Eq, Ord)
instance Semigroup (Stack a) where
(<>) = mappend
instance Monoid (Stack a) where
mempty = Stack 0 []
mappend (Stack xl xs) (Stack yl ys) = Stack (xl + yl) (xs ++ ys)
(#+) :: a -> Stack a -> Stack a
a #+ Stack l xs = Stack (l+1) (a:xs)
pop :: Stack a -> Maybe (a, Stack a)
pop (Stack 0 []) = Nothing
pop (Stack 0 _) = error "Invalid stack length"
pop (Stack _ []) = error "Invalid stack contents"
pop (Stack l (x:xs)) = Just (x, Stack (l-1) xs)
stackLength :: Stack a -> Word32
stackLength (Stack l _) = l
data ProcessState = PS {
_mem :: !ProgArray,
_ticks :: !Word32,
_pc :: !PC,
_stack :: !(Stack Word8),
_quote :: !Bool,
_jump :: !Bool,
_fnStack :: !(Maybe [Word8]),
_progIn :: !String, -- Name of input buffer
_progOut :: !String, -- Name of output buffer
_noteBuf :: !(Maybe Note)
}
makeProcessState :: ProgArray -> String -> String -> ProcessState
makeProcessState arr inp outp =
PS arr 0 def mempty False False mempty inp outp Nothing
-- OperatorParams provides some options for how noisefunge operators should
-- behave.
data OperatorParams = OperatorParams {
haltOnError :: !Bool,
wrapOnEdge :: !Bool,
debugLogging :: !Bool
} deriving (Read, Show, Eq, Ord)
instance Default OperatorParams where
def = OperatorParams True False False
data Operator = Operator {
_opName :: String,
_opChar :: Char,
_opDesc :: String,
_opCode :: (Fungine ())
}
newtype OpSet = OpSet { getOpSet :: Arr.Array Word8 (Maybe (Fungine ())) }
type FungeRWS = RWS OperatorParams (Deltas ProcessState) OpSet
type Fungine = ProcessStateT Word8 ProcessState FungeRWS
type FungeVM = VM Word8 ProcessState FungeRWS
type FungeProcess = Process Word8 ProcessState FungeRWS
type FungeProgram = Program Word8 ProcessState FungeRWS
$(makeLenses ''ProcessState)
$(makeLenses ''Delta)
$(makeLenses ''PC)
$(makeLenses ''Operator)
tellDelta :: Delta -> Fungine ()
tellDelta d = do
pid <- getPID
st <- getProcessState
tell ((pid, st, d):)
tellMem :: Fungine ()
tellMem = do
arr <- use mem
tellDelta $ Delta Nothing Nothing (Just arr) []
data ProcessStats = PStats {
_psTicks :: !Word32,
_psStackSize :: !Word32,
_psQuote :: !Bool
} deriving (Show, Eq, Ord)
$(makeLenses ''ProcessStats)
instance B.Binary ProcessStats where
get = PStats <$> B.get <*> B.get <*> B.get
put (PStats a b c) = B.put a >> B.put b >> B.put c
processStats :: Getter ProcessState ProcessStats
processStats = to processStats' where
processStats' ps = PStats (ps^.ticks) (stackLength $ ps^.stack)
(ps^.quote )
|
revnull/noisefunge
|
src/Language/NoiseFunge/Befunge/Process.hs
|
gpl-3.0
| 7,924 | 0 | 13 | 2,600 | 2,235 | 1,216 | 1,019 | 212 | 1 |
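A small sketch of the padding behaviour documented for makeProgArray above; the two-line program is invented, and the import assumes the noisefunge package is available:
import Data.Array.Unboxed (elems)
import Data.Char (chr)
import Language.NoiseFunge.Befunge.Process (makeProgArray)
-- Short rows are padded with spaces so the array is rectangular: the
-- one-character second row becomes "@ " and the bounds are ((0,0),(1,1)).
demo :: String
demo = map (chr . fromIntegral) (elems (makeProgArray [">v", "@"]))
-- demo == ">v@ "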
{-# LANGUAGE NoMonomorphismRestriction #-}
module DetermineTheType where
example = 1
co :: (b -> c) -> (a -> b) -> a -> c
co bToC aToB a = bToC (aToB a)
|
dkensinger/haskell
|
haskellbook/determineTypes.hs
|
gpl-3.0
| 157 | 0 | 8 | 35 | 62 | 34 | 28 | 5 | 1 |
module Ignifera.Piece where
import Ignifera.Color
import Ignifera.PieceType
data Piece = Piece Color PieceType
deriving (Eq, Read, Show)
|
fthomas/ignifera
|
src/Ignifera/Piece.hs
|
gpl-3.0
| 141 | 0 | 6 | 21 | 42 | 24 | 18 | 5 | 0 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module Cegt.PrettyPrinting where
import Cegt.Syntax
import Text.PrettyPrint
import Text.Parsec.Pos
import Data.Char
import Text.Parsec.Error(ParseError,showErrorMessages,errorPos,errorMessages)
class Disp d where
disp :: d -> Doc
precedence :: d -> Int
precedence _ = 0
instance Disp Doc where
disp = id
instance Disp String where
disp x = if (isUpper $ head x) || (isLower $ head x)
then text x
else if head x == '`'
then text x
else parens $ text x
instance Disp Int where
disp = integer . toInteger
dParen:: (Disp a) => Int -> a -> Doc
dParen level x =
if level >= (precedence x)
then parens $ disp x
else disp x
instance Disp Exp where
disp (Const x) = disp x
disp (Constr x) = disp x
disp (Var x) = disp x
disp (s@(App s1 s2)) =
dParen (precedence s - 1) s1 <+>
dParen (precedence s) s2
disp (Lambda x t) = text "\\" <+> text x
<+> text "." <+> disp t
disp (Pos _ t) = disp t
disp (a@(Arrow t1 t2)) =
disp t1
<+> text "~>"
<+> disp t2
disp (a@(Forall x f)) =
text "forall" <+> disp x
<+> text "."
<+> disp f
disp (a@(Imply t1 t2)) =
dParen (precedence a) t1
<+> text "=>"
<+> dParen (precedence a - 1) t2
precedence (Imply _ _) = 4
precedence (Pos _ t) = precedence t
precedence (Var _) = 12
precedence (Constr _) = 12
precedence (Const _) = 12
precedence (App _ _) = 10
precedence _ = 0
instance Disp Module where
disp (Module decl) = vcat (map disp decl)
instance Disp Decl where
disp (Rule n r) = disp n <+> text ":" <+> disp r
instance Disp SourcePos where
disp sp = text (sourceName sp) <> colon <> int (sourceLine sp)
<> colon <> int (sourceColumn sp) <> colon
instance Disp ParseError where
disp pe = (disp (errorPos pe)) $$
(text "Parse Error:" $$ sem)
where sem = text $ showErrorMessages "or" "unknown parse error"
"expecting" "unexpected" "end of input"
(errorMessages pe)
|
Fermat/CEGT
|
src/Cegt/PrettyPrinting.hs
|
gpl-3.0
| 2,112 | 0 | 13 | 611 | 873 | 436 | 437 | 68 | 2 |
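A minimal check of the precedence-driven parenthesisation above; the constructor spellings follow the patterns matched in the Disp Exp instance, and render comes from Text.PrettyPrint:
import Text.PrettyPrint (render)
import Cegt.Syntax
import Cegt.PrettyPrinting
-- The argument of the outer application is itself an application, whose
-- precedence (10) does not beat the enclosing context, so dParen adds
-- parentheses: the rendered result is "S (K K)".
demo :: String
demo = render (disp (App (Const "S") (App (Const "K") (Const "K"))))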
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Shortner.Types where
import Control.Applicative ((<*>))
import Data.Aeson hiding (json)
import Data.Aeson.TH
import Data.Functor ((<$>))
import Data.UUID
import Data.UUID.Aeson
import Data.UUID.V4
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromRow
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import Web.Scotty
data Domain = Domain
{ domainId :: UUID
, hostname :: T.Text
, name :: T.Text
}
instance FromRow Domain where
fromRow = Domain <$> field <*> field <*> field
data Link = Link
{ linkId :: UUID
, linkDomainId :: UUID
, code :: T.Text
, longUrl :: T.Text
, hits :: Integer
} deriving (Show, Eq)
$(deriveToJSON defaultOptions ''Link)
instance FromRow Link where
fromRow = Link <$> field <*> field <*> field <*> field <*> field
instance Parsable UUID where
parseParam p = maybe (Left "") Right $ fromString (LT.unpack p)
|
divarvel/scotty-short
|
src/Shortner/Types.hs
|
gpl-3.0
| 1,106 | 0 | 10 | 293 | 300 | 177 | 123 | 33 | 0 |
module Main where
import qualified Pacman as P
main :: IO ()
main = P.run
|
josuf107/Pacman
|
Main.hs
|
gpl-3.0
| 76 | 0 | 6 | 17 | 27 | 17 | 10 | 4 | 1 |
module Generation.Pattern
( Language (..)
, Shaped(..)
, generate
) where
import Data.List
import Data.Function
{-
-- p = pattern
-- a = map
-- m = generating monad
-- l = map boundary limit (to declare a subpattern has a smaller domain than the initial pattern)
-}
data Language b a m l = Language
{ empty :: a l
, zero :: b
, pattern :: b -> a l -> m (a l, [(l, b)])
}
data Shaped a l = Shaped
{ extract :: a l -> l
, set :: l -> a l -> a l
}
generate :: (Monad m, Ord l) => Language b a m l -> Shaped a l -> m (a l)
generate lang shape = sortedAccumM (pattern lang) shape (empty lang)
$ [((extract shape) (empty lang), zero lang)]
-- FIXME: ... this should be sorting on `b` rather than `l`.
sortedAccumM :: (Monad m, Ord l) => (b -> a l -> m (a l, [(l, b)])) -> Shaped a l -> a l -> [(l, b)] -> m (a l)
sortedAccumM _ _ acc [] = return acc
sortedAccumM advance shape acc ((b, p):remaining) = do
let l = (extract shape) acc
(acc', new) <- advance p ((set shape) b acc)
let acc'' = (set shape) l acc'
sortedAccumM advance shape acc'' $ sortBy (compare `on` fst) $ new ++ remaining
|
xaphiriron/maps
|
Generation/Pattern.hs
|
gpl-3.0
| 1,104 | 42 | 10 | 255 | 488 | 274 | 214 | 23 | 1 |
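A minimal sketch of wiring up the two records above; the concrete types are invented: the map is a list of Ints paired with an Int limit, the pattern state is an Int counter, and each step appends its counter and schedules one follow-up until three items exist:
import Generation.Pattern
demoLang :: Language Int ((,) [Int]) IO Int
demoLang = Language
    { empty   = ([], 0)
    , zero    = 0
    , pattern = \n (xs, l) ->
        return ((n : xs, l), if length xs < 3 then [(l, n + 1)] else [])
    }
demoShape :: Shaped ((,) [Int]) Int
demoShape = Shaped { extract = snd, set = \l (xs, _) -> (xs, l) }
demoRun :: IO ([Int], Int)
demoRun = generate demoLang demoShape   -- evaluates to ([3,2,1,0], 0)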
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudTasks.Projects.Locations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists information about the supported locations for this service.
--
-- /See:/ <https://cloud.google.com/tasks/ Cloud Tasks API Reference> for @cloudtasks.projects.locations.list@.
module Network.Google.Resource.CloudTasks.Projects.Locations.List
(
-- * REST Resource
ProjectsLocationsListResource
-- * Creating a Request
, projectsLocationsList
, ProjectsLocationsList
-- * Request Lenses
, pllXgafv
, pllUploadProtocol
, pllAccessToken
, pllUploadType
, pllName
, pllFilter
, pllPageToken
, pllPageSize
, pllCallback
) where
import Network.Google.CloudTasks.Types
import Network.Google.Prelude
-- | A resource alias for @cloudtasks.projects.locations.list@ method which the
-- 'ProjectsLocationsList' request conforms to.
type ProjectsLocationsListResource =
"v2" :>
Capture "name" Text :>
"locations" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListLocationsResponse
-- | Lists information about the supported locations for this service.
--
-- /See:/ 'projectsLocationsList' smart constructor.
data ProjectsLocationsList =
ProjectsLocationsList'
{ _pllXgafv :: !(Maybe Xgafv)
, _pllUploadProtocol :: !(Maybe Text)
, _pllAccessToken :: !(Maybe Text)
, _pllUploadType :: !(Maybe Text)
, _pllName :: !Text
, _pllFilter :: !(Maybe Text)
, _pllPageToken :: !(Maybe Text)
, _pllPageSize :: !(Maybe (Textual Int32))
, _pllCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pllXgafv'
--
-- * 'pllUploadProtocol'
--
-- * 'pllAccessToken'
--
-- * 'pllUploadType'
--
-- * 'pllName'
--
-- * 'pllFilter'
--
-- * 'pllPageToken'
--
-- * 'pllPageSize'
--
-- * 'pllCallback'
projectsLocationsList
:: Text -- ^ 'pllName'
-> ProjectsLocationsList
projectsLocationsList pPllName_ =
ProjectsLocationsList'
{ _pllXgafv = Nothing
, _pllUploadProtocol = Nothing
, _pllAccessToken = Nothing
, _pllUploadType = Nothing
, _pllName = pPllName_
, _pllFilter = Nothing
, _pllPageToken = Nothing
, _pllPageSize = Nothing
, _pllCallback = Nothing
}
-- | V1 error format.
pllXgafv :: Lens' ProjectsLocationsList (Maybe Xgafv)
pllXgafv = lens _pllXgafv (\ s a -> s{_pllXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pllUploadProtocol :: Lens' ProjectsLocationsList (Maybe Text)
pllUploadProtocol
= lens _pllUploadProtocol
(\ s a -> s{_pllUploadProtocol = a})
-- | OAuth access token.
pllAccessToken :: Lens' ProjectsLocationsList (Maybe Text)
pllAccessToken
= lens _pllAccessToken
(\ s a -> s{_pllAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pllUploadType :: Lens' ProjectsLocationsList (Maybe Text)
pllUploadType
= lens _pllUploadType
(\ s a -> s{_pllUploadType = a})
-- | The resource that owns the locations collection, if applicable.
pllName :: Lens' ProjectsLocationsList Text
pllName = lens _pllName (\ s a -> s{_pllName = a})
-- | A filter to narrow down results to a preferred subset. The filtering
-- language accepts strings like \"displayName=tokyo\", and is documented
-- in more detail in [AIP-160](https:\/\/google.aip.dev\/160).
pllFilter :: Lens' ProjectsLocationsList (Maybe Text)
pllFilter
= lens _pllFilter (\ s a -> s{_pllFilter = a})
-- | A page token received from the \`next_page_token\` field in the
-- response. Send that page token to receive the subsequent page.
pllPageToken :: Lens' ProjectsLocationsList (Maybe Text)
pllPageToken
= lens _pllPageToken (\ s a -> s{_pllPageToken = a})
-- | The maximum number of results to return. If not set, the service selects
-- a default.
pllPageSize :: Lens' ProjectsLocationsList (Maybe Int32)
pllPageSize
= lens _pllPageSize (\ s a -> s{_pllPageSize = a}) .
mapping _Coerce
-- | JSONP
pllCallback :: Lens' ProjectsLocationsList (Maybe Text)
pllCallback
= lens _pllCallback (\ s a -> s{_pllCallback = a})
instance GoogleRequest ProjectsLocationsList where
type Rs ProjectsLocationsList = ListLocationsResponse
type Scopes ProjectsLocationsList =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsList'{..}
= go _pllName _pllXgafv _pllUploadProtocol
_pllAccessToken
_pllUploadType
_pllFilter
_pllPageToken
_pllPageSize
_pllCallback
(Just AltJSON)
cloudTasksService
where go
= buildClient
(Proxy :: Proxy ProjectsLocationsListResource)
mempty
|
brendanhay/gogol
|
gogol-cloudtasks/gen/Network/Google/Resource/CloudTasks/Projects/Locations/List.hs
|
mpl-2.0
| 6,103 | 0 | 19 | 1,439 | 962 | 556 | 406 | 133 | 1 |
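Constructing one of these generated requests usually means applying the smart constructor and then adjusting optional fields through the lenses. A hedged sketch (the project name and filter value are made up, and actually issuing the request still needs a gogol environment and runGoogle/send):
{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (?~))
import Network.Google.Resource.CloudTasks.Projects.Locations.List
-- List locations of a hypothetical project, 50 per page, filtered by name.
req :: ProjectsLocationsList
req = projectsLocationsList "projects/my-project"
        & pllPageSize ?~ 50
        & pllFilter   ?~ "displayName=us-central1"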
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.People.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.People.Types
(
-- * Service Configuration
peopleService
-- * OAuth Scopes
, contactsOtherReadOnlyScope
, directoryReadOnlyScope
, userBirthdayReadScope
, userInfoProFileScope
, userPhoneNumbersReadScope
, userInfoEmailScope
, contactsReadOnlyScope
, userAddressesReadScope
, userOrganizationReadScope
, contactsScope
, userEmailsReadScope
, userGenderReadScope
-- * BatchUpdateContactsRequestSourcesItem
, BatchUpdateContactsRequestSourcesItem (..)
-- * PeopleUpdateContactSources
, PeopleUpdateContactSources (..)
-- * SipAddress
, SipAddress
, sipAddress
, saValue
, saMetadata
, saType
, saFormattedType
-- * BatchUpdateContactsRequestContacts
, BatchUpdateContactsRequestContacts
, batchUpdateContactsRequestContacts
, bucrcAddtional
-- * Photo
, Photo
, photo
, pDefault
, pURL
, pMetadata
-- * Event
, Event
, event
, eDate
, eMetadata
, eType
, eFormattedType
-- * ListConnectionsResponse
, ListConnectionsResponse
, listConnectionsResponse
, lcrTotalItems
, lcrNextPageToken
, lcrConnections
, lcrNextSyncToken
, lcrTotalPeople
-- * Status
, Status
, status
, sDetails
, sCode
, sMessage
-- * ContactGroupResponse
, ContactGroupResponse
, contactGroupResponse
, cgrStatus
, cgrContactGroup
, cgrRequestedResourceName
-- * FileAs
, FileAs
, fileAs
, faValue
, faMetadata
-- * ContactGroup
, ContactGroup
, contactGroup
, cgEtag
, cgResourceName
, cgMemberResourceNames
, cgFormattedName
, cgName
, cgGroupType
, cgMetadata
, cgMemberCount
, cgClientData
-- * ModifyContactGroupMembersResponse
, ModifyContactGroupMembersResponse
, modifyContactGroupMembersResponse
, mcgmrCanNotRemoveLastContactGroupResourceNames
, mcgmrNotFoundResourceNames
-- * UpdateContactGroupRequest
, UpdateContactGroupRequest
, updateContactGroupRequest
, ucgrContactGroup
, ucgrReadGroupFields
, ucgrUpdateGroupFields
-- * SearchResult
, SearchResult
, searchResult
, srPerson
-- * DomainMembership
, DomainMembership
, domainMembership
, dmInViewerDomain
-- * PeopleConnectionsListSources
, PeopleConnectionsListSources (..)
-- * RelationshipInterest
, RelationshipInterest
, relationshipInterest
, riValue
, riMetadata
, riFormattedValue
-- * BraggingRights
, BraggingRights
, braggingRights
, brValue
, brMetadata
-- * BatchCreateContactsRequest
, BatchCreateContactsRequest
, batchCreateContactsRequest
, bccrReadMask
, bccrSources
, bccrContacts
-- * Membership
, Membership
, membership
, mDomainMembership
, mContactGroupMembership
, mMetadata
-- * SearchResponse
, SearchResponse
, searchResponse
, srResults
-- * PeopleListDirectoryPeopleMergeSources
, PeopleListDirectoryPeopleMergeSources (..)
-- * BatchUpdateContactsResponse
, BatchUpdateContactsResponse
, batchUpdateContactsResponse
, bucrUpdateResult
-- * Location
, Location
, location
, lFloor
, lBuildingId
, lValue
, lCurrent
, lDeskCode
, lMetadata
, lType
, lFloorSection
-- * Person
, Person
, person
, perEmailAddresses
, perAgeRange
, perEtag
, perResidences
, perBiographies
, perTaglines
, perBraggingRights
, perBirthdays
, perResourceName
, perRelations
, perURLs
, perAddresses
, perUserDefined
, perNicknames
, perExternalIds
, perRelationshipStatuses
, perImClients
, perPhoneNumbers
, perOccupations
, perNames
, perGenders
, perPhotos
, perAgeRanges
, perEvents
, perCalendarURLs
, perFileAses
, perCoverPhotos
, perSkills
, perSipAddresses
, perMetadata
, perInterests
, perOrganizations
, perLocales
, perLocations
, perMiscKeywords
, perMemberships
, perRelationshipInterests
, perClientData
-- * Empty
, Empty
, empty
-- * PeopleListDirectoryPeopleSources
, PeopleListDirectoryPeopleSources (..)
-- * SourceType
, SourceType (..)
-- * ListContactGroupsResponse
, ListContactGroupsResponse
, listContactGroupsResponse
, lcgrContactGroups
, lcgrTotalItems
, lcgrNextPageToken
, lcgrNextSyncToken
-- * ContactGroupMembership
, ContactGroupMembership
, contactGroupMembership
, cgmContactGroupResourceName
, cgmContactGroupId
-- * MiscKeyword
, MiscKeyword
, miscKeyword
, mkValue
, mkMetadata
, mkType
, mkFormattedType
-- * BatchCreateContactsResponse
, BatchCreateContactsResponse
, batchCreateContactsResponse
, bccrCreatedPeople
-- * ContactGroupGroupType
, ContactGroupGroupType (..)
-- * UserDefined
, UserDefined
, userDefined
, udValue
, udKey
, udMetadata
-- * PeopleSearchContactsSources
, PeopleSearchContactsSources (..)
-- * CopyOtherContactToMyContactsGroupRequestSourcesItem
, CopyOtherContactToMyContactsGroupRequestSourcesItem (..)
-- * NicknameType
, NicknameType (..)
-- * Locale
, Locale
, locale
, locValue
, locMetadata
-- * PeopleConnectionsListSortOrder
, PeopleConnectionsListSortOrder (..)
-- * RelationshipStatus
, RelationshipStatus
, relationshipStatus
, rsValue
, rsMetadata
, rsFormattedValue
-- * PeopleDeleteContactPhotoSources
, PeopleDeleteContactPhotoSources (..)
-- * URL
, URL
, url
, uValue
, uMetadata
, uType
, uFormattedType
-- * PeopleGetSources
, PeopleGetSources (..)
-- * StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- * DeleteContactPhotoResponse
, DeleteContactPhotoResponse
, deleteContactPhotoResponse
, dcprPerson
-- * UpdateContactPhotoResponse
, UpdateContactPhotoResponse
, updateContactPhotoResponse
, ucprPerson
-- * GroupClientData
, GroupClientData
, groupClientData
, gcdValue
, gcdKey
-- * Address
, Address
, address
, aStreetAddress
, aPoBox
, aCountry
, aPostalCode
, aExtendedAddress
, aCity
, aMetadata
, aCountryCode
, aFormattedValue
, aRegion
, aType
, aFormattedType
-- * ProFileMetadata
, ProFileMetadata
, proFileMetadata
, pfmObjectType
, pfmUserTypes
-- * Relation
, Relation
, relation
, rPerson
, rMetadata
, rType
, rFormattedType
-- * GetPeopleResponse
, GetPeopleResponse
, getPeopleResponse
, gprResponses
-- * ListOtherContactsResponse
, ListOtherContactsResponse
, listOtherContactsResponse
, locrNextPageToken
, locrOtherContacts
, locrTotalSize
, locrNextSyncToken
-- * PeopleSearchDirectoryPeopleSources
, PeopleSearchDirectoryPeopleSources (..)
-- * BatchCreateContactsRequestSourcesItem
, BatchCreateContactsRequestSourcesItem (..)
-- * Birthday
, Birthday
, birthday
, bText
, bDate
, bMetadata
-- * PeopleCreateContactSources
, PeopleCreateContactSources (..)
-- * PersonAgeRange
, PersonAgeRange (..)
-- * PeopleSearchDirectoryPeopleMergeSources
, PeopleSearchDirectoryPeopleMergeSources (..)
-- * Date
, Date
, date
, dDay
, dYear
, dMonth
-- * Tagline
, Tagline
, tagline
, tValue
, tMetadata
-- * Residence
, Residence
, residence
, resValue
, resCurrent
, resMetadata
-- * UpdateContactPhotoRequest
, UpdateContactPhotoRequest
, updateContactPhotoRequest
, ucprSources
, ucprPersonFields
, ucprPhotoBytes
-- * AgeRangeType
, AgeRangeType
, ageRangeType
, artAgeRange
, artMetadata
-- * Gender
, Gender
, gender
, gValue
, gAddressMeAs
, gMetadata
, gFormattedValue
-- * UpdateContactPhotoRequestSourcesItem
, UpdateContactPhotoRequestSourcesItem (..)
-- * Name
, Name
, name
, nGivenName
, nPhoneticHonorificSuffix
, nMiddleName
, nUnstructuredName
, nPhoneticMiddleName
, nPhoneticFamilyName
, nPhoneticHonorificPrefix
, nHonorificPrefix
, nFamilyName
, nMetadata
, nDisplayName
, nDisplayNameLastFirst
, nPhoneticGivenName
, nHonorificSuffix
, nPhoneticFullName
-- * BatchDeleteContactsRequest
, BatchDeleteContactsRequest
, batchDeleteContactsRequest
, bdcrResourceNames
-- * FieldMetadata
, FieldMetadata
, fieldMetadata
, fmVerified
, fmPrimary
, fmSource
, fmSourcePrimary
-- * BatchUpdateContactsRequest
, BatchUpdateContactsRequest
, batchUpdateContactsRequest
, bucrUpdateMask
, bucrReadMask
, bucrSources
, bucrContacts
-- * SearchDirectoryPeopleResponse
, SearchDirectoryPeopleResponse
, searchDirectoryPeopleResponse
, sdprNextPageToken
, sdprPeople
, sdprTotalSize
-- * PhoneNumber
, PhoneNumber
, phoneNumber
, pnCanonicalForm
, pnValue
, pnMetadata
, pnType
, pnFormattedType
-- * ContactToCreate
, ContactToCreate
, contactToCreate
, ctcContactPerson
-- * Occupation
, Occupation
, occupation
, oValue
, oMetadata
-- * Xgafv
, Xgafv (..)
-- * EmailAddress
, EmailAddress
, emailAddress
, eaValue
, eaMetadata
, eaDisplayName
, eaType
, eaFormattedType
-- * CreateContactGroupRequest
, CreateContactGroupRequest
, createContactGroupRequest
, ccgrContactGroup
, ccgrReadGroupFields
-- * ContactGroupMetadata
, ContactGroupMetadata
, contactGroupMetadata
, cgmUpdateTime
, cgmDeleted
-- * ModifyContactGroupMembersRequest
, ModifyContactGroupMembersRequest
, modifyContactGroupMembersRequest
, mcgmrResourceNamesToAdd
, mcgmrResourceNamesToRemove
-- * BatchUpdateContactsResponseUpdateResult
, BatchUpdateContactsResponseUpdateResult
, batchUpdateContactsResponseUpdateResult
, bucrurAddtional
-- * PersonMetadataObjectType
, PersonMetadataObjectType (..)
-- * Source
, Source
, source
, sEtag
, sProFileMetadata
, sUpdateTime
, sId
, sType
-- * MiscKeywordType
, MiscKeywordType (..)
-- * ImClient
, ImClient
, imClient
, icFormattedProtocol
, icUsername
, icProtocol
, icMetadata
, icType
, icFormattedType
-- * BatchGetContactGroupsResponse
, BatchGetContactGroupsResponse
, batchGetContactGroupsResponse
, bgcgrResponses
-- * BiographyContentType
, BiographyContentType (..)
-- * PersonMetadata
, PersonMetadata
, personMetadata
, pmPreviousResourceNames
, pmObjectType
, pmSources
, pmLinkedPeopleResourceNames
, pmDeleted
-- * AgeRangeTypeAgeRange
, AgeRangeTypeAgeRange (..)
-- * Nickname
, Nickname
, nickname
, nicValue
, nicMetadata
, nicType
-- * ExternalId
, ExternalId
, externalId
, eiValue
, eiMetadata
, eiType
, eiFormattedType
-- * ProFileMetadataObjectType
, ProFileMetadataObjectType (..)
-- * ListDirectoryPeopleResponse
, ListDirectoryPeopleResponse
, listDirectoryPeopleResponse
, ldprNextPageToken
, ldprPeople
, ldprNextSyncToken
-- * CopyOtherContactToMyContactsGroupRequest
, CopyOtherContactToMyContactsGroupRequest
, copyOtherContactToMyContactsGroupRequest
, coctmcgrReadMask
, coctmcgrSources
, coctmcgrCopyMask
-- * Organization
, Organization
, organization
, orgDePartment
, orgLocation
, orgDomain
, orgEndDate
, orgSymbol
, orgJobDescription
, orgCurrent
, orgStartDate
, orgName
, orgMetadata
, orgPhoneticName
, orgTitle
, orgType
, orgFormattedType
-- * OtherContactsListSources
, OtherContactsListSources (..)
-- * Interest
, Interest
, interest
, iValue
, iMetadata
-- * PersonResponse
, PersonResponse
, personResponse
, prStatus
, prRequestedResourceName
, prPerson
, prHTTPStatusCode
-- * CalendarURL
, CalendarURL
, calendarURL
, cuURL
, cuMetadata
, cuType
, cuFormattedType
-- * ClientData
, ClientData
, clientData
, cdValue
, cdKey
, cdMetadata
-- * PeopleGetBatchGetSources
, PeopleGetBatchGetSources (..)
-- * ProFileMetadataUserTypesItem
, ProFileMetadataUserTypesItem (..)
-- * Skill
, Skill
, skill
, sValue
, sMetadata
-- * Biography
, Biography
, biography
, bioValue
, bioMetadata
, bioContentType
-- * CoverPhoto
, CoverPhoto
, coverPhoto
, cpDefault
, cpURL
, cpMetadata
) where
import Network.Google.People.Types.Product
import Network.Google.People.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v1' of the People API. This contains the host and root path used as a starting point for constructing service requests.
peopleService :: ServiceConfig
peopleService
= defaultService (ServiceId "people:v1")
"people.googleapis.com"
-- | See and download contact info automatically saved in your \"Other
-- contacts\"
contactsOtherReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/contacts.other.readonly"]
contactsOtherReadOnlyScope = Proxy
-- | See and download your organization\'s GSuite directory
directoryReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/directory.readonly"]
directoryReadOnlyScope = Proxy
-- | See and download your exact date of birth
userBirthdayReadScope :: Proxy '["https://www.googleapis.com/auth/user.birthday.read"]
userBirthdayReadScope = Proxy
-- | See your personal info, including any personal info you\'ve made
-- publicly available
userInfoProFileScope :: Proxy '["https://www.googleapis.com/auth/userinfo.profile"]
userInfoProFileScope = Proxy
-- | See and download your personal phone numbers
userPhoneNumbersReadScope :: Proxy '["https://www.googleapis.com/auth/user.phonenumbers.read"]
userPhoneNumbersReadScope = Proxy
-- | See your primary Google Account email address
userInfoEmailScope :: Proxy '["https://www.googleapis.com/auth/userinfo.email"]
userInfoEmailScope = Proxy
-- | See and download your contacts
contactsReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/contacts.readonly"]
contactsReadOnlyScope = Proxy
-- | View your street addresses
userAddressesReadScope :: Proxy '["https://www.googleapis.com/auth/user.addresses.read"]
userAddressesReadScope = Proxy
-- | See your education, work history and org info
userOrganizationReadScope :: Proxy '["https://www.googleapis.com/auth/user.organization.read"]
userOrganizationReadScope = Proxy
-- | See, edit, download, and permanently delete your contacts
contactsScope :: Proxy '["https://www.googleapis.com/auth/contacts"]
contactsScope = Proxy
-- | See and download all of your Google Account email addresses
userEmailsReadScope :: Proxy '["https://www.googleapis.com/auth/user.emails.read"]
userEmailsReadScope = Proxy
-- | See your gender
userGenderReadScope :: Proxy '["https://www.googleapis.com/auth/user.gender.read"]
userGenderReadScope = Proxy
|
brendanhay/gogol
|
gogol-people/gen/Network/Google/People/Types.hs
|
mpl-2.0
| 16,255 | 0 | 7 | 4,086 | 1,937 | 1,320 | 617 | 513 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.FloodlightActivities.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes an existing floodlight activity.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.floodlightActivities.delete@.
module Network.Google.Resource.DFAReporting.FloodlightActivities.Delete
(
-- * REST Resource
FloodlightActivitiesDeleteResource
-- * Creating a Request
, floodlightActivitiesDelete
, FloodlightActivitiesDelete
-- * Request Lenses
, fadXgafv
, fadUploadProtocol
, fadAccessToken
, fadUploadType
, fadProFileId
, fadId
, fadCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.floodlightActivities.delete@ method which the
-- 'FloodlightActivitiesDelete' request conforms to.
type FloodlightActivitiesDeleteResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"floodlightActivities" :>
Capture "id" (Textual Int64) :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes an existing floodlight activity.
--
-- /See:/ 'floodlightActivitiesDelete' smart constructor.
data FloodlightActivitiesDelete =
FloodlightActivitiesDelete'
{ _fadXgafv :: !(Maybe Xgafv)
, _fadUploadProtocol :: !(Maybe Text)
, _fadAccessToken :: !(Maybe Text)
, _fadUploadType :: !(Maybe Text)
, _fadProFileId :: !(Textual Int64)
, _fadId :: !(Textual Int64)
, _fadCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'FloodlightActivitiesDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fadXgafv'
--
-- * 'fadUploadProtocol'
--
-- * 'fadAccessToken'
--
-- * 'fadUploadType'
--
-- * 'fadProFileId'
--
-- * 'fadId'
--
-- * 'fadCallback'
floodlightActivitiesDelete
:: Int64 -- ^ 'fadProFileId'
-> Int64 -- ^ 'fadId'
-> FloodlightActivitiesDelete
floodlightActivitiesDelete pFadProFileId_ pFadId_ =
FloodlightActivitiesDelete'
{ _fadXgafv = Nothing
, _fadUploadProtocol = Nothing
, _fadAccessToken = Nothing
, _fadUploadType = Nothing
, _fadProFileId = _Coerce # pFadProFileId_
, _fadId = _Coerce # pFadId_
, _fadCallback = Nothing
}
-- | V1 error format.
fadXgafv :: Lens' FloodlightActivitiesDelete (Maybe Xgafv)
fadXgafv = lens _fadXgafv (\ s a -> s{_fadXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
fadUploadProtocol :: Lens' FloodlightActivitiesDelete (Maybe Text)
fadUploadProtocol
= lens _fadUploadProtocol
(\ s a -> s{_fadUploadProtocol = a})
-- | OAuth access token.
fadAccessToken :: Lens' FloodlightActivitiesDelete (Maybe Text)
fadAccessToken
= lens _fadAccessToken
(\ s a -> s{_fadAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
fadUploadType :: Lens' FloodlightActivitiesDelete (Maybe Text)
fadUploadType
= lens _fadUploadType
(\ s a -> s{_fadUploadType = a})
-- | User profile ID associated with this request.
fadProFileId :: Lens' FloodlightActivitiesDelete Int64
fadProFileId
= lens _fadProFileId (\ s a -> s{_fadProFileId = a})
. _Coerce
-- | Floodlight activity ID.
fadId :: Lens' FloodlightActivitiesDelete Int64
fadId
= lens _fadId (\ s a -> s{_fadId = a}) . _Coerce
-- | JSONP
fadCallback :: Lens' FloodlightActivitiesDelete (Maybe Text)
fadCallback
= lens _fadCallback (\ s a -> s{_fadCallback = a})
instance GoogleRequest FloodlightActivitiesDelete
where
type Rs FloodlightActivitiesDelete = ()
type Scopes FloodlightActivitiesDelete =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient FloodlightActivitiesDelete'{..}
= go _fadProFileId _fadId _fadXgafv
_fadUploadProtocol
_fadAccessToken
_fadUploadType
_fadCallback
(Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy FloodlightActivitiesDeleteResource)
mempty
|
brendanhay/gogol
|
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/FloodlightActivities/Delete.hs
|
mpl-2.0
| 5,315 | 0 | 19 | 1,234 | 825 | 476 | 349 | 116 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.FireStore.Projects.Locations.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets information about a location.
--
-- /See:/ <https://cloud.google.com/firestore Cloud Firestore API Reference> for @firestore.projects.locations.get@.
module Network.Google.Resource.FireStore.Projects.Locations.Get
(
-- * REST Resource
ProjectsLocationsGetResource
-- * Creating a Request
, projectsLocationsGet
, ProjectsLocationsGet
-- * Request Lenses
, plgXgafv
, plgUploadProtocol
, plgAccessToken
, plgUploadType
, plgName
, plgCallback
) where
import Network.Google.FireStore.Types
import Network.Google.Prelude
-- | A resource alias for @firestore.projects.locations.get@ method which the
-- 'ProjectsLocationsGet' request conforms to.
type ProjectsLocationsGetResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Location
-- | Gets information about a location.
--
-- /See:/ 'projectsLocationsGet' smart constructor.
data ProjectsLocationsGet =
ProjectsLocationsGet'
{ _plgXgafv :: !(Maybe Xgafv)
, _plgUploadProtocol :: !(Maybe Text)
, _plgAccessToken :: !(Maybe Text)
, _plgUploadType :: !(Maybe Text)
, _plgName :: !Text
, _plgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plgXgafv'
--
-- * 'plgUploadProtocol'
--
-- * 'plgAccessToken'
--
-- * 'plgUploadType'
--
-- * 'plgName'
--
-- * 'plgCallback'
projectsLocationsGet
:: Text -- ^ 'plgName'
-> ProjectsLocationsGet
projectsLocationsGet pPlgName_ =
ProjectsLocationsGet'
{ _plgXgafv = Nothing
, _plgUploadProtocol = Nothing
, _plgAccessToken = Nothing
, _plgUploadType = Nothing
, _plgName = pPlgName_
, _plgCallback = Nothing
}
-- | V1 error format.
plgXgafv :: Lens' ProjectsLocationsGet (Maybe Xgafv)
plgXgafv = lens _plgXgafv (\ s a -> s{_plgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plgUploadProtocol :: Lens' ProjectsLocationsGet (Maybe Text)
plgUploadProtocol
= lens _plgUploadProtocol
(\ s a -> s{_plgUploadProtocol = a})
-- | OAuth access token.
plgAccessToken :: Lens' ProjectsLocationsGet (Maybe Text)
plgAccessToken
= lens _plgAccessToken
(\ s a -> s{_plgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plgUploadType :: Lens' ProjectsLocationsGet (Maybe Text)
plgUploadType
= lens _plgUploadType
(\ s a -> s{_plgUploadType = a})
-- | Resource name for the location.
plgName :: Lens' ProjectsLocationsGet Text
plgName = lens _plgName (\ s a -> s{_plgName = a})
-- | JSONP
plgCallback :: Lens' ProjectsLocationsGet (Maybe Text)
plgCallback
= lens _plgCallback (\ s a -> s{_plgCallback = a})
instance GoogleRequest ProjectsLocationsGet where
type Rs ProjectsLocationsGet = Location
type Scopes ProjectsLocationsGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/datastore"]
requestClient ProjectsLocationsGet'{..}
= go _plgName _plgXgafv _plgUploadProtocol
_plgAccessToken
_plgUploadType
_plgCallback
(Just AltJSON)
fireStoreService
where go
= buildClient
(Proxy :: Proxy ProjectsLocationsGetResource)
mempty
|
brendanhay/gogol
|
gogol-firestore/gen/Network/Google/Resource/FireStore/Projects/Locations/Get.hs
|
mpl-2.0
| 4,549 | 0 | 15 | 1,037 | 698 | 408 | 290 | 101 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Handler.Image where
import Import
import Yesod.Core.Types
import Data.Conduit
import Data.Conduit.List (foldMap)
import Control.Monad.Trans.Resource
import Data.Text.Encoding
import Widgets.Time
getImageR :: Text -> Handler TypedContent
getImageR image_handle = do
Entity _ Image{..} <- runYDB $ getBy404 $ UniqueImageHandle image_handle
respond imageFormat imageData
getImageMetaR :: Text -> Handler Html
getImageMetaR image_handle = do
Entity _ Image{..} <- runYDB $ getBy404 $ UniqueImageHandle image_handle
defaultLayout $(widgetFile "image_metadata")
uploadForm :: Form (Text, FileInfo)
uploadForm = renderBootstrap3 BootstrapBasicForm $ (,)
<$> areq textField "Image Name" Nothing
<*> fileAFormReq "Image File"
getUploadImageR :: Handler Html
getUploadImageR = do
_ <- requireAuthId
(form, enctype) <- generateFormPost uploadForm
defaultLayout $(widgetFile "upload_image")
postUploadImageR :: Handler Html
postUploadImageR = do
user_id <- requireAuthId
((result, _), _) <- runFormPost uploadForm
now <- liftIO getCurrentTime
case result of
FormMissing -> error "form missing"
FormFailure err -> error $ "error processing form:\n" ++ unlines (map (('\t':) . show) err)
FormSuccess (name, FileInfo{..}) -> do
contents <- liftIO $ runResourceT $ fileSourceRaw $$ foldMap id
maybe_image_id <- runDB $ insertUnique $ Image now user_id Nothing name Nothing (encodeUtf8 fileContentType) contents
case maybe_image_id of
Just _ -> redirect $ ImageMetaR name
Nothing -> do
setMessage "that name is already taken, try another"
unnamed_image_id <- runYDB $ insert $ UnnamedImage now user_id Nothing (Just name) Nothing (encodeUtf8 fileContentType) contents
redirect $ NameImageR unnamed_image_id
nameImageForm :: Maybe Text -> Form Text
nameImageForm = renderBootstrap3 BootstrapBasicForm . areq textField "New Image Name"
nameImage :: Text -> UnnamedImage -> Image
nameImage name (UnnamedImage ts uploader project _ origin format contents) = Image ts uploader project name origin format contents
getNameImageR :: UnnamedImageId -> Handler Html
getNameImageR unnamed_image_id = do
viewer_id <- requireAuthId
UnnamedImage{..} <- runYDB $ get404 unnamed_image_id
when (viewer_id /= unnamedImageUploader) $ permissionDenied "you did not upload this image"
(form, enctype) <- generateFormPost $ nameImageForm unnamedImageName
defaultLayout $(widgetFile "name_image")
postNameImageR :: UnnamedImageId -> Handler Html
postNameImageR unnamed_image_id = do
viewer_id <- requireAuthId
unnamed_image@UnnamedImage{..} <- runYDB $ get404 unnamed_image_id
when (viewer_id /= unnamedImageUploader) $ permissionDenied "you did not upload this image"
((result, _), _) <- runFormPost $ nameImageForm unnamedImageName
case result of
FormMissing -> error "form missing"
FormFailure err -> error $ "error processing form:\n" ++ unlines (map (('\t':) . show) err)
FormSuccess name -> do
maybe_image_id <- runDB $ insertUnique $ nameImage name unnamed_image
case maybe_image_id of
Just _ -> redirect $ ImageMetaR name
Nothing -> do
setMessage "that name is also already taken, try another"
runYDB $ update $ \ui -> do
where_ $ ui ^. UnnamedImageId ==. val unnamed_image_id
set ui [ UnnamedImageName =. val (Just name) ]
redirect $ NameImageR unnamed_image_id
|
chreekat/snowdrift
|
Handler/Image.hs
|
agpl-3.0
| 3,736 | 0 | 26 | 895 | 1,009 | 486 | 523 | 73 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module StearnsWharf.Repos.SteelElementRepository where
import qualified Data.Map as Map
import Control.Monad (liftM3)
import Control.Applicative ((<$>),(<*>))
import Database.PostgreSQL.Simple (Connection,query)
import Database.PostgreSQL.Simple.FromRow (FromRow,fromRow,field)
import qualified StearnsWharf.Nodes as N
import qualified StearnsWharf.Loads as L
import qualified StearnsWharf.Beams as B
import qualified StearnsWharf.Materials as M
import qualified StearnsWharf.Steel.SteelProfiles as S
data SteelElementDTO =
SteelElementDTO {
elId :: Int,
n1 :: Int,
n2 :: Int,
profileId :: Int,
loadId :: Maybe Int
} deriving Show
instance FromRow S.SteelProfile where
fromRow = S.SteelProfile <$> field <*> field <*> field <*> field <*> field <*> field <*> field <*> field <*> liftM3 M.Steel field field field
instance FromRow SteelElementDTO where
fromRow = SteelElementDTO <$> field <*> field <*> field <*> field <*> field
systemSteelDTO:: Connection
-> Int -- ^ System Id
-> Int -- ^ Load Case
-> IO [SteelElementDTO]
systemSteelDTO conn sysId lc =
(query conn "select oid,n1,n2,profile_id,ld_id from construction.v_steel_elements where sys_id=? and ((ld_case=?) or (ld_id is null)) order by x1,y1" [sysId,lc]) :: IO [SteelElementDTO]
steelElement2Beam :: N.NodeMap
-> L.LoadMap
-> S.SteelProfileMap
-> SteelElementDTO
-> B.Beam S.SteelProfile
steelElement2Beam nm lm steelm el = B.Bjlk33 (elId el) n1' n2' steel ld
where Just n1' = Map.lookup (n1 el) nm
Just n2' = Map.lookup (n2 el) nm
Just steel = Map.lookup (profileId el) steelm
ld = loadId el >>= flip Map.lookup lm
systemSteelElements :: Connection
-> Int -- ^ System Id
-> Int -- ^ Load Case
-> N.NodeMap
-> L.LoadMap
-> IO [B.Beam S.SteelProfile]
systemSteelElements conn sysId lc nm lm =
systemSteelDTO conn sysId lc >>= \dto ->
systemSteelAsMap conn sysId >>= \steelm ->
let steelElement2Beam' = steelElement2Beam nm lm steelm in
return (map steelElement2Beam' dto)
systemSteelProfiles :: Connection
-> Int -- ^ System Id
-> IO [S.SteelProfile]
systemSteelProfiles conn sysId =
(query conn
"select x.oid,x.name,x.b,x.h,x.flange,x.web,x.w_el_y,x.i_y,200000.0 as emodule,355.0 as sigma,251.0 as tau from construction.steel_beams x join construction.steel_elements e on e.profile_id=x.oid where e.sys_id=?" [sysId])
:: IO [S.SteelProfile]
systemSteelAsMap :: Connection
-> Int -- ^ System Id
-> IO S.SteelProfileMap
systemSteelAsMap conn sysId =
systemSteelProfiles conn sysId >>= \profiles ->
return (Map.fromList (map asListItem profiles))
where asListItem x = (S.profileId x, x)
|
baalbek/stearnswharf
|
src/StearnsWharf/Repos/SteelElementRepository.hs
|
lgpl-3.0
| 3,194 | 0 | 15 | 887 | 735 | 400 | 335 | 68 | 1 |
module FreePalace.Domain.GUI where
-- TODO Need Disconnect menu item and Quit menu item
import qualified FreePalace.Domain.Chat as Chat
import qualified System.FilePath as Path
data MainWindow = MainWindow {
quit :: IO (),
showMainWindow :: IO (),
closeMainWindow :: IO (),
onMainWindowClose :: IO () -> IO ()
}
data LogWindow = LogWindow {
showLogWindow :: IO (),
closeLogWindow :: IO (),
appendMessage :: Chat.Communication -> IO ()
}
data Dialog = Dialog {
showDialog :: IO (),
closeDialog :: IO ()
}
data Button = Button {
onButtonClick :: IO () -> IO ()
}
data TextField = TextField {
textValue :: IO String,
clearTextEntry :: IO (),
onEnterKeyPress :: IO () -> IO ()
}
data Canvas = Canvas {
displayBackground :: Path.FilePath -> IO ()
}
data Components = Components {
mainWindow :: MainWindow
, connectDialog :: Dialog
, connectHostEntry :: TextField
, connectPortEntry :: TextField
, connectOk :: Button
, connectCancel :: Button
, logWindow :: LogWindow
, chatEntry :: TextField
, chatSend :: Button
, roomCanvas :: Canvas
}
instance Show Components where
show _ = "Components"
|
psfblair/freepalace
|
src/FreePalace/Domain/GUI.hs
|
apache-2.0
| 1,235 | 0 | 11 | 329 | 355 | 203 | 152 | 36 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RecordWildCards #-}
module Target where
import Control.Arrow
import Control.Lens
import Data.AffineSpace
import Data.Default
import qualified Data.Set as Set
import Data.VectorSpace
import Graphics.Gloss
import Graphics.Gloss.Data.Vector
import Graphics.Gloss.Geometry.Angle
import Player
data Target
= Token
{ _tPos :: !Point
, _tSpd :: !Point
}
| Goal
{ _tPos :: !Point
}
deriving (Show)
makeLenses ''Target
instance Default Target where
def = Goal (0,0)
tokenR, goalR, goalW :: Float
tokenR = playerR
goalR = 60
goalW = 5
drawTarget :: Target -> Picture
drawTarget Token{..} = translateP _tPos $ Color black $ circleSolid tokenR
drawTarget Goal{..} = translateP _tPos $ Color black $ thickCircle goalR goalW
stepTarget :: Float -> Target -> Target
stepTarget dt t = case t of
Goal{} -> t
Token{..} -> t & tPos %~ (.+^ _tSpd)
-- TODO(klao): refactor
bounceOffBorder' :: (Float,Float) -> Float -> Target -> Target
bounceOffBorder' sz bw t@Token{..} = t & horiz & vert
where
(w,h) = sz & both %~ (/2) & both -~ (bw + tokenR)
(x,y) = _tPos
(vx, vy) = _tSpd
horiz = if abs x > w && signum x == signum vx then tSpd._1 %~ negate else id
vert = if abs y > h && signum y == signum vy then tSpd._2 %~ negate else id
bounceOffBorder' _ _ t = t
|
nilcons/tag-game
|
src/Target.hs
|
apache-2.0
| 1,437 | 0 | 11 | 375 | 501 | 274 | 227 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Script Console</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
secdec/zap-extensions
|
addOns/scripts/src/main/javahelp/org/zaproxy/zap/extension/scripts/resources/help_ru_RU/helpset_ru_RU.hs
|
apache-2.0
| 970 | 77 | 66 | 156 | 427 | 215 | 212 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module RulesGen.Rules where
import qualified Data.Map as Map
newtype NonterminalID = NonterminalID String
deriving (Show, Eq, Ord)
newtype Rules = Rules (Map.Map NonterminalID Rule)
deriving (Show)
newtype Rule = Rule (Map.Map ProductionID (Int, [Symbol]))
deriving (Show)
newtype ProductionID = ProductionID Int
deriving (Show, Eq, Ord, Enum)
data Symbol
= Terminal !Char
| RandomNonterminal !NonterminalID
| BoundNonterminal !NonterminalID
deriving (Show)
|
arotenberg/rulesgen
|
src/RulesGen/Rules.hs
|
apache-2.0
| 536 | 0 | 9 | 97 | 153 | 88 | 65 | 22 | 0 |
module MDC_Utils where
import Data.List
import Data.List.Split
--function to comma-separate values from a list containing doubles
listSeparate :: String -> [Double] -> String
listSeparate c xs = concat . intersperse c . map show $ xs
--convert from list of strings to list of doubles
string2Double :: [String] -> [Double]
string2Double xs = [ fst (head (reads x :: [(Double, String)])) | x <- xs] --reads returns a list of (value, rest) pairs; fst of the head is the Double.
--grab a string like */2013/09/14.txt and return 2013-09-14
--split string in / to a list
--grab the last 3 / of the list
--concatenate with - the last 3
file2Date :: String -> String
file2Date xs = concat . intersperse "-" . map (takeWhile (/= '.')) $ listdate
where listdate = reverse( take 3 (reverse splitlist) )
where splitlist = splitOn "/" xs
--returns the value from an input file line; the first argument is the delimiter, the second is the line
getValFromLine :: String -> String -> Double
getValFromLine c xs = head (string2Double [last (splitOn c xs)])
|
ozkar99/HaskellEstadistica
|
MDC/MDC_Utils.hs
|
bsd-2-clause
| 989 | 1 | 12 | 177 | 252 | 134 | 118 | 13 | 1 |
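The helpers in MDC_Utils.hs above are pure, so a hypothetical GHCi session shows their contracts directly; the file path and line contents are made up:
-- ghci> listSeparate "," [1.5, 2.0, 3.25]
-- "1.5,2.0,3.25"
-- ghci> string2Double ["1.5", "2"]
-- [1.5,2.0]
-- ghci> file2Date "data/2013/09/14.txt"
-- "2013-09-14"
-- ghci> getValFromLine ";" "temp;21.5"
-- 21.5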
--
-- Copyright (c) 2013, Carl Joachim Svenn
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- 1. Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-- (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-- LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module MEnv.IOS
(
MEnv (..),
runMEnvIOS,
module MEnv.IOS.MEnvInit,
) where
import Foreign
import Foreign.Marshal.Alloc
import Data.IORef
import Control.Monad.Trans
import Control.Monad.State
import MEnv.IOS.MEnvInit
--------------------------------------------------------------------------------
-- MEnv
-- | the MEnv monad: computations inside an environment
newtype MEnv res a =
MEnv
{
menvUnwrap :: StateT res IO a
}
deriving
(
      Monad,
      Applicative,
MonadIO,
MonadState res,
Functor
)
--------------------------------------------------------------------------------
-- runMEnvIOS
-- | init environment, then run MEnv inside. on iOS.
runMEnvIOS :: MEnvInit -> IO res -> (res -> IO ()) ->
(a -> MEnv res b) ->
(b -> MEnv res b) ->
(b -> MEnv res c) ->
a ->
IO c
runMEnvIOS init loadResource unloadResource
begin
iterate
end = \a -> do
-- set init
alloca $ \ptr -> do
poke ptr init
ios_init ptr
refEnv <- newIORef $ error "runMEnvIOS: refEnv undefined"
refB <- newIORef $ error "ruMEnvIOS: refB undefined"
-- a -> m b
-- create callback into Haskell from Foreign
funptrBegin <- mkHaskellCall $ do
-- create resource
env <- loadResource
(b, env') <- runStateT (menvUnwrap $ begin a) env
writeIORef refB b
writeIORef refEnv env'
-- b -> m b
-- create callback into Haskell from Foreign
funptrIterate <- mkHaskellCall $ do
env <- readIORef refEnv
b <- readIORef refB
(b', env') <- runStateT (menvUnwrap $ iterate b) env
writeIORef refB b'
writeIORef refEnv env'
-- call Haskell from Foreign. this function should not return.
ios_main funptrBegin funptrIterate
-- (the following code should not run in practice)
freeHaskellFunPtr funptrIterate
freeHaskellFunPtr funptrBegin
-- b -> m c
b <- readIORef refB
env <- readIORef refEnv
(c, env') <- runStateT (menvUnwrap $ end b) env
unloadResource env'
return c
-- | create 'void (*fun_ptr)()' of 'IO ()'
foreign import ccall "wrapper" mkHaskellCall
:: IO () -> IO (FunPtr (IO ()))
-- | void ios_init(IOSInit* )
foreign import ccall "ios_init" ios_init
:: Ptr MEnvInit -> IO ()
-- | void ios_main(void (*begin)(), void (*iterate)())
foreign import ccall safe "ios_main" ios_main
:: FunPtr (IO ()) -> FunPtr (IO ()) -> IO ()
|
karamellpelle/MEnv
|
source/MEnv/IOS.hs
|
bsd-2-clause
| 4,205 | 0 | 16 | 1,097 | 639 | 337 | 302 | 63 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Development.Hake.Types (
hakeParser
, HakeMode(..)
, InitOptions(..)
, PersistedEnvironment(..)) where
import Control.DeepSeq
import Control.Monad (liftM5)
import Data.Binary
import Data.Hashable
import Data.Typeable
import Development.Shake.FilePath
import Options.Applicative
data Verbosity = Quiet | Verbose deriving (Enum,Read,Show)
data InitOptions = InitOptions {
ioDesiredPrefix :: FilePath,
ioKeepGoing :: Bool,
ioAdditionalPackageDbs :: [FilePath]
}
data HakeMode = Init InitOptions
data HakeOptions = HakeOptions {
hoVerbosity :: Verbosity,
hoMode :: HakeMode
}
verbosityParser :: Parser Verbosity
verbosityParser = option auto ( short 'v' <> long "verbose" <> value Quiet <> help "Desired verbosity level")
keepGoingParser :: Parser Bool
keepGoingParser = switch (short 'k' <> long "keep-going" <> help "Continue as much as possible after an error")
hakeModeParser :: Parser HakeMode
hakeModeParser = hsubparser $
command "init" (info (Init <$> (InitOptions
<$> option str (value ("dist" </> "build") <> long "prefix" <> help "Installation prefix")
<*> keepGoingParser
<*> many (option str (long "package-db" <> help "Additional Package DBs for finding dependencies."))
)) $ progDesc "init")
hakeParser :: ParserInfo HakeOptions
hakeParser = info ((HakeOptions <$> verbosityParser <*> hakeModeParser) <**> helper)
(progDesc "The hake program" <> fullDesc)
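-- Illustrative usage (not part of the original source): running
-- @execParserPure defaultPrefs hakeParser ["init", "--prefix", "dist/build", "-k"]@
-- should yield the 'Init' mode with that prefix, keep-going enabled and no
-- additional package databases.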
data PersistedEnvironment = PersistedEnvironment
{ penvRootDirectory :: FilePath
, penvBuildDirectory :: FilePath
, penvPrefixDirectory :: FilePath
, penvPkgConfDirectory :: FilePath
, penvAdditionalPkgConfDirectories :: [FilePath]
} deriving (Show, Eq, Ord, Typeable)
instance Binary PersistedEnvironment where
put PersistedEnvironment{..} = do
put penvRootDirectory
put penvBuildDirectory
put penvPrefixDirectory
put penvPkgConfDirectory
put penvAdditionalPkgConfDirectories
get =
liftM5
PersistedEnvironment
get
get
get
get
get
instance Hashable PersistedEnvironment where
hashWithSalt s PersistedEnvironment{..} = hashWithSalt s
( penvRootDirectory
, penvBuildDirectory
, penvPrefixDirectory
, penvPkgConfDirectory
, penvAdditionalPkgConfDirectories)
instance NFData PersistedEnvironment where
rnf PersistedEnvironment{..} =
rnf penvRootDirectory `seq`
rnf penvBuildDirectory `seq`
rnf penvPrefixDirectory `seq`
rnf penvPkgConfDirectory `seq`
rnf penvAdditionalPkgConfDirectories `seq`
()
|
HakeIO/hake
|
hake-library/src/Development/Hake/Types.hs
|
bsd-3-clause
| 2,622 | 0 | 22 | 504 | 644 | 345 | 299 | 73 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.CrazeSpec where
import Control.Concurrent
import Control.Monad.IO.Class
import Data.ByteString (ByteString, isInfixOf)
import Network.Craze
import Network.Curl
import Network.HTTP.Base hiding (port)
import Network.HTTP.Proxy.Server
import Test.Hspec
oneSecond :: Int
oneSecond = 1000000
runDelayedProxy :: Integer -> Int -> ByteString -> IO ()
runDelayedProxy port delay fixedBody = proxyMain (def :: Settings ByteString)
{ responseModifier = \_ res
-> threadDelay (delay * oneSecond)
>> return (res { rspBody = fixedBody})
, portnum = port
, hostname = Just "localhost"
}
racer :: Racer [(String, String)] ByteString ByteString
racer = Racer
{ racerHandler = pure . respBody
, racerChecker = not . isInfixOf "Something"
, racerProviders
= [simple [CurlProxy "localhost", CurlProxyPort 8082]
, simple [CurlProxy "localhost", CurlProxyPort 8083]
, simple [CurlProxy "localhost", CurlProxyPort 8084]
, delayed [CurlProxy "localhost", CurlProxyPort 8085] 2000000
]
, racerDebug = True
, racerReturnLast = False
}
failingRacer :: Racer [(String, String)] ByteString ByteString
failingRacer = racer
{ racerChecker = isInfixOf "OMG WHY"
, racerReturnLast = True
, racerProviders
= [simple [CurlProxy "localhost", CurlProxyPort 8086]
, simple [CurlProxy "localhost", CurlProxyPort 8087]
, simple [CurlProxy "localhost", CurlProxyPort 8088]
, delayed [CurlProxy "localhost", CurlProxyPort 8089] 2000000
]
}
spec :: Spec
spec = describe "Network.Craze" $
describe "raceGet" $ do
it "should race GET requests" $ do
proxies <- mapM (liftIO . forkIO)
[ runDelayedProxy 8082 5 "Hoogle"
, runDelayedProxy 8083 2 "Hayoo"
, runDelayedProxy 8084 1 "Something"
, runDelayedProxy 8085 1 "Slow"
]
liftIO $ threadDelay (1 * oneSecond) >> putStrLn "Waited 1 secs..."
response <- liftIO $ raceGet racer "http://www.google.com"
liftIO $ mapM_ killThread proxies
response `shouldSatisfy`
\x -> case x of
Just a -> "Hayoo" `isInfixOf` a
Nothing -> False
it "should return the last when requested" $ do
proxies <- mapM (liftIO . forkIO)
[ runDelayedProxy 8086 10 "Hoogle"
, runDelayedProxy 8087 2 "Hayoo"
, runDelayedProxy 8088 1 "Something"
, runDelayedProxy 8089 1 "Slow"
]
liftIO $ threadDelay (1 * oneSecond) >> putStrLn "Waited 1 secs..."
response <- liftIO $ raceGet failingRacer "http://www.google.com"
liftIO $ mapM_ killThread proxies
response `shouldSatisfy`
\x -> case x of
Just a -> "Hoogle" `isInfixOf` a
Nothing -> False
|
etcinit/craze
|
test/Network/CrazeSpec.hs
|
bsd-3-clause
| 2,806 | 0 | 16 | 697 | 790 | 415 | 375 | 68 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.CRC.CCITT_Unidir
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Test suite for Examples.CRC.CCITT_Unidir
-----------------------------------------------------------------------------
module TestSuite.CRC.CCITT_Unidir(tests) where
import Data.SBV.Tools.Polynomial
import Utils.SBVTestFramework
-- Test suite
tests :: TestTree
tests =
testGroup "CCITT_Unidir"
[ testCase "ccitHDis3" (assertIsThm (crcUniGood 3))
, testCase "ccitHDis4" (assertIsntThm (crcUniGood 4))
]
-- We don't have native support for 48 bits in Data.SBV
-- So, represent as 32 high-bits and 16 low
type SWord48 = (SWord32, SWord16)
extendData :: SWord48 -> SWord64
extendData (h, l) = h # l # 0
mkFrame :: SWord48 -> SWord64
mkFrame msg@(h, l) = h # l # crc_48_16 msg
crc_48_16 :: SWord48 -> SWord16
crc_48_16 msg = res
where msg64, divisor :: SWord64
msg64 = extendData msg
divisor = polynomial [16, 12, 5, 0]
crc64 = pMod msg64 divisor
(_, res) = split (snd (split crc64))
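-- | Count the bit positions at which the two inputs differ (their Hamming
-- distance): 'count' adds one exactly where the pointwise equality is false.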
diffCount :: [SBool] -> [SBool] -> SWord8
diffCount xs ys = count $ zipWith (.==) xs ys
where count [] = 0
count (b:bs) = let r = count bs in ite b r (1+r)
-- returns true if there's a 0->1 error (1->0 is ok)
nonUnidir :: [SBool] -> [SBool] -> SBool
nonUnidir [] _ = false
nonUnidir _ [] = false
nonUnidir (a:as) (b:bs) = (bnot a &&& b) ||| nonUnidir as bs
crcUniGood :: SWord8 -> SWord48 -> SWord48 -> SBool
crcUniGood hd sent received =
sent ./= received ==> nonUnidir frameSent frameReceived ||| diffCount frameSent frameReceived .> hd
where frameSent = blastLE $ mkFrame sent
frameReceived = blastLE $ mkFrame received
{-# ANN crc_48_16 ("HLint: ignore Use camelCase" :: String) #-}
|
josefs/sbv
|
SBVTestSuite/TestSuite/CRC/CCITT_Unidir.hs
|
bsd-3-clause
| 1,942 | 0 | 11 | 415 | 529 | 289 | 240 | 34 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : Database.EventStore.Internal.Operations
-- Copyright : (C) 2015 Yorick Laupa
-- License : (see the file LICENSE)
--
-- Maintainer : Yorick Laupa <[email protected]>
-- Stability : provisional
-- Portability : non-portable
--
-- Mega module to easily import operation in the main EventStore module.
--------------------------------------------------------------------------------
module Database.EventStore.Internal.Operations
( module Database.EventStore.Internal.Operation.Catchup
, module Database.EventStore.Internal.Operation.DeleteStream
, module Database.EventStore.Internal.Operation.PersistOperations
, module Database.EventStore.Internal.Operation.ReadAllEvents
, module Database.EventStore.Internal.Operation.ReadEvent
, module Database.EventStore.Internal.Operation.ReadStreamEvents
, module Database.EventStore.Internal.Operation.StreamMetadata
, module Database.EventStore.Internal.Operation.Transaction
, module Database.EventStore.Internal.Operation.WriteEvents
) where
--------------------------------------------------------------------------------
import Database.EventStore.Internal.Operation.Catchup
import Database.EventStore.Internal.Operation.DeleteStream
import Database.EventStore.Internal.Operation.PersistOperations
import Database.EventStore.Internal.Operation.ReadAllEvents
import Database.EventStore.Internal.Operation.ReadEvent
import Database.EventStore.Internal.Operation.ReadStreamEvents
import Database.EventStore.Internal.Operation.StreamMetadata
import Database.EventStore.Internal.Operation.Transaction
import Database.EventStore.Internal.Operation.WriteEvents
|
YoEight/eventstore
|
Database/EventStore/Internal/Operations.hs
|
bsd-3-clause
| 1,769 | 0 | 5 | 171 | 176 | 137 | 39 | 19 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Distribution.Types.BenchmarkType (
BenchmarkType(..),
knownBenchmarkTypes,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Version
import Distribution.Text
import Text.PrettyPrint as Disp
-- | The \"benchmark-type\" field in the benchmark stanza.
--
data BenchmarkType = BenchmarkTypeExe Version
-- ^ \"type: exitcode-stdio-x.y\"
| BenchmarkTypeUnknown String Version
-- ^ Some unknown benchmark type e.g. \"type: foo\"
deriving (Generic, Show, Read, Eq, Typeable, Data)
instance Binary BenchmarkType
knownBenchmarkTypes :: [BenchmarkType]
knownBenchmarkTypes = [ BenchmarkTypeExe (Version [1,0] []) ]
instance Text BenchmarkType where
disp (BenchmarkTypeExe ver) = text "exitcode-stdio-" <<>> disp ver
disp (BenchmarkTypeUnknown name ver) = text name <<>> char '-' <<>> disp ver
parse = stdParse $ \ver name -> case name of
"exitcode-stdio" -> BenchmarkTypeExe ver
_ -> BenchmarkTypeUnknown name ver
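-- Illustrative behaviour (not part of the original source), via 'display' and
-- 'simpleParse' from Distribution.Text: @display (BenchmarkTypeExe (Version [1,0] []))@
-- should give "exitcode-stdio-1.0", and parsing that string back should yield
-- the same value.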
|
sopvop/cabal
|
Cabal/Distribution/Types/BenchmarkType.hs
|
bsd-3-clause
| 1,130 | 0 | 11 | 248 | 245 | 135 | 110 | 22 | 1 |
module History where
-- Create new history.
new :: a -> ([a], a, [a])
new a = ([], a, [])
-- Get present value.
present :: ([a], a, [a]) -> a
present (_, present, _) = present
-- Accept new present value. Adjust history.
record :: ([a], a, [a]) -> a -> ([a], a, [a])
record (past, prev, _) cur = ([prev] ++ past, cur, [])
-- Check if we have the past.
hasPrevious :: ([a], a, [a]) -> Bool
hasPrevious ([], _, _) = False
hasPrevious _ = True
-- Move back in the past.
previous :: ([a], a, [a]) -> ([a], a, [a])
previous (past, present, future) =
let
past' = tail past
present' = head past
future' = [present] ++ future
in (past', present', future')
-- Check if we have the future.
hasNext :: ([a], a, [a]) -> Bool
hasNext (_, _, []) = False
hasNext _ = True
-- Move forward to the future.
next :: ([a], a, [a]) -> ([a], a, [a])
next (past, present, future) =
let
past' = [present] ++ past
present' = head future
future' = tail future
in (past', present', future')
-- Move to the oldest value.
oldest :: ([a], a, [a]) -> ([a], a, [a])
oldest time@([], _, _) = time
oldest time@_ = oldest $ previous time
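-- Illustrative session (not part of the original source):
--
-- >>> let h = previous (record (new 1) 2)
-- >>> (present h, hasNext h)
-- (1,True)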
|
sakhnik/FreeCell
|
History.hs
|
bsd-3-clause
| 1,201 | 0 | 10 | 317 | 557 | 335 | 222 | 30 | 1 |
-- $Id: HsPatStruct.hs,v 1.29 2005/05/31 02:25:25 hallgren Exp $
module HsPatStruct where
import SrcLoc1
import HsIdent
import HsLiteral
import HsFieldsStruct
import Data.Generics
data PI i p
= HsPId (HsIdentI i) -- Variables and nullary constructors
| HsPLit SrcLoc HsLiteral -- Literal
| HsPNeg SrcLoc HsLiteral -- only numeric literals can be negated
| HsPSucc SrcLoc i HsLiteral -- the horrible n+k pattern -- integer literal
  | HsPInfixApp p i p                -- For example x:xs
| HsPApp i [p] -- Constructor applications
| HsPTuple SrcLoc [p] -- Tuple pattern, (p_1,...,p_n)
| HsPList SrcLoc [p] -- List pattern, [p_1,...,p_n]
| HsPParen p -- (p)
| HsPRec i (HsFieldsI i p) -- C{f_1=p_1,...,f_n=p_n}
| HsPAsPat i p -- x@p
| HsPWildCard -- _
| HsPIrrPat p -- ~p
deriving (Ord, Read, Eq, Show, Data, Typeable)
{- instance (Eq i, Eq p) => Eq (PI i p) where
HsPId i == HsPId i1 = i == i1
HsPLit _ i == HsPLit _ i1 = i == i1
HsPNeg _ i == HsPNeg _ i1 = i == i1
HsPSucc _ i k == HsPSucc _ i1 k1 = i == i1 && k == k1
HsPInfixApp x op z == HsPInfixApp x1 op1 z1 = x == x1 && op == op1 && z == z1
HsPApp i xs == HsPApp i1 xs1 = i == i1 && xs == xs1
HsPTuple _ xs == HsPTuple _ xs1 = xs == xs1
HsPList _ xs == HsPList _ xs1 = xs == xs1
HsPParen p == HsPParen p1 = p == p1
HsPRec i ups == HsPRec i1 ups1 = i == i1 && ups == ups1
HsPAsPat i p == HsPAsPat i1 p1 = i == i1 && p == p1
HsPWildCard == HsPWildCard = True
HsPIrrPat p == HsPIrrPat p1 = p == p1 -}
|
forste/haReFork
|
tools/base/AST/HsPatStruct.hs
|
bsd-3-clause
| 1,967 | 0 | 8 | 833 | 177 | 109 | 68 | 21 | 0 |
-- adapted code from the Haskoin project
module BitD.Protocol.BitcoinAddress
( addressToHash160
, hash160ToAddress
, AddressVersion(..)
)
where
import BitD.Protocol.Types (Hash256(..), Hash160(..), Address(..))
import qualified Crypto.Hash.SHA256 as SHA256
import Control.Monad (guard)
import Control.Applicative ((<$>))
import qualified Data.ByteString as BS
import qualified Data.HashMap.Strict as Map
import Data.Bits (shiftR, shiftL, (.|.))
import Data.Maybe (fromJust)
import Data.List (unfoldr)
import Data.Word (Word8)
import Data.Char (ord)
checkSum :: BS.ByteString -> BS.ByteString
checkSum = BS.take 4 . SHA256.hash . SHA256.hash
b58String :: String
b58String = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
b58Data :: BS.ByteString
b58Data = BS.pack $ map (fromIntegral . ord) b58String
b58Data' :: Map.HashMap Word8 Int
b58Data' = Map.fromList $ zip (BS.unpack b58Data) [0..57]
b58 :: Word8 -> Word8
b58 i = BS.index b58Data (fromIntegral i)
b58' :: Word8 -> Maybe Word8
b58' w = fromIntegral <$> Map.lookup w b58Data'
bsToInteger :: BS.ByteString -> Integer
bsToInteger = (foldr f 0) . reverse . BS.unpack
where
f w n = (toInteger w) .|. shiftL n 8
integerToBS :: Integer -> BS.ByteString
integerToBS 0 = BS.pack [0]
integerToBS i
| i > 0 = BS.pack $ reverse $ unfoldr f i
| otherwise = error "integerToBS not defined for negative values"
where f 0 = Nothing
f x = Just $ (fromInteger x :: Word8, x `shiftR` 8)
encodeBase58I :: Integer -> BS.ByteString
encodeBase58I 0 = BS.pack [b58 0]
encodeBase58I i
| i >= 0 = go BS.empty i
| otherwise = error "encodeBase58 is not defined for negative Integers"
where
go acc 0 = acc
go acc n = go (BS.cons (fromIntegral b) acc) q
where
(q,r) = n `quotRem` 58
b = b58 $ fromIntegral r
encodeBase58 :: BS.ByteString -> BS.ByteString
encodeBase58 bs = BS.append l r
where
(z,b) = BS.span (== 0) bs
l = BS.map b58 z -- preserve leading 0's
r | BS.null b = BS.empty
| otherwise = encodeBase58I $ bsToInteger b
decodeBase58 :: BS.ByteString -> Maybe BS.ByteString
decodeBase58 bs = r >>= return . BS.append prefix
where (z,b) = BS.span (== (b58 0)) bs
prefix = BS.map (fromJust . b58') z -- preserve leading 1's
r | BS.null b = Just BS.empty
| otherwise = integerToBS <$> foldl f (Just 0) (BS.unpack b)
f i w = do
n <- fromIntegral <$> b58' w
p <- i
return $ p*58 + n
decodeBase58Check :: BS.ByteString -> Maybe BS.ByteString
decodeBase58Check bs = do
rs <- decodeBase58 bs
let (res,chk) = BS.splitAt (BS.length rs - 4) rs
guard $ chk == checkSum res
return res
encodeBase58Check :: BS.ByteString -> BS.ByteString
encodeBase58Check bs = encodeBase58 $ BS.append bs chk
where chk = checkSum bs
addressToHash160 :: Address -> Maybe Hash160
addressToHash160 (Address s) = (Hash160 . BS.drop 1) <$> decodeBase58Check s
data AddressVersion = VersionPayToAddress
| VersionPayToScriptHash
-- main net
addressVersionToVersionByte :: AddressVersion -> Word8
addressVersionToVersionByte VersionPayToAddress = 0
addressVersionToVersionByte VersionPayToScriptHash = 5
hash160ToAddress :: Hash160 -> AddressVersion -> Address
hash160ToAddress (Hash160 h) version = Address $ encodeBase58Check ((addressVersionToVersionByte version) `BS.cons` h)
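-- Illustrative round-trip property (not part of the original source): for any
-- Hash160 h and version v, @addressToHash160 (hash160ToAddress h v) == Just h@,
-- because decodeBase58Check verifies and strips the 4-byte checksum and the
-- leading version byte is then dropped.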
|
benma/bitd
|
src/BitD/Protocol/BitcoinAddress.hs
|
bsd-3-clause
| 3,447 | 0 | 14 | 738 | 1,204 | 628 | 576 | 80 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE LambdaCase #-}
module Run.SendReceive where
import Control.Applicative
import Control.Concurrent
import Control.Monad
import qualified Data.Configurator as Conf
import qualified Data.Configurator.Types as Conf
import Data.Maybe
import qualified Data.Text as Text
import Network
import Network.Xmpp
import System.Exit
import System.Log.Logger
import System.Timeout
import Test.HUnit
import Test.Hspec.Expectations
import Run.Payload
import Run.Config
xmppConfig :: ConnectionDetails -> SessionConfiguration
xmppConfig det = def{sessionStreamConfiguration
= def{connectionDetails = det}
, onConnectionClosed = \sess _ -> do
_ <- reconnect' sess
_ <- sendPresence presenceOnline sess
return ()
}
-- | reflect messages to their origin
reflect :: Session -> IO b
reflect sess = forever $ do
m <- getMessage sess
case answerMessage m (messagePayload m) of
Nothing -> return ()
Just am ->
void $ sendMessage am{messageAttributes = messageAttributes m} sess
testAttributes = [( "{org.pontarius.xmpp.test}testattr"
, "testvalue 12321 åäü>"
)]
run :: IO ()
run = void $ do
conf <- loadConfig
uname1 <- Conf.require conf "xmpp.user1"
pwd1 <- Conf.require conf "xmpp.password1"
uname2 <- Conf.require conf "xmpp.user1"
pwd2 <- Conf.require conf "xmpp.password1"
realm <- Conf.require conf "xmpp.realm"
server <- Conf.lookup conf "xmpp.server"
port <- Conf.lookup conf "xmpp.port" :: IO (Maybe Integer)
let conDetails = case server of
Nothing -> UseRealm
Just srv -> case port of
Nothing -> UseSrv srv
Just p -> UseHost srv (fromIntegral p)
_ <- configuredLoglevel conf
mbSess1 <- session realm (simpleAuth uname1 pwd1)
((xmppConfig conDetails))
sess1 <- case mbSess1 of
Left e -> do
assertFailure $ "session 1 could not be initialized" ++ show e
exitFailure
Right r -> return r
mbSess2 <- session realm (simpleAuth uname2 pwd2)
((xmppConfig conDetails))
sess2 <- case mbSess2 of
Left e -> do
assertFailure $ "session 2 could not be initialized" ++ show e
exitFailure
Right r -> return r
Just jid1 <- getJid sess1
Just jid2 <- getJid sess2
_ <- sendPresence presenceOnline sess1
_ <- forkIO $ reflect sess1
forkIO $ iqResponder sess1
_ <- sendPresence presenceOnline sess2
-- check message responsiveness
infoM "Pontarius.Xmpp" "Running message mirror"
sendMessage message{ messageTo = Just jid1
, messageAttributes = testAttributes
} sess2
resp <- timeout 3000000 $ waitForMessage (\m -> messageFrom m == Just jid1)
sess2
case resp of
Nothing -> assertFailure "Did not receive message answer"
Just am -> messageAttributes am `shouldBe` testAttributes
infoM "Pontarius.Xmpp" "Done running message mirror"
infoM "Pontarius.Xmpp" "Running IQ tests"
testPayload jid1 sess2
infoM "Pontarius.Xmpp" "Done running IQ tests"
|
Philonous/pontarius-xmpp
|
tests/Run/SendReceive.hs
|
bsd-3-clause
| 3,629 | 0 | 19 | 1,180 | 862 | 417 | 445 | 87 | 6 |
-- https://www.reddit.com/r/dailyprogrammer/comments/5e4mde/20161121_challenge_293_easy_defusing_the_bomb/
import qualified Data.Map as Map
import Data.Map (Map)
data Color = White | Red | Black | Orange | Green | Purple
deriving (Show, Read, Enum, Eq, Ord)
getNexts :: Color -> [Color]
getNexts White = [Red, Orange, Green, Purple]
getNexts Red = [Green]
getNexts Black = [Red, Black, Purple]
getNexts Orange = [Red, Black]
getNexts Green = [Orange, White]
getNexts Purple = [Red, Black]
fromList :: [Color] -> Map Color Int
fromList = Map.fromListWith (+) . flip zip (repeat 1)
-- ^ cut a wire and update the existing wires.
defuseWire :: Color -> Map Color Int -> Maybe (Map Color Int)
defuseWire c m = Map.lookup c m >>= \v -> return $ Map.update (if v < 2 then const Nothing else Just . pred) c m
defuseFrom :: Color -> Maybe (Map Color Int) -> Bool
defuseFrom _ Nothing = False
defuseFrom c (Just m)
| Map.null m = c `elem` [White, Black, Purple]
| otherwise = or . map (\(c', m') -> defuseFrom c' (defuseWire c' m')) $ zip (getNexts c) (repeat m)
-- ^ defuse set of wires, not assuming cuts are ordered
-- The quiz it self assumes cuts are ordered, which is even simpler.
defuse :: [Color] -> Bool
defuse cs = or $ map (\c -> defuseFrom c (Just m)) t
where m = fromList cs
t = enumFromTo White Purple
defuseE293 :: [Color] -> Bool
defuseE293 [] = True
defuseE293 (c:[]) = c `elem` [White, Black, Purple]
defuseE293 (c:c':cs)
| c' `elem` (getNexts c) = defuseE293 (c':cs)
| otherwise = False
--
ex1 = [White, Red, Green, White]
ex2 = [White, Orange, Green, White]
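-- Illustrative results (not part of the original source), using the ordered
-- interpretation:
--
-- >>> defuseE293 ex1
-- True
-- >>> defuseE293 ex2
-- False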
|
wangbj/excises
|
e293.hs
|
bsd-3-clause
| 1,629 | 0 | 13 | 334 | 657 | 357 | 300 | 32 | 2 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
import Distribution.Simple
main = defaultMainWithHooks autoconfUserHooks
|
TomMD/cryptol
|
Setup.hs
|
bsd-3-clause
| 268 | 0 | 5 | 56 | 21 | 14 | 7 | 2 | 1 |
module Main where
import Word2Vec
import System.Environment (getArgs)
main :: IO ()
main = do
args <- getArgs
  case args of
    "train" : path : num : _ -> trainWith path $ read num
    "most-similar" : ws@(_:_) -> mostSimilar ws
_ -> showHelp
showHelp :: IO ()
showHelp = do
putStrLn "usage: wv <command> [<args>]"
putStrLn ""
putStrLn " train <file path> Train matrices and save them as files"
putStrLn " most-similar <words> List most similar words to the given ones"
putStrLn ""
|
shuhei/wv
|
app/Main.hs
|
bsd-3-clause
| 520 | 0 | 13 | 122 | 148 | 71 | 77 | 17 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase        #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2014 Dimitri Sabadie
-- License : BSD3
--
-- Maintainer : Dimitri Sabadie <[email protected]>
-- Stability : experimental
-- Portability : portable
--
----------------------------------------------------------------------------
module Quaazar.Technics.Lighting.Phong where
import Data.Aeson
import Numeric.Natural ( Natural )
import Quaazar.Render.GL.Shader
import Quaazar.Render.GL.Texture
import Quaazar.Render.Semantics
import Quaazar.System.Resource
data PhongMaterial = PhongMaterial {
diffuseMap :: Texture2D
, specularMap :: Texture2D
, glossMap :: Texture2D
}
data PhongMaterialManifest = PhongMaterialManifest String String String
instance FromJSON PhongMaterialManifest where
parseJSON = withObject "phong material" $ \o ->
PhongMaterialManifest
<$> o .: "diffuse"
<*> o .: "specular"
<*> o .: "gloss"
instance Load () PhongMaterialManifest where
loadRoot = const "materials"
loadExt = const "qmat"
getPhong :: (MonadScoped IO m,MonadIO m,MonadLogger m,MonadError Log m)
=> m (Program,PhongMaterial -> ShaderSemantics())
getPhong = do
prog <- buildProgram phongVS Nothing Nothing phongFS
return (prog,semantics)
where
semantics mat = do
toUniform (extendUniformSem PhongDiffMapSem) $= (diffuseMap mat,Unit 0)
toUniform (extendUniformSem PhongSpecMapSem) $= (specularMap mat,Unit 1)
toUniform (extendUniformSem PhongGlossMapSem) $= (glossMap mat,Unit 2)
data PhongMaterialSem
= PhongDiffMapSem
| PhongSpecMapSem
| PhongGlossMapSem
deriving (Enum,Eq,Ord,Show)
phongVS :: String
phongVS = unlines
[
"#version 430 core"
, "layout (location = 0) in vec3 co;"
, "layout (location = 1) in vec3 no;"
, "layout (location = 2) in vec2 uv;"
, declUniform CamProjViewSem "mat4 projView"
, declUniform ModelSem "mat4 model"
, "out vec3 vco;"
, "out vec3 vno;"
, "out vec2 vuv;"
, "void main() {"
, " vco = (model * vec4(co,1.)).xyz;"
, " vno = normalize((transpose(inverse(model)) * vec4(no,1.)).xyz);"
, " vuv = uv;"
, " gl_Position = projView * vec4(vco,1.);"
, "}"
]
phongFS :: String
phongFS = unlines
[
"#version 430 core"
, "in vec3 vco;"
, "in vec3 vno;"
, "in vec2 vuv;"
, declUniform EyeSem "vec3 eye"
, declUniform (extendUniformSem PhongDiffMapSem) "sampler2D phongDiffMap"
, declUniform (extendUniformSem PhongSpecMapSem) "sampler2D phongSpecMap"
, declUniform (extendUniformSem PhongGlossMapSem) "sampler2D phongGlossMap"
-- ambient lighting
, declUniform LigAmbColSem "vec3 ligAmbCol"
, declUniform LigAmbPowSem "float ligAmbPow"
-- omni lights
, "struct Omni {"
, " vec3 pos;"
, " vec3 col;"
, " float pow;"
, " float rad;"
, " uint shadowLOD;"
, " uint shadowmapIndex;"
, " };"
-- shadows
, declUniform LowShadowmapsSem "samplerCubeArray lowShadowmaps"
, declUniform MediumShadowmapsSem "samplerCubeArray mediumShadowmaps"
, declUniform HighShadowmapsSem "samplerCubeArray highShadowmaps"
, declUniformBlock ligOmniSSBOBP "OmniBuffer { Omni ligs[]; } omniBuffer"
, declUniform LigOmniNbSem "uint ligOmniNb"
, "out vec4 frag;"
, "float sampleShadowmap(uint lod, uint index, vec3 ligDir) {"
, " switch (lod) {"
, " case 1u:"
, " return texture(lowShadowmaps, vec4(ligDir, float(index))).r;"
, " case 2u:"
, " return texture(mediumShadowmaps, vec4(ligDir, float(index))).r;"
, " case 3u:"
, " return texture(highShadowmaps, vec4(ligDir, float(index))).r;"
, " default:"
, " return 1.;"
, " }"
, "}"
, "float computeShadow(uint lod, uint index, vec3 ligDir, float ligRad, float distToLight) {"
, " float shadowDist = sampleShadowmap(lod, index, ligDir) * ligRad;"
, " float shadowBias = 0.01;"
, " return shadowDist + shadowBias >= distToLight ? 1. : 0.;"
, "}"
, "void main() {"
, " vec3 phongDiff = texture(phongDiffMap, vuv).rgb;"
, " vec3 phongSpec = texture(phongSpecMap, vuv).rgb;"
, " float phongGloss = texture(phongGlossMap, vuv).r;"
, " vec3 v = normalize(eye - vco);"
-- ambient lighting
, " vec3 ambient = ligAmbCol * phongDiff * ligAmbPow;"
-- omni lights
, " vec3 omni = vec3(0.,0.,0.);"
, " for (uint i = 0u; i < ligOmniNb; ++i) {"
-- lighting
, " vec3 ligCol = omniBuffer.ligs[i].col;"
, " float ligPow = omniBuffer.ligs[i].pow;"
, " float ligRad = omniBuffer.ligs[i].rad;"
, " uint shadowLOD = omniBuffer.ligs[i].shadowLOD;"
, " uint shadowmapIndex = omniBuffer.ligs[i].shadowmapIndex;"
, " vec3 ligToVertex = omniBuffer.ligs[i].pos - vco;"
, " vec3 ligDir = normalize(ligToVertex);"
, " float distToLight = length(ligToVertex);"
, " vec3 r = normalize(reflect(-ligDir,vno));"
, " vec3 spec = pow(max(0.,dot(r,v)), 1. + phongGloss * 512.) * ligCol * phongSpec;"
, " vec3 diff = max(0.,dot(vno,ligDir)) * (ligCol - spec) * phongDiff;"
, " float atten = ligPow / (pow(1. + distToLight/ligRad,2.));"
-- shadowing
, " float shadow = shadowLOD == 0u ? 1. : computeShadow(shadowLOD, shadowmapIndex, -ligDir, ligRad, distToLight);"
-- lighting * shadowing
, " omni += shadow * atten * (diff + spec);"
, " }"
, " frag = clamp(vec4(ambient + omni,1.), vec4(0.,0.,0.,0.), vec4(1.,1.,1.,1.));"
, "}"
]
getPhongMaterialManager :: (MonadIO m,MonadScoped IO m,MonadLogger m,MonadError Log m)
=> (String -> Wrap -> Filter -> Maybe CompareFunc -> Natural -> Natural -> m Texture2D)
-> m (String -> m PhongMaterial)
getPhongMaterialManager getTex2D = mkResourceManager $ \insert lkp ->
pure $ \name -> lkp name >>= \case
Just r -> pure r
Nothing -> do
PhongMaterialManifest dname sname gname <- load_ name
diff <- getTex2D dname ClampToEdge Linear Nothing 0 0
spec <- getTex2D sname ClampToEdge Linear Nothing 0 0
gloss <- getTex2D gname ClampToEdge Linear Nothing 0 0
let mat = PhongMaterial diff spec gloss
insert name mat
pure mat
|
phaazon/quaazar
|
src/Quaazar/Technics/Lighting/Phong.hs
|
bsd-3-clause
| 6,240 | 0 | 18 | 1,355 | 998 | 548 | 450 | -1 | -1 |
{-# LANGUAGE TypeFamilies #-}
module PrioritySync.Internal.Constrained
(Constrained(..))
where
import PrioritySync.Internal.UserData
import PrioritySync.Internal.RoomGroup
import PrioritySync.Internal.ClaimContext
import PrioritySync.Internal.Room
import PrioritySync.Internal.RoomConstraint
-- | Require that all 'RoomConstraint's be satisfied when acquiring a 'Room'.
data Constrained u = Constrained
type instance UserData (Constrained u) = u
instance RoomGroup (Constrained u) where
roomsOf = const []
instance (RoomConstraint u) => ClaimContext (Constrained u) where
type ClaimHandle (Constrained u) = ()
approveClaimsEntering _ cs = approveClaims cs >> return ()
approveClaimsExiting _ cs = approveClaims cs >> return ()
waitingAction _ () = return ()
|
clanehin/priority-sync
|
PrioritySync/Internal/Constrained.hs
|
bsd-3-clause
| 793 | 0 | 8 | 124 | 197 | 108 | 89 | 17 | 0 |
-- Copyright (c) 2015, Travis Bemann
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- o Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
--
-- o Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- o Neither the name of the copyright holder nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{-# LANGUAGE OverloadedStrings #-}
module Network.IRC.Client.Amphibian.ChannelServer
(ChannelServer,
ChannelServerStopResponse,
new,
start,
stop,
waitStop)
where
import qualified Network.IRC.Client.Amphibian.ConnectionManager as CM
import qualified Network.IRC.Client.Amphibian.Interface as I -- assumed module name for the qualified I.* calls used below
import Network.IRC.Client.Amphibian.Types
import Network.IRC.Client.Amphibian.Monad
import Network.IRC.Client.Amphibian.Commands
import Network.IRC.Client.Amphibian.Utility
import Data.Functor ((<$>))
import Control.Monad.IO.Class (liftIO)
import qualified Control.Monad as M
import Control.Concurrent.Async (Async,
async,
cancel)
import Control.Concurrent.STM (STM,
TVar,
TMVar,
TChan,
atomically,
orElse,
newTVar,
writeTVar,
readTVar,
newEmptyTMVar,
putTMVar,
readTMVar,
newBroadcastTChan,
dupTChan,
writeTChan,
peekTChan,
readTChan)
import Control.Concurrent.STM.TQueue (TQueue,
newTQueue,
writeTQueue,
readTQueue)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.UTF8 as BUTF8
import qualified Data.Text as T
import Text.Read (readMaybe)
import Data.Time.Clock (UTCTime,
getCurrentTime)
import Data.Time.Clock.POSIX (POSIXTime,
posixSecondsToUTCTime)
-- | Create a new channel server.
new :: STM ChannelServer
new = do
  running <- newTVar False
actions <- newTQueue
return $ ChannelServer { chseRunning = running,
chseActions = actions }
-- | Start a channel server
start :: ChannelServer -> AM ()
start server = do
intf <- getInterface
  M.join . liftIO . atomically $ do
running <- readTVar $ chseRunning server
if not running
then do
writeTVar (chseRunning server) True
I.registerChannelServer intf server
      return . M.void . liftIO . async $ runAM (runServer server) intf
else return $ return ()
-- | Stop channel server.
stop :: ChannelServer -> STM ChannelServerStopResponse
stop server = do
response <- ChannelServerStopResponse <$> newEmptyTMVar
  writeTQueue (chseActions server) (ChsaStop response)
return response
-- | Wait for channel server to stop.
waitStop :: ChannelServerStopResponse -> STM (Either Error ())
waitStop (ChannelServerStopResponse response) = readTMVar response
-- | Run channel server.
runServer :: ChannelServer -> AM ()
runServer server = do
intf <- getInterface
  continue <- M.join . liftIO . atomically $ do
action <- readTQueue $ chseActions server
case action of
ChsaStartChannel channel (ChannelStartResponse response) -> do
active <- readTVar $ chanActive channel
if not active
then do
writeTVar (chanActive channel) True
I.registerChannel intf channel
putTMVar response $ Right ()
return $ do
            M.void . liftIO . async $ runAM (runChannel channel) intf
return True
else do
errorText <- I.lookupText intf "Channel is already started"
putTMVar response . Left $ Error [errorText]
return $ return True
      ChsaStop (ChannelServerStopResponse response) -> do
        I.unregisterChannelServer intf server
        writeTVar (chseRunning server) False
        putTMVar response $ Right ()
        return $ return False
if continue
then runServer server
else return ()
-- | Run a channel.
runChannel :: Channel -> AM ()
runChannel channel = do
continue <- M.join . liftIO . atomically $ handleActions channel `orElse` handleEvents channel
if continue
then runChannel channel
else return ()
-- | Handle actions.
handleActions :: Channel -> STM (AM Bool)
handleActions channel = do
action <- readTQueue $ chanActions channel
case action of
ChanStop response -> doStop channel response
ChanJoin response -> doJoin channel response
ChanPart comment response -> doPart channel comment response
ChanMessage comment response -> doMessage channel comment response
ChanNotice comment response -> doNotice channel comment response
ChanSetTopic comment response -> doSetTopic channel comment response
-- | Carry out stop.
doStop :: Channel -> ChannelStopResponse -> STM (AM Bool)
doStop channel (ChannelStopResponse response) = do
I.unregisterChannel (chanInterface channel) channel
writeTVar (chanActive channel) False
putTMVar response $ Right ()
return $ return False
-- | Carry out join.
doJoin :: Channel -> ChannelJoinResponse -> STM (AM Bool)
doJoin channel (ChannelJoinResponse response) = do
autoJoin <- readTVar $ chanAutoJoin channel
  if not autoJoin
then do writeTVar (chanAutoJoin channel) True
name <- readTVar $ chanName channel
key <- readTVar $ chanKey channel
let parameters = maybe [name] (\key -> [name, key]) key
sendResponse <- CM.send (chanConnectionManager channel) $ IRCMessage { ircmPrefix = Nothing,
ircmCommand = cmd_JOIN,
ircmParameters = parameters,
ircmComment = Nothing }
let relay = liftIO . atomically $ do errorResponse <- waitSend sendResponse
putTMVar response errorResponse
return $ do intf <- getInterface
liftIO . async $ runAM relay intf
return True
else return $ do errorText <- lookupText "already joined channel"
liftIO . atomically . putTMVar response . Left $ Error [errorText]
return True
-- | Carry out part.
doPart :: Channel -> Maybe MessageComment -> ChannelPartResponse -> STM (AM Bool)
doPart channel comment (ChannelPartResponse response) = do
autoJoin <- readTVar $ chanAutoJoin channel
if autoJoin
then do
writeTVar (chanAutoJoin channel) False
writeTVar (chanJoined channel) False
name' <- readTVar $ chanName channel
sendResponse <- CM.send (chanConnectionManager channel) $ IRCMessage { ircmPrefix = Nothing,
ircmCommand = cmd_PART,
                                                                             ircmParameters = [name'],
ircmComment = comment }
let relay = liftIO . atomically $ do errorResponse <- waitSend sendResponse
putTMVar response errorResponse
return $ do intf <- getInterface
liftIO . async $ runAM relay intf
return True
else return $ do errorText <- lookupText "not joined channel"
liftIO . atomically . putTMVar response . Left $ Error [errorText]
return True
-- | Carry out message.
doMessage :: Channel -> MessageComment -> ChannelMessageResponse -> STM (AM Bool)
doMessage channel comment (ChannelMessageResponse response) = do
joined <- readTVar $ chanJoined channel
if joined
then do
nick <- CM.getNick $ chanConnectionManager channel
name' <- readTVar $ chanName channel
sendResponse <- CM.send (chanConnectionManager channel) $ IRCMessage { ircmPrefix = Nothing,
ircmCommand = cmd_PRIVMSG,
                                                                             ircmParameters = [name'],
ircmComment = Just comment }
let relay = liftIO . atomically $ do errorResponse <- waitSend sendResponse
putTMVar response errorResponse
return $ do intf <- getInterface
liftIO . async $ runAM relay intf
return True
    else return $ do errorText <- lookupText "not in channel"
liftIO . atomically . putTMVar response . Left $ Error [errorText]
return True
-- | Carry out notice.
doNotice :: Channel -> MessageComment -> ChannelNoticeResponse -> STM (AM Bool)
doNotice channel comment (ChannelNoticeResponse response) = do
joined <- readTVar $ chanJoined channel
if joined
then do
nick <- CM.getNick $ chanConnectionManager channel
      name' <- readTVar $ chanName channel
sendResponse <- CM.send (chanConnectionManager channel) $ IRCMessage { ircmPrefix = Nothing,
ircmCommand = cmd_NOTICE,
                                                                             ircmParameters = [name'],
ircmComment = Just comment }
let relay = liftIO . atomically $ do errorResponse <- waitSend sendResponse
putTMVar response errorResponse
return $ do intf <- getInterface
liftIO . async $ runAM relay intf
return True
    else return $ do errorText <- lookupText "not in channel"
liftIO . atomically . putTMVar response . Left $ Error [errorText]
return True
-- | Carry out set topic.
doSetTopic :: Channel -> MessageComment -> ChannelSetTopicResponse -> STM (AM Bool)
doSetTopic channel comment (ChannelSetTopicResponse response) = do
joined <- readTVar $ chanJoined channel
if joined
then do
name' <- readTVar $ chanName channel
      sendResponse <- CM.send (chanConnectionManager channel) $ IRCMessage { ircmPrefix = Nothing,
                                                                             ircmCommand = cmd_TOPIC,
                                                                             ircmParameters = [name'],
                                                                             ircmComment = Just comment }
writeTVar (chanTopic channel) (Just comment)
let relay = liftIO . atomically $ do errorResponse <- waitSend sendResponse
putTMVar response errorResponse
return $ do intf <- getInterface
liftIO . async $ runAM relay intf
return True
else return $ do errorText <- lookupText "not in channel"
liftIO . atomically . putTMVar response . Left $ Error [errorText]
return True
-- | Handle events.
handleEvents :: Channel -> STM (AM Bool)
handleEvents channel = do
autoJoin <- readTVar $ chanAutoJoin channel
if autoJoin
then do event <- CM.recv $ chanConnectionManager channel
case event of
ComaMessage message@(IRCMessage { ircmCommand = command })
| command == cmd_JOIN -> handleJoin channel message
| command == cmd_PART -> handlePart channel message
| command == cmd_PRIVMSG -> handlePrivmsg channel message
| command == cmd_NOTICE -> handleNotice channel message
              | command == cmd_TOPIC -> handleTopic channel message
| command == rpl_NAMREPLY -> handleRplNamreply channel message
| command == rpl_ENDOFNAMES -> handleRplEndofnames channel message
| command == rpl_NOTOPIC -> handleRplNotopic channel message
| command == rpl_TOPIC -> handleRplTopic channel message
| command == rpl_TOPICWHOTIME -> handleRplTopicWhoTime channel message
| command == cmd_QUIT -> handleQuit channel message
ComaRecvNick oldNick newNick -> handleRecvNick channel oldNick newNick
ComaRecvCtcpRequest nick dest comment -> handleRecvCtcpRequest channel nick dest comment
ComaRecvCtcpReply nick dest comment -> handleRecvCtcpReply channel nick dest comment
ComaRecvSelfNick oldNick newNick -> handleRecvSelfNick channel oldNick newNick
ComaSelfMessage nick dest comment -> handleSelfMessage channel nick dest comment
ComaSelfNotice nick dest comment -> handleSelfNotice channel nick dest comment
ComaSelfCtcpRequest nick dest comment -> handleSelfCtcpRequest channel nick dest comment
ComaSelfCtcpReply nick dest comment -> handleSelfCtcpReply channel nick dest comment
            ComaDisconnected error -> writeTChan (chanEvents channel) (ChanDisconnected error) >> return (return True)
_ -> return $ return True
else return $ return True
-- | Handle JOIN message.
handleJoin :: Channel -> IRCMessage -> STM (AM Bool)
handleJoin channel message = do
currentNick <- CM.getNick $ chanConnectionManager channel
name' <- readTVar $ chanName channel
case (extractNick $ ircmPrefix message, ircmComment message) of
(Just nick, Just channelName)
| nick == currentNick && channelName == name' -> do
response <- readTVar $ chanJoinResponse channel
case response of
Just response -> do
putTMVar response (Right ())
Nothing -> return ()
writeTVar (chanJoined channel) True
writeTChan (chanEvents channel) ChanJoined
      | channelName == name' -> do
          writeTChan (chanEvents channel) (ChanRecvJoin nick (ircmPrefix message))
          _ <- CM.send (chanConnectionManager channel) $
                 IRCMessage { ircmPrefix = Nothing,
                              ircmCommand = cmd_NAMES,
                              ircmParameters = [name'],
                              ircmComment = Nothing }
          return ()
_ -> return ()
return $ return True
-- | Handle PART message.
handlePart :: Channel -> IRCMessage -> STM (AM Bool)
handlePart channel message = do
currentNick <- CM.getNick $ chanConnectionManager channel
name' <- readTVar $ chanName channel
case (extractNick $ ircmPrefix message, ircmParameters message) of
(Just nick, [channelName])
      | nick == currentNick && name' == channelName ->
          writeTChan (chanEvents channel) (ChanParted $ ircmComment message)
      | name' == channelName -> do
          writeTChan (chanEvents channel) (ChanRecvPart nick (ircmPrefix message) (ircmComment message))
          nicks <- filter (\(knownNick, _) -> knownNick /= nick) <$> readTVar (chanNames channel)
          writeTVar (chanNames channel) nicks
_ -> return ()
return $ return True
-- | Handle PRIVMSG message.
handlePrivmsg :: Channel -> IRCMessage -> STM (AM Bool)
handlePrivmsg channel message = do
name' <- readTVar $ chanName channel
case (extractNick $ ircmPrefix message, ircmParameters message) of
(Just nick, [channelName])
| channelName == name' -> writeTChan (chanEvents channel) (ChanRecvMessage nick (ircmComment message))
_ -> return ()
return $ return True
-- | Handle NOTICE message.
handleNotice :: Channel -> IRCMessage -> STM (AM Bool)
handleNotice channel message = do
name' <- readTVar $ chanName channel
case (extractNick $ ircmPrefix message, ircmParameters message) of
(Just nick, [channelName])
| channelName == name' -> writeTChan (chanEvents channel) (ChanRecvNotice nick (ircmComment message))
_ -> return ()
return $ return True
-- | Handle TOPIC message.
handleTopic :: Channel -> IRCMessage -> STM (AM Bool)
handleTopic channel message = do
name' <- readTVar $ chanName channel
case (extractNick $ ircmPrefix message, ircmParameters message) of
(Just nick, [channelName])
| channelName == name' -> do
return $ do
time <- liftIO getCurrentTime
liftIO . atomically $ do
        writeTVar (chanTopic channel) (ircmComment message)
        writeTVar (chanTopicUser channel) (ircmPrefix message)
writeTVar (chanTopicTime channel) (Just time)
writeTChan (chanEvents channel) (ChanRecvTopic nick $ ircmComment message)
return True
_ -> return $ return True
-- | Handle rpl_NAMREPLY message.
handleRplNamreply :: Channel -> IRCMessage -> STM (AM Bool)
handleRplNamreply channel message = do
name' <- readTVar $ chanName channel
let parameters = ircmParameters message
case fromEnd 1 parameters of
Just name | name == name' -> do
case fromEnd 2 parameters of
Just channelType
| channelType == BUTF8.fromString "=" -> writeTVar (chanType channel) ChanPublic
| channelType == BUTF8.fromString "*" -> writeTVar (chanType channel) ChanPrivate
| channelType == BUTF8.fromString "@" -> writeTVar (chanType channel) ChanSecret
_ -> return ()
case ircmComment message of
Just comment -> do
let names = splitNames comment
        oldNames <- readTVar $ chanNamesAccum channel
        writeTVar (chanNamesAccum channel) (names ++ oldNames)
Nothing -> return ()
_ -> return ()
return $ return True
-- | Split names in a rpl_NAMREPLY message
splitNames :: MessageComment -> [(Nick, UserStatus)]
splitNames comment = splitNames' comment []
  where splitNames' comment' names =
          let (name, rest) = BUTF8.break (== ' ') comment'
              names' = if BUTF8.length name > 0
                       then case BUTF8.uncons name of
                              Just ('@', n) -> (n, UserOp) : names
                              Just ('%', n) -> (n, UserHalfOp) : names
                              Just ('+', n) -> (n, UserVoice) : names
                              _ -> (name, UserNormal) : names
                       else names
              rest' = BUTF8.drop 1 rest
          in if BUTF8.length rest' > 0
             then splitNames' rest' names'
             else reverse names'
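-- Illustrative example (not part of the original source):
-- splitNames "@op +voiced plain" yields
-- [("op", UserOp), ("voiced", UserVoice), ("plain", UserNormal)].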
-- | Handle rpl_ENDOFNAMES message.
handleRplEndofnames :: Channel -> IRCMessage -> STM (AM Bool)
handleRplEndofnames channel message = do
  name' <- readTVar $ chanName channel
  case fromEnd 1 $ ircmParameters message of
    Just name | name == name' -> do
names <- readTVar $ chanNamesAccum channel
writeTVar (chanNames channel) names
writeTVar (chanNamesAccum channel) []
channelType <- readTVar $ chanType channel
writeTChan (chanEvents channel) (ChanType channelType)
writeTChan (chanEvents channel) (ChanNames names)
_ -> return ()
return $ return True
-- | Handle rpl_NOTOPIC message.
handleRplNotopic :: Channel -> IRCMessage -> STM (AM Bool)
handleRplNotopic channel message = do
  name' <- readTVar $ chanName channel
  case fromEnd 1 $ ircmParameters message of
    Just name | name == name' -> do
writeTVar (chanTopic channel) Nothing
writeTChan (chanEvents channel) ChanNoTopic
_ -> return ()
return $ return True
-- | Handle rpl_TOPIC message.
handleRplTopic :: Channel -> IRCMessage -> STM (AM Bool)
handleRplTopic channel message = do
  name' <- readTVar $ chanName channel
  case (fromEnd 1 $ ircmParameters message, ircmComment message) of
    (Just name, Just comment) | name == name' -> do
      writeTVar (chanTopic channel) (Just comment)
writeTChan (chanEvents channel) (ChanTopic comment)
_ -> return ()
return $ return True
-- | Handle rpl_TOPICWHOTIME message.
handleRplTopicWhoTime :: Channel -> IRCMessage -> STM (AM Bool)
handleRplTopicWhoTime channel message = do
  name' <- readTVar $ chanName channel
  case ircmParameters message of
    [name, user, timeString] | name == name' -> do
      case readMaybe (BUTF8.toString timeString) :: Maybe Integer of
        Just time -> do
          let time' = posixSecondsToUTCTime $ realToFrac time
          writeTVar (chanTopicUser channel) (Just user)
          writeTVar (chanTopicTime channel) (Just time')
writeTChan (chanEvents channel) (ChanTopicWhoTime user time')
_ -> return ()
_ -> return ()
return $ return True
-- | Handle QUIT message.
handleQuit :: Channel -> IRCMessage -> STM (AM Bool)
handleQuit channel message = do
currentNick <- CM.getNick $ chanConnectionManager channel
names <- readTVar $ chanNames channel
case extractNick $ ircmPrefix message of
Just nick
      | nick /= currentNick && nick `elem` map (\(knownNick, _) -> knownNick) names -> do
          writeTChan (chanEvents channel) (ChanRecvQuit nick (ircmPrefix message) (ircmComment message))
          let names' = filter (\(knownNick, _) -> knownNick /= nick) names
          writeTVar (chanNames channel) names'
_ -> return ()
return $ return True
-- | Handle received CTCP request message.
handleRecvCtcpRequest :: Channel -> Nick -> ChannelNameOrNick -> MessageComment -> STM (AM Bool)
handleRecvCtcpRequest channel nick dest comment = do
name' <- readTVar $ chanName channel
if dest == CnonChannelName name'
then writeTChan (chanEvents channel) (ChanRecvCtcpRequest nick comment)
else return ()
return $ return True
-- | Handle received CTCP reply message.
handleRecvCtcpReply :: Channel -> Nick -> ChannelNameOrNick -> MessageComment -> STM (AM Bool)
handleRecvCtcpReply channel nick dest comment = do
name' <- readTVar $ chanName channel
if dest == CnonChannelName name'
then writeTChan (chanEvents channel) (ChanRecvCtcpReply nick comment)
else return ()
return $ return True
-- | Handle received nick message.
handleRecvNick :: Channel -> Nick -> Nick -> STM (AM Bool)
handleRecvNick channel oldNick newNick = do
inChannel <- isNickInChannel channel oldNick
if inChannel
then do oldNicks <- readTVar $ chanNames channel
let newNicks = map (\(nick, status) -> (if nick == oldNick then newNick else nick, status)) oldNicks
writeTVar (chanNames channel) newNicks
writeTChan (chanEvents channel) (ChanRecvNick oldNick newNick)
else return ()
return $ return True
-- | Handle received self nick message.
handleRecvSelfNick :: Channel -> Nick -> Nick -> STM (AM Bool)
handleRecvSelfNick channel oldNick newNick = do
oldNicks <- readTVar $ chanNames channel
let newNicks = map (\(nick, status) -> (if nick == oldNick then newNick else nick, status)) oldNicks
writeTVar (chanNames channel) newNicks
writeTChan (chanEvents channel) (ChanRecvSelfNick oldNick newNick)
return $ return True
-- | Handle self message message.
handleSelfMessage :: Channel -> Nick -> ChannelNameOrNick -> MessageComment -> STM (AM Bool)
handleSelfMessage channel nick dest comment = do
name' <- readTVar $ chanName channel
if dest == CnonChannelName name'
then writeTChan (chanEvents channel) (ChanSelfMessage nick comment)
else return ()
return $ return True
-- | Handle self notice message.
handleSelfNotice :: Channel -> Nick -> ChannelNameOrNick -> MessageComment -> STM (AM Bool)
handleSelfNotice channel nick dest comment = do
name' <- readTVar $ chanName channel
if dest == CnonChannelName name'
then writeTChan (chanEvents channel) (ChanSelfNotice nick comment)
else return ()
return $ return True
-- | Handle self CTCP request message.
handleSelfCtcpRequest :: Channel -> Nick -> ChannelNameOrNick -> MessageComment -> STM (AM Bool)
handleSelfCtcpRequest channel nick dest comment = do
name' <- readTVar $ chanName channel
if dest == CnonChannelName name'
then writeTChan (chanEvents channel) (ChanSelfCtcpRequest nick comment)
else return ()
return $ return True
-- | Handle self CTCP reply message.
handleSelfCtcpReply :: Channel -> Nick -> ChannelNameOrNick -> MessageComment -> STM (AM Bool)
handleSelfCtcpReply channel nick dest comment = do
name' <- readTVar $ chanName channel
if dest == CnonChannelName name'
then writeTChan (chanEvents channel) (ChanSelfCtcpReply nick comment)
else return ()
return $ return True
-- | Get the item in a position from the end of a list
fromEnd :: Int -> [a] -> Maybe a
fromEnd position list
| length list < position = Nothing
| otherwise = case drop ((length list) - position) list of
item : _ -> Just item
_ -> Nothing
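-- Illustrative examples (not part of the original source):
-- fromEnd 1 ["a", "b", "c"] == Just "c" and fromEnd 4 ["a", "b", "c"] == Nothing.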
-- | Get whether a nick is in a channel.
isNickInChannel :: Channel -> Nick -> STM Bool
isNickInChannel channel nick = elem nick . map fst <$> readTVar (chanNames channel)
|
tabemann/amphibian
|
src_old/Network/IRC/Client/Amphibian/ChannelServer.hs
|
bsd-3-clause
| 25,961 | 4 | 25 | 7,703 | 6,740 | 3,254 | 3,486 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
The @match@ function
-}
{-# LANGUAGE CPP #-}
module Match ( match, matchEquations, matchWrapper, matchSimply, matchSinglePat ) where
#include "HsVersions.h"
import {-#SOURCE#-} DsExpr (dsLExpr, dsSyntaxExpr)
import DynFlags
import HsSyn
import TcHsSyn
import TcEvidence
import TcRnMonad
import Check
import CoreSyn
import Literal
import CoreUtils
import MkCore
import DsMonad
import DsBinds
import DsGRHSs
import DsUtils
import Id
import ConLike
import DataCon
import PatSyn
import MatchCon
import MatchLit
import Type
import Coercion ( eqCoercion )
import TcType ( toTcTypeBag )
import TyCon( isNewTyCon )
import TysWiredIn
import ListSetOps
import SrcLoc
import Maybes
import Util
import Name
import Outputable
import BasicTypes ( isGenerated )
import Unique
import UniqDFM
import Control.Monad( when, unless )
import qualified Data.Map as Map
{-
************************************************************************
* *
The main matching function
* *
************************************************************************
The function @match@ is basically the same as in the Wadler chapter,
except it is monadised, to carry around the name supply, info about
annotations, etc.
Notes on @match@'s arguments, assuming $m$ equations and $n$ patterns:
\begin{enumerate}
\item
A list of $n$ variable names, those variables presumably bound to the
$n$ expressions being matched against the $n$ patterns. Using the
list of $n$ expressions as the first argument showed no benefit and
some inelegance.
\item
The second argument, a list giving the ``equation info'' for each of
the $m$ equations:
\begin{itemize}
\item
the $n$ patterns for that equation, and
\item
a list of Core bindings [@(Id, CoreExpr)@ pairs] to be ``stuck on
the front'' of the matching code, as in:
\begin{verbatim}
let <binds>
in <matching-code>
\end{verbatim}
\item
and finally: (ToDo: fill in)
The right way to think about the ``after-match function'' is that it
is an embryonic @CoreExpr@ with a ``hole'' at the end for the
final ``else expression''.
\end{itemize}
There is a type synonym, @EquationInfo@, defined in module @DsUtils@.
An experiment with re-ordering this information about equations (in
particular, having the patterns available in column-major order)
showed no benefit.
\item
A default expression---what to evaluate if the overall pattern-match
fails. This expression will (almost?) always be
a measly expression @Var@, unless we know it will only be used once
(as we do in @glue_success_exprs@).
Leaving out this third argument to @match@ (and slamming in lots of
@Var "fail"@s) is a positively {\em bad} idea, because it makes it
impossible to share the default expressions. (Also, it stands no
chance of working in our post-upheaval world of @Locals@.)
\end{enumerate}
Note: @match@ is often called via @matchWrapper@ (end of this module),
a function that does much of the house-keeping that goes with a call
to @match@.
It is also worth mentioning the {\em typical} way a block of equations
is desugared with @match@. At each stage, it is the first column of
patterns that is examined. The steps carried out are roughly:
\begin{enumerate}
\item
Tidy the patterns in column~1 with @tidyEqnInfo@ (this may add
bindings to the second component of the equation-info):
\begin{itemize}
\item
Remove the `as' patterns from column~1.
\item
Make all constructor patterns in column~1 into @ConPats@, notably
@ListPats@ and @TuplePats@.
\item
Handle any irrefutable (or ``twiddle'') @LazyPats@.
\end{itemize}
\item
Now {\em unmix} the equations into {\em blocks} [w\/ local function
@unmix_eqns@], in which the equations in a block all have variable
patterns in column~1, or they all have constructor patterns in ...
(see ``the mixture rule'' in SLPJ).
\item
Call @matchEqnBlock@ on each block of equations; it will do the
appropriate thing for each kind of column-1 pattern, usually ending up
in a recursive call to @match@.
\end{enumerate}
We are a little more paranoid about the ``empty rule'' (SLPJ, p.~87)
than the Wadler-chapter code for @match@ (p.~93, first @match@ clause).
And gluing the ``success expressions'' together isn't quite so pretty.
This (more interesting) clause of @match@ uses @tidy_and_unmix_eqns@
(a)~to get `as'- and `twiddle'-patterns out of the way (tidying), and
(b)~to do ``the mixture rule'' (SLPJ, p.~88) [which really {\em
un}mixes the equations], producing a list of equation-info
blocks, each block having as its first column of patterns either all
constructors, or all variables (or similar beasts), etc.
@match_unmixed_eqn_blks@ simply takes the place of the @foldr@ in the
Wadler-chapter @match@ (p.~93, last clause), and @match_unmixed_blk@
corresponds roughly to @matchVarCon@.
-}
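-- An illustrative (non-normative) example of the mixture rule described above:
-- given the equations
--
-- > f ('a':_) = "a"
-- > f _       = "other"
--
-- the first pattern column holds a constructor pattern followed by a variable
-- pattern, so the equations are split into two blocks that are matched in
-- turn, the second block acting as the fall-through of the first.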
match :: [Id] -- Variables rep\'ing the exprs we\'re matching with
-> Type -- Type of the case expression
-> [EquationInfo] -- Info about patterns, etc. (type synonym below)
-> DsM MatchResult -- Desugared result!
match [] ty eqns
= ASSERT2( not (null eqns), ppr ty )
return (foldr1 combineMatchResults match_results)
where
match_results = [ ASSERT( null (eqn_pats eqn) )
eqn_rhs eqn
| eqn <- eqns ]
match vars@(v:_) ty eqns -- Eqns *can* be empty
= do { dflags <- getDynFlags
-- Tidy the first pattern, generating
-- auxiliary bindings if necessary
; (aux_binds, tidy_eqns) <- mapAndUnzipM (tidyEqnInfo v) eqns
-- Group the equations and match each group in turn
; let grouped = groupEquations dflags tidy_eqns
-- print the view patterns that are commoned up to help debug
; whenDOptM Opt_D_dump_view_pattern_commoning (debug grouped)
; match_results <- match_groups grouped
; return (adjustMatchResult (foldr (.) id aux_binds) $
foldr1 combineMatchResults match_results) }
where
dropGroup :: [(PatGroup,EquationInfo)] -> [EquationInfo]
dropGroup = map snd
match_groups :: [[(PatGroup,EquationInfo)]] -> DsM [MatchResult]
-- Result list of [MatchResult] is always non-empty
match_groups [] = matchEmpty v ty
match_groups gs = mapM match_group gs
match_group :: [(PatGroup,EquationInfo)] -> DsM MatchResult
match_group [] = panic "match_group"
match_group eqns@((group,_) : _)
= case group of
PgCon {} -> matchConFamily vars ty (subGroupUniq [(c,e) | (PgCon c, e) <- eqns])
PgSyn {} -> matchPatSyn vars ty (dropGroup eqns)
PgLit {} -> matchLiterals vars ty (subGroupOrd [(l,e) | (PgLit l, e) <- eqns])
PgAny -> matchVariables vars ty (dropGroup eqns)
PgN {} -> matchNPats vars ty (dropGroup eqns)
PgNpK {} -> matchNPlusKPats vars ty (dropGroup eqns)
PgBang -> matchBangs vars ty (dropGroup eqns)
PgCo {} -> matchCoercion vars ty (dropGroup eqns)
PgView {} -> matchView vars ty (dropGroup eqns)
PgOverloadedList -> matchOverloadedList vars ty (dropGroup eqns)
-- FIXME: we should also warn about view patterns that should be
-- commoned up but are not
-- print some stuff to see what's getting grouped
-- use -dppr-debug to see the resolution of overloaded literals
debug eqns =
let gs = map (\group -> foldr (\ (p,_) -> \acc ->
case p of PgView e _ -> e:acc
_ -> acc) [] group) eqns
maybeWarn [] = return ()
maybeWarn l = warnDs NoReason (vcat l)
in
maybeWarn $ (map (\g -> text "Putting these view expressions into the same case:" <+> (ppr g))
(filter (not . null) gs))
matchEmpty :: Id -> Type -> DsM [MatchResult]
-- See Note [Empty case alternatives]
matchEmpty var res_ty
= return [MatchResult CanFail mk_seq]
where
mk_seq fail = return $ mkWildCase (Var var) (idType var) res_ty
[(DEFAULT, [], fail)]
matchVariables :: [Id] -> Type -> [EquationInfo] -> DsM MatchResult
-- Real true variables, just like in matchVar, SLPJ p 94
-- No binding to do: they'll all be wildcards by now (done in tidy)
matchVariables (_:vars) ty eqns = match vars ty (shiftEqns eqns)
matchVariables [] _ _ = panic "matchVariables"
matchBangs :: [Id] -> Type -> [EquationInfo] -> DsM MatchResult
matchBangs (var:vars) ty eqns
= do { match_result <- match (var:vars) ty $
map (decomposeFirstPat getBangPat) eqns
; return (mkEvalMatchResult var ty match_result) }
matchBangs [] _ _ = panic "matchBangs"
matchCoercion :: [Id] -> Type -> [EquationInfo] -> DsM MatchResult
-- Apply the coercion to the match variable and then match that
matchCoercion (var:vars) ty (eqns@(eqn1:_))
= do { let CoPat co pat _ = firstPat eqn1
; let pat_ty' = hsPatType pat
; var' <- newUniqueId var pat_ty'
; match_result <- match (var':vars) ty $
map (decomposeFirstPat getCoPat) eqns
; rhs' <- dsHsWrapper co (Var var)
; return (mkCoLetMatchResult (NonRec var' rhs') match_result) }
matchCoercion _ _ _ = panic "matchCoercion"
matchView :: [Id] -> Type -> [EquationInfo] -> DsM MatchResult
-- Apply the view function to the match variable and then match that
matchView (var:vars) ty (eqns@(eqn1:_))
= do { -- we could pass in the expr from the PgView,
-- but this needs to extract the pat anyway
-- to figure out the type of the fresh variable
let ViewPat viewExpr (L _ pat) _ = firstPat eqn1
-- do the rest of the compilation
; let pat_ty' = hsPatType pat
; var' <- newUniqueId var pat_ty'
; match_result <- match (var':vars) ty $
map (decomposeFirstPat getViewPat) eqns
-- compile the view expressions
; viewExpr' <- dsLExpr viewExpr
; return (mkViewMatchResult var'
(mkCoreAppDs (text "matchView") viewExpr' (Var var))
match_result) }
matchView _ _ _ = panic "matchView"
matchOverloadedList :: [Id] -> Type -> [EquationInfo] -> DsM MatchResult
matchOverloadedList (var:vars) ty (eqns@(eqn1:_))
-- Since overloaded list patterns are treated as view patterns,
-- the code is roughly the same as for matchView
= do { let ListPat _ elt_ty (Just (_,e)) = firstPat eqn1
; var' <- newUniqueId var (mkListTy elt_ty) -- we construct the overall type by hand
; match_result <- match (var':vars) ty $
map (decomposeFirstPat getOLPat) eqns -- getOLPat builds the pattern inside as a non-overloaded version of the overloaded list pattern
; e' <- dsSyntaxExpr e [Var var]
; return (mkViewMatchResult var' e' match_result) }
matchOverloadedList _ _ _ = panic "matchOverloadedList"
-- decompose the first pattern and leave the rest alone
decomposeFirstPat :: (Pat Id -> Pat Id) -> EquationInfo -> EquationInfo
decomposeFirstPat extractpat (eqn@(EqnInfo { eqn_pats = pat : pats }))
= eqn { eqn_pats = extractpat pat : pats}
decomposeFirstPat _ _ = panic "decomposeFirstPat"
getCoPat, getBangPat, getViewPat, getOLPat :: Pat Id -> Pat Id
getCoPat (CoPat _ pat _) = pat
getCoPat _ = panic "getCoPat"
getBangPat (BangPat pat ) = unLoc pat
getBangPat _ = panic "getBangPat"
getViewPat (ViewPat _ pat _) = unLoc pat
getViewPat _ = panic "getViewPat"
getOLPat (ListPat pats ty (Just _)) = ListPat pats ty Nothing
getOLPat _ = panic "getOLPat"
{-
Note [Empty case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The list of EquationInfo can be empty, arising from
case x of {} or \case {}
In that situation we desugar to
case x of { _ -> error "pattern match failure" }
The *desugarer* isn't certain whether there really should be no
alternatives, so it adds a default case, as it always does. A later
pass may remove it if it's inaccessible. (See also Note [Empty case
alternatives] in CoreSyn.)
We do *not* desugar simply to
error "empty case"
or some such, because 'x' might be bound to (error "hello"), in which
case we want to see that "hello" exception, not (error "empty case").
See also Note [Case elimination: lifted case] in Simplify.
************************************************************************
* *
Tidying patterns
* *
************************************************************************
Tidy up the leftmost pattern in an @EquationInfo@, given the variable @v@
which will be scrutinised. This means:
\begin{itemize}
\item
Replace variable patterns @x@ (@x /= v@) with the pattern @_@,
together with the binding @x = v@.
\item
Replace the `as' pattern @x@@p@ with the pattern p and a binding @x = v@.
\item
Removing lazy (irrefutable) patterns (you don't want to know...).
\item
Converting explicit tuple-, list-, and parallel-array-pats into ordinary
@ConPats@.
\item
Convert the literal pat "" to [].
\end{itemize}
The result of this tidying is that the column of patterns will include
{\em only}:
\begin{description}
\item[@WildPats@:]
The @VarPat@ information isn't needed any more after this.
\item[@ConPats@:]
@ListPats@, @TuplePats@, etc., are all converted into @ConPats@.
\item[@LitPats@ and @NPats@:]
@LitPats@/@NPats@ of ``known friendly types'' (Int, Char,
Float, Double, at least) are converted to unboxed form; e.g.,
\tr{(NPat (HsInt i) _ _)} is converted to:
\begin{verbatim}
(ConPat I# _ _ [LitPat (HsIntPrim i)])
\end{verbatim}
\end{description}
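As a small illustration (not from the original commentary): tidying the
equation
\begin{verbatim}
        f x@(y:ys) = e
\end{verbatim}
against the match variable @v@ replaces the `as' pattern by the constructor
pattern @(y:ys)@ (a @ConPat@ for the cons constructor) and sticks the binding
@x = v@ on the front of the matching code, so subsequent stages see only the
forms listed above.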
-}
tidyEqnInfo :: Id -> EquationInfo
-> DsM (DsWrapper, EquationInfo)
-- DsM'd because of internal call to dsLHsBinds
-- and mkSelectorBinds.
-- "tidy1" does the interesting stuff, looking at
-- one pattern and fiddling the list of bindings.
--
-- POST CONDITION: head pattern in the EqnInfo is
-- WildPat
-- ConPat
-- NPat
-- LitPat
-- NPlusKPat
-- but no other
tidyEqnInfo _ (EqnInfo { eqn_pats = [] })
= panic "tidyEqnInfo"
tidyEqnInfo v eqn@(EqnInfo { eqn_pats = pat : pats })
= do { (wrap, pat') <- tidy1 v pat
       ; return (wrap, eqn { eqn_pats = pat' : pats }) }
tidy1 :: Id -- The Id being scrutinised
-> Pat Id -- The pattern against which it is to be matched
-> DsM (DsWrapper, -- Extra bindings to do before the match
Pat Id) -- Equivalent pattern
-------------------------------------------------------
-- (pat', mr') = tidy1 v pat mr
-- tidies the *outer level only* of pat, giving pat'
-- It eliminates many pattern forms (as-patterns, variable patterns,
-- list patterns, etc) yielding one of:
-- WildPat
-- ConPatOut
-- LitPat
-- NPat
-- NPlusKPat
tidy1 v (ParPat pat) = tidy1 v (unLoc pat)
tidy1 v (SigPatOut pat _) = tidy1 v (unLoc pat)
tidy1 _ (WildPat ty) = return (idDsWrapper, WildPat ty)
tidy1 v (BangPat (L l p)) = tidy_bang_pat v l p
-- case v of { x -> mr[] }
-- = case v of { _ -> let x=v in mr[] }
tidy1 v (VarPat (L _ var))
= return (wrapBind var v, WildPat (idType var))
-- case v of { x@p -> mr[] }
-- = case v of { p -> let x=v in mr[] }
tidy1 v (AsPat (L _ var) pat)
= do { (wrap, pat') <- tidy1 v (unLoc pat)
; return (wrapBind var v . wrap, pat') }
{- now, here we handle lazy patterns:
tidy1 v ~p bs = (v, v1 = case v of p -> v1 :
v2 = case v of p -> v2 : ... : bs )
where the v_i's are the binders in the pattern.
ToDo: in "v_i = ... -> v_i", are the v_i's really the same thing?
The case expr for v_i is just: match [v] [(p, [], \ x -> Var v_i)] any_expr
-}
tidy1 v (LazyPat pat)
= do { (_,sel_prs) <- mkSelectorBinds [] pat (Var v)
; let sel_binds = [NonRec b rhs | (b,rhs) <- sel_prs]
; return (mkCoreLets sel_binds, WildPat (idType v)) }
tidy1 _ (ListPat pats ty Nothing)
= return (idDsWrapper, unLoc list_ConPat)
where
list_ConPat = foldr (\ x y -> mkPrefixConPat consDataCon [x, y] [ty])
(mkNilPat ty)
pats
-- Introduce fake parallel array constructors to be able to handle parallel
-- arrays with the existing machinery for constructor pattern
tidy1 _ (PArrPat pats ty)
= return (idDsWrapper, unLoc parrConPat)
where
arity = length pats
parrConPat = mkPrefixConPat (parrFakeCon arity) pats [ty]
tidy1 _ (TuplePat pats boxity tys)
= return (idDsWrapper, unLoc tuple_ConPat)
where
arity = length pats
tuple_ConPat = mkPrefixConPat (tupleDataCon boxity arity) pats tys
-- LitPats: we *might* be able to replace these w/ a simpler form
tidy1 _ (LitPat lit)
= return (idDsWrapper, tidyLitPat lit)
-- NPats: we *might* be able to replace these w/ a simpler form
tidy1 _ (NPat (L _ lit) mb_neg eq ty)
= return (idDsWrapper, tidyNPat tidyLitPat lit mb_neg eq ty)
-- Everything else goes through unchanged...
tidy1 _ non_interesting_pat
= return (idDsWrapper, non_interesting_pat)
--------------------
tidy_bang_pat :: Id -> SrcSpan -> Pat Id -> DsM (DsWrapper, Pat Id)
-- Discard par/sig under a bang
tidy_bang_pat v _ (ParPat (L l p)) = tidy_bang_pat v l p
tidy_bang_pat v _ (SigPatOut (L l p) _) = tidy_bang_pat v l p
-- Push the bang-pattern inwards, in the hope that
-- it may disappear next time
tidy_bang_pat v l (AsPat v' p) = tidy1 v (AsPat v' (L l (BangPat p)))
tidy_bang_pat v l (CoPat w p t) = tidy1 v (CoPat w (BangPat (L l p)) t)
-- Discard bang around strict pattern
tidy_bang_pat v _ p@(LitPat {}) = tidy1 v p
tidy_bang_pat v _ p@(ListPat {}) = tidy1 v p
tidy_bang_pat v _ p@(TuplePat {}) = tidy1 v p
tidy_bang_pat v _ p@(PArrPat {}) = tidy1 v p
-- Data/newtype constructors
tidy_bang_pat v l p@(ConPatOut { pat_con = L _ (RealDataCon dc), pat_args = args })
| isNewTyCon (dataConTyCon dc) -- Newtypes: push bang inwards (Trac #9844)
= tidy1 v (p { pat_args = push_bang_into_newtype_arg l args })
| otherwise -- Data types: discard the bang
= tidy1 v p
-------------------
-- Default case, leave the bang there:
-- VarPat,
-- LazyPat,
-- WildPat,
-- ViewPat,
-- pattern synonyms (ConPatOut with PatSynCon)
-- NPat,
-- NPlusKPat
--
-- For LazyPat, remember that it's semantically like a VarPat
-- i.e. !(~p) is not like ~p, or p! (Trac #8952)
--
-- NB: SigPatIn, ConPatIn should not happen
tidy_bang_pat _ l p = return (idDsWrapper, BangPat (L l p))
-------------------
push_bang_into_newtype_arg :: SrcSpan -> HsConPatDetails Id -> HsConPatDetails Id
-- See Note [Bang patterns and newtypes]
-- We are transforming !(N p) into (N !p)
push_bang_into_newtype_arg l (PrefixCon (arg:args))
= ASSERT( null args)
PrefixCon [L l (BangPat arg)]
push_bang_into_newtype_arg l (RecCon rf)
| HsRecFields { rec_flds = L lf fld : flds } <- rf
, HsRecField { hsRecFieldArg = arg } <- fld
= ASSERT( null flds)
RecCon (rf { rec_flds = [L lf (fld { hsRecFieldArg = L l (BangPat arg) })] })
push_bang_into_newtype_arg _ cd
= pprPanic "push_bang_into_newtype_arg" (pprConArgs cd)
{-
Note [Bang patterns and newtypes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For the pattern !(Just pat) we can discard the bang, because
the pattern is strict anyway. But for !(N pat), where
newtype NT = N Int
we definitely can't discard the bang. Trac #9844.
So what we do is to push the bang inwards, in the hope that it will
get discarded there. So we transform
!(N pat) into (N !pat)
\noindent
{\bf Previous @matchTwiddled@ stuff:}
Now we get to the only interesting part; note: there are choices for
translation [from Simon's notes]; translation~1:
\begin{verbatim}
deTwiddle [s,t] e
\end{verbatim}
returns
\begin{verbatim}
[ w = e,
s = case w of [s,t] -> s
t = case w of [s,t] -> t
]
\end{verbatim}
Here \tr{w} is a fresh variable, and the \tr{w}-binding prevents multiple
evaluation of \tr{e}. An alternative translation (No.~2):
\begin{verbatim}
[ w = case e of [s,t] -> (s,t)
s = case w of (s,t) -> s
t = case w of (s,t) -> t
]
\end{verbatim}
************************************************************************
* *
\subsubsection[improved-unmixing]{UNIMPLEMENTED idea for improved unmixing}
* *
************************************************************************
We might be able to optimise unmixing when confronted by
only-one-constructor-possible, of which tuples are the most notable
examples. Consider:
\begin{verbatim}
f (a,b,c) ... = ...
f d ... (e:f) = ...
f (g,h,i) ... = ...
f j ... = ...
\end{verbatim}
This definition would normally be unmixed into four equation blocks,
one per equation. But it could be unmixed into just one equation
block, because if the one equation matches (on the first column),
the others certainly will.
You have to be careful, though; the example
\begin{verbatim}
f j ... = ...
-------------------
f (a,b,c) ... = ...
f d ... (e:f) = ...
f (g,h,i) ... = ...
\end{verbatim}
{\em must} be broken into two blocks at the line shown; otherwise, you
are forcing unnecessary evaluation. In any case, the top-left pattern
always gives the cue. You could then unmix blocks into groups of...
\begin{description}
\item[all variables:]
As it is now.
\item[constructors or variables (mixed):]
Need to make sure the right names get bound for the variable patterns.
\item[literals or variables (mixed):]
Presumably just a variant on the constructor case (as it is now).
\end{description}
************************************************************************
* *
* matchWrapper: a convenient way to call @match@ *
* *
************************************************************************
\subsection[matchWrapper]{@matchWrapper@: a convenient interface to @match@}
Calls to @match@ often involve similar (non-trivial) work; that work
is collected here, in @matchWrapper@. This function takes as
arguments:
\begin{itemize}
\item
Typechecked @Matches@ (of a function definition, or a case or lambda
expression)---the main input;
\item
An error message to be inserted into any (runtime) pattern-matching
failure messages.
\end{itemize}
As results, @matchWrapper@ produces:
\begin{itemize}
\item
A list of variables (@Locals@) that the caller must ``promise'' to
bind to appropriate values; and
\item
a @CoreExpr@, the desugared output (main result).
\end{itemize}
The main actions of @matchWrapper@ include:
\begin{enumerate}
\item
Flatten the @[TypecheckedMatch]@ into a suitable list of
@EquationInfo@s.
\item
Create as many new variables as there are patterns in a pattern-list
(in any one of the @EquationInfo@s).
\item
Create a suitable ``if it fails'' expression---a call to @error@ using
the error-string input; the {\em type} of this fail value can be found
by examining one of the RHS expressions in one of the @EquationInfo@s.
\item
Call @match@ with all of this information!
\end{enumerate}
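As a rough, illustrative sketch of the overall result: for a definition such as
\begin{verbatim}
        f (Just x) []     = e1
        f Nothing  (y:ys) = e2
\end{verbatim}
@matchWrapper@ invents two fresh variables, say @a@ and @b@, flattens the two
clauses into @EquationInfo@s over them, builds a fail expression that calls
@error@ at the result type of the right-hand sides, and hands all of that to
@match [a,b]@; the caller gets back @[a,b]@ together with the desugared
@CoreExpr@.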
-}
matchWrapper :: HsMatchContext Name -- For shadowing warning messages
-> Maybe (LHsExpr Id) -- The scrutinee, if we check a case expr
-> MatchGroup Id (LHsExpr Id) -- Matches being desugared
-> DsM ([Id], CoreExpr) -- Results
{-
There is one small problem with the Lambda Patterns, when somebody
writes something similar to:
\begin{verbatim}
(\ (x:xs) -> ...)
\end{verbatim}
they don't want a warning about incomplete patterns; that is controlled by
the flag @opt_WarnSimplePatterns@.
This problem also appears in the:
\begin{itemize}
\item @do@ patterns: if the @do@ can fail,
another equation is created for the failing match
(see the @DsExpr.doDo@ function)
\item @let@ patterns are treated by @matchSimply@;
list comprehension patterns are treated by @matchSimply@ as well
\end{itemize}
We can't call @matchSimply@ with lambda patterns,
because lambda patterns can have more than one pattern,
and @matchSimply@ only accepts one pattern.
JJQC 30-Nov-1997
-}
matchWrapper ctxt mb_scr (MG { mg_alts = L _ matches
, mg_arg_tys = arg_tys
, mg_res_ty = rhs_ty
, mg_origin = origin })
= do { dflags <- getDynFlags
; locn <- getSrcSpanDs
; new_vars <- case matches of
[] -> mapM newSysLocalDs arg_tys
(m:_) -> selectMatchVars (map unLoc (hsLMatchPats m))
; eqns_info <- mapM (mk_eqn_info new_vars) matches
-- pattern match check warnings
; unless (isGenerated origin) $
when (isAnyPmCheckEnabled dflags (DsMatchContext ctxt locn)) $
addTmCsDs (genCaseTmCs1 mb_scr new_vars) $
-- See Note [Type and Term Equality Propagation]
checkMatches dflags (DsMatchContext ctxt locn) new_vars matches
; result_expr <- handleWarnings $
matchEquations ctxt new_vars eqns_info rhs_ty
; return (new_vars, result_expr) }
where
mk_eqn_info vars (L _ (Match _ pats _ grhss))
= do { dflags <- getDynFlags
; let upats = map (unLoc . decideBangHood dflags) pats
dicts = toTcTypeBag (collectEvVarsPats upats) -- Only TcTyVars
; tm_cs <- genCaseTmCs2 mb_scr upats vars
; match_result <- addDictsDs dicts $ -- See Note [Type and Term Equality Propagation]
addTmCsDs tm_cs $ -- See Note [Type and Term Equality Propagation]
dsGRHSs ctxt upats grhss rhs_ty
; return (EqnInfo { eqn_pats = upats, eqn_rhs = match_result}) }
handleWarnings = if isGenerated origin
then discardWarningsDs
else id
matchEquations :: HsMatchContext Name
-> [Id] -> [EquationInfo] -> Type
-> DsM CoreExpr
matchEquations ctxt vars eqns_info rhs_ty
= do { let error_doc = matchContextErrString ctxt
; match_result <- match vars rhs_ty eqns_info
; fail_expr <- mkErrorAppDs pAT_ERROR_ID rhs_ty error_doc
; extractMatchResult match_result fail_expr }
{-
************************************************************************
* *
\subsection[matchSimply]{@matchSimply@: match a single expression against a single pattern}
* *
************************************************************************
@matchSimply@ is a wrapper for @match@ which deals with the
situation where we want to match a single expression against a single
pattern. It returns an expression.
-}
matchSimply :: CoreExpr -- Scrutinee
-> HsMatchContext Name -- Match kind
-> LPat Id -- Pattern it should match
-> CoreExpr -- Return this if it matches
-> CoreExpr -- Return this if it doesn't
-> DsM CoreExpr
-- Do not warn about incomplete patterns; see matchSinglePat comments
matchSimply scrut hs_ctx pat result_expr fail_expr = do
let
match_result = cantFailMatchResult result_expr
rhs_ty = exprType fail_expr
        -- Use exprType of fail_expr, because it won't refine in the case of failure!
match_result' <- matchSinglePat scrut hs_ctx pat rhs_ty match_result
extractMatchResult match_result' fail_expr
matchSinglePat :: CoreExpr -> HsMatchContext Name -> LPat Id
-> Type -> MatchResult -> DsM MatchResult
-- matchSinglePat does not warn about incomplete patterns
-- Used for things like [ e | pat <- stuff ], where
-- incomplete patterns are just fine
matchSinglePat (Var var) ctx pat ty match_result
| isLocalId var
= match_single_pat_var var ctx pat ty match_result
matchSinglePat scrut hs_ctx pat ty match_result
= do { var <- selectSimpleMatchVarL pat
; match_result' <- match_single_pat_var var hs_ctx pat ty match_result
; return (adjustMatchResult (bindNonRec var scrut) match_result') }
match_single_pat_var :: Id -> HsMatchContext Name -> LPat Id
-> Type -> MatchResult -> DsM MatchResult
-- matchSinglePat ensures that the scrutinee is a variable
-- and then calls match_single_pat_var
match_single_pat_var var ctx pat ty match_result
= do { dflags <- getDynFlags
; locn <- getSrcSpanDs
-- Pattern match check warnings
; checkSingle dflags (DsMatchContext ctx locn) var (unLoc pat)
; let eqn_info = EqnInfo { eqn_pats = [unLoc (decideBangHood dflags pat)]
, eqn_rhs = match_result }
; match [var] ty [eqn_info] }
{-
************************************************************************
* *
Pattern classification
* *
************************************************************************
-}
data PatGroup
= PgAny -- Immediate match: variables, wildcards,
-- lazy patterns
| PgCon DataCon -- Constructor patterns (incl list, tuple)
| PgSyn PatSyn [Type] -- See Note [Pattern synonym groups]
| PgLit Literal -- Literal patterns
| PgN Literal -- Overloaded literals
| PgNpK Literal -- n+k patterns
| PgBang -- Bang patterns
| PgCo Type -- Coercion patterns; the type is the type
-- of the pattern *inside*
| PgView (LHsExpr Id) -- view pattern (e -> p):
-- the LHsExpr is the expression e
Type -- the Type is the type of p (equivalently, the result type of e)
| PgOverloadedList
groupEquations :: DynFlags -> [EquationInfo] -> [[(PatGroup, EquationInfo)]]
-- If the result is of form [g1, g2, g3],
-- (a) all the (pg,eq) pairs in g1 have the same pg
-- (b) none of the gi are empty
-- The ordering of equations is unchanged
groupEquations dflags eqns
= runs same_gp [(patGroup dflags (firstPat eqn), eqn) | eqn <- eqns]
where
same_gp :: (PatGroup,EquationInfo) -> (PatGroup,EquationInfo) -> Bool
(pg1,_) `same_gp` (pg2,_) = pg1 `sameGroup` pg2
subGroup :: (m -> [[EquationInfo]]) -- Map.elems
-> m -- Map.empty
-> (a -> m -> Maybe [EquationInfo]) -- Map.lookup
-> (a -> [EquationInfo] -> m -> m) -- Map.insert
-> [(a, EquationInfo)] -> [[EquationInfo]]
-- Input is a particular group. The result sub-groups the
-- equations by which particular constructor, literal etc they match.
-- Each sub-list in the result has the same PatGroup
-- See Note [Take care with pattern order]
-- Parameterized by map operations to allow different implementations
-- and constraints, e.g. types without an Ord instance.
subGroup elems empty lookup insert group
= map reverse $ elems $ foldl accumulate empty group
where
accumulate pg_map (pg, eqn)
= case lookup pg pg_map of
Just eqns -> insert pg (eqn:eqns) pg_map
Nothing -> insert pg [eqn] pg_map
-- pg_map :: Map a [EquationInfo]
-- Equations seen so far in reverse order of appearance
subGroupOrd :: Ord a => [(a, EquationInfo)] -> [[EquationInfo]]
subGroupOrd = subGroup Map.elems Map.empty Map.lookup Map.insert
subGroupUniq :: Uniquable a => [(a, EquationInfo)] -> [[EquationInfo]]
subGroupUniq =
subGroup eltsUDFM emptyUDFM (flip lookupUDFM) (\k v m -> addToUDFM m k v)
{- Note [Pattern synonym groups]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we see
f (P a) = e1
f (P b) = e2
...
where P is a pattern synonym, can we put (P a -> e1) and (P b -> e2) in the
same group? We can if P is a constructor, but /not/ if P is a pattern synonym.
Consider (Trac #11224)
-- readMaybe :: Read a => String -> Maybe a
pattern PRead :: Read a => () => a -> String
pattern PRead a <- (readMaybe -> Just a)
f (PRead (x::Int)) = e1
f (PRead (y::Bool)) = e2
This is all fine: we match the string by trying to read an Int; if that
fails we try to read a Bool. But clearly we can't combine the two into a single
match.
Conclusion: we can combine when we invoke PRead /at the same type/. Hence
in PgSyn we record the instantiating types, and use them in sameGroup.
Note [Take care with pattern order]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the subGroup function we must be very careful about pattern re-ordering.
Consider the patterns [ (True, Nothing), (False, x), (True, y) ]
Then in bringing together the patterns for True, we must not
swap the Nothing and y!
-}
sameGroup :: PatGroup -> PatGroup -> Bool
-- Same group means that a single case expression
-- or test will suffice to match both, *and* the order
-- of testing within the group is insignificant.
sameGroup PgAny PgAny = True
sameGroup PgBang PgBang = True
sameGroup (PgCon _) (PgCon _) = True -- One case expression
sameGroup (PgSyn p1 t1) (PgSyn p2 t2) = p1==p2 && eqTypes t1 t2
-- eqTypes: See Note [Pattern synonym groups]
sameGroup (PgLit _) (PgLit _) = True -- One case expression
sameGroup (PgN l1) (PgN l2) = l1==l2 -- Order is significant
sameGroup (PgNpK l1) (PgNpK l2) = l1==l2 -- See Note [Grouping overloaded literal patterns]
sameGroup (PgCo t1) (PgCo t2) = t1 `eqType` t2
        -- CoPats are in the same group only if the type of the
-- enclosed pattern is the same. The patterns outside the CoPat
-- always have the same type, so this boils down to saying that
-- the two coercions are identical.
sameGroup (PgView e1 t1) (PgView e2 t2) = viewLExprEq (e1,t1) (e2,t2)
-- ViewPats are in the same group iff the expressions
-- are "equal"---conservatively, we use syntactic equality
sameGroup _ _ = False
-- An approximation of syntactic equality used for determining when view
-- exprs are in the same group.
-- This function can always safely return false;
-- but doing so will result in the application of the view function being repeated.
--
-- Currently: compare applications of literals and variables
-- and anything else that we can do without involving other
-- HsSyn types in the recursion
--
-- NB we can't assume that the two view expressions have the same type. Consider
-- f (e1 -> True) = ...
-- f (e2 -> "hi") = ...
viewLExprEq :: (LHsExpr Id,Type) -> (LHsExpr Id,Type) -> Bool
viewLExprEq (e1,_) (e2,_) = lexp e1 e2
where
lexp :: LHsExpr Id -> LHsExpr Id -> Bool
lexp e e' = exp (unLoc e) (unLoc e')
---------
exp :: HsExpr Id -> HsExpr Id -> Bool
-- real comparison is on HsExpr's
-- strip parens
exp (HsPar (L _ e)) e' = exp e e'
exp e (HsPar (L _ e')) = exp e e'
-- because the expressions do not necessarily have the same type,
-- we have to compare the wrappers
exp (HsWrap h e) (HsWrap h' e') = wrap h h' && exp e e'
exp (HsVar i) (HsVar i') = i == i'
-- the instance for IPName derives using the id, so this works if the
-- above does
exp (HsIPVar i) (HsIPVar i') = i == i'
exp (HsOverLabel l) (HsOverLabel l') = l == l'
exp (HsOverLit l) (HsOverLit l') =
-- Overloaded lits are equal if they have the same type
-- and the data is the same.
-- this is coarser than comparing the SyntaxExpr's in l and l',
-- which resolve the overloading (e.g., fromInteger 1),
-- because these expressions get written as a bunch of different variables
-- (presumably to improve sharing)
eqType (overLitType l) (overLitType l') && l == l'
exp (HsApp e1 e2) (HsApp e1' e2') = lexp e1 e1' && lexp e2 e2'
-- the fixities have been straightened out by now, so it's safe
-- to ignore them?
exp (OpApp l o _ ri) (OpApp l' o' _ ri') =
lexp l l' && lexp o o' && lexp ri ri'
exp (NegApp e n) (NegApp e' n') = lexp e e' && syn_exp n n'
exp (SectionL e1 e2) (SectionL e1' e2') =
lexp e1 e1' && lexp e2 e2'
exp (SectionR e1 e2) (SectionR e1' e2') =
lexp e1 e1' && lexp e2 e2'
exp (ExplicitTuple es1 _) (ExplicitTuple es2 _) =
eq_list tup_arg es1 es2
exp (HsIf _ e e1 e2) (HsIf _ e' e1' e2') =
lexp e e' && lexp e1 e1' && lexp e2 e2'
-- Enhancement: could implement equality for more expressions
-- if it seems useful
-- But no need for HsLit, ExplicitList, ExplicitTuple,
-- because they cannot be functions
exp _ _ = False
---------
syn_exp :: SyntaxExpr Id -> SyntaxExpr Id -> Bool
syn_exp (SyntaxExpr { syn_expr = expr1
, syn_arg_wraps = arg_wraps1
, syn_res_wrap = res_wrap1 })
(SyntaxExpr { syn_expr = expr2
, syn_arg_wraps = arg_wraps2
, syn_res_wrap = res_wrap2 })
= exp expr1 expr2 &&
and (zipWithEqual "viewLExprEq" wrap arg_wraps1 arg_wraps2) &&
wrap res_wrap1 res_wrap2
---------
tup_arg (L _ (Present e1)) (L _ (Present e2)) = lexp e1 e2
tup_arg (L _ (Missing t1)) (L _ (Missing t2)) = eqType t1 t2
tup_arg _ _ = False
---------
wrap :: HsWrapper -> HsWrapper -> Bool
-- Conservative, in that it demands that wrappers be
-- syntactically identical and doesn't look under binders
--
-- Coarser notions of equality are possible
-- (e.g., reassociating compositions,
-- equating different ways of writing a coercion)
wrap WpHole WpHole = True
wrap (WpCompose w1 w2) (WpCompose w1' w2') = wrap w1 w1' && wrap w2 w2'
wrap (WpFun w1 w2 _) (WpFun w1' w2' _) = wrap w1 w1' && wrap w2 w2'
wrap (WpCast co) (WpCast co') = co `eqCoercion` co'
wrap (WpEvApp et1) (WpEvApp et2) = et1 `ev_term` et2
wrap (WpTyApp t) (WpTyApp t') = eqType t t'
-- Enhancement: could implement equality for more wrappers
-- if it seems useful (lams and lets)
wrap _ _ = False
---------
ev_term :: EvTerm -> EvTerm -> Bool
ev_term (EvId a) (EvId b) = a==b
ev_term (EvCoercion a) (EvCoercion b) = a `eqCoercion` b
ev_term _ _ = False
---------
eq_list :: (a->a->Bool) -> [a] -> [a] -> Bool
eq_list _ [] [] = True
eq_list _ [] (_:_) = False
eq_list _ (_:_) [] = False
eq_list eq (x:xs) (y:ys) = eq x y && eq_list eq xs ys
patGroup :: DynFlags -> Pat Id -> PatGroup
patGroup _ (ConPatOut { pat_con = L _ con
, pat_arg_tys = tys })
| RealDataCon dcon <- con = PgCon dcon
| PatSynCon psyn <- con = PgSyn psyn tys
patGroup _ (WildPat {}) = PgAny
patGroup _ (BangPat {}) = PgBang
patGroup _ (NPat (L _ olit) mb_neg _ _) = PgN (hsOverLitKey olit (isJust mb_neg))
patGroup _ (NPlusKPat _ (L _ olit) _ _ _ _)= PgNpK (hsOverLitKey olit False)
patGroup _ (CoPat _ p _) = PgCo (hsPatType p) -- Type of inner pattern
patGroup _ (ViewPat expr p _) = PgView expr (hsPatType (unLoc p))
patGroup _ (ListPat _ _ (Just _)) = PgOverloadedList
patGroup dflags (LitPat lit) = PgLit (hsLitKey dflags lit)
patGroup _ pat = pprPanic "patGroup" (ppr pat)
{-
Note [Grouping overloaded literal patterns]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
WATCH OUT! Consider
f (n+1) = ...
f (n+2) = ...
f (n+1) = ...
We can't group the first and third together, because the second may match
the same thing as the first. Same goes for *overloaded* literal patterns
f 1 True = ...
f 2 False = ...
f 1 False = ...
If the first arg matches '1' but the second does not match 'True', we
cannot jump to the third equation! Because the same argument might
match '2'!
Hence we don't regard 1 and 2, or (n+1) and (n+2), as part of the same group.
-}
|
GaloisInc/halvm-ghc
|
compiler/deSugar/Match.hs
|
bsd-3-clause
| 41,027 | 12 | 21 | 10,866 | 7,091 | 3,722 | 3,369 | 397 | 28 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
module Aria.Types where
import Data.Text hiding (replicate, length, take)
import Data.Data
import Data.Time (UTCTime(..), NominalDiffTime)
import Data.SafeCopy
import Control.Lens
import GHC.Generics hiding (to)
type Repository = FilePath
type SHA = String
type RaceTime = Integer
type BuildName = Text
newtype RacerId = RacerId
{ _unRacerId :: Integer
} deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)
-- | The main racer data types
data Racer = Racer
{ _racerName :: Text -- ^ The racer's real name
, _racerId :: RacerId
, _selectedBuild :: Maybe BuildName
} deriving (Show, Read, Eq, Ord, Data, Typeable, Generic)
data RacerBuild = RacerBuild
{ _buildName :: BuildName
, _buildRev :: SHA
, _buildDate :: UTCTime
, _buildRacerId :: RacerId
} deriving (Eq, Ord, Show, Read, Data, Typeable)
makeLenses ''Racer
$(deriveSafeCopy 0 'base ''Racer)
makeLenses ''RacerBuild
$(deriveSafeCopy 0 'base ''RacerBuild)
makeLenses ''RacerId
$(deriveSafeCopy 0 'base ''RacerId)
|
theNerd247/ariaRacer
|
src/Aria/Types.hs
|
bsd-3-clause
| 1,109 | 0 | 9 | 190 | 335 | 190 | 145 | 34 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
module Andromeda.Simulator.Internal.SimulationCompiler where
import qualified Data.ByteString.Char8 as BS
import qualified Data.Map as M
import qualified Control.Monad.Trans.State as S
import Control.Monad.IO.Class (liftIO)
import Control.Monad
import Control.Concurrent
import Control.Concurrent.STM
import Control.Lens
import Data.Maybe
import Andromeda.Types.Hardware
import Andromeda.Types.Language.Hardware
import Andromeda.Simulator.Types
import Andromeda.Utils.Assert
data ComposingDevice = ComposingDevice
{ _composingSensors :: M.Map ComponentIndex SensorNode
, _composingController :: Maybe (ComponentIndex, ControllerNode)
}
data CompilerState = CompilerState
{ _simulationModel :: SimulationModel
, _composingDevice :: ComposingDevice
}
makeLenses ''ComposingDevice
makeLenses ''CompilerState
type SimCompilerState = S.StateT CompilerState IO
emptyComposingDevice = ComposingDevice M.empty Nothing
emptyCompilerState = CompilerState emptySimModel emptyComposingDevice
assertNoSensor idx = do
mbS <- use (composingDevice . composingSensors . at idx)
assert (isNothing mbS) "Sensor exist" idx
assertNoController idx = do
mbC <- use $ composingDevice . composingController
assert (isNothing mbC) "Controller exist" idx
mkDefaultSensorNode p = do
tvP <- liftIO $ newTVarIO p
tvG <- liftIO $ newTVarIO NoGenerator
tvProd <- liftIO $ newTVarIO False
return $ SensorNode tvP tvG tvProd
mkDefaultControllerNode = return ControllerNode
instance HdlInterpreter SimCompilerState where
onSensorDef compDef compIdx par = do
assertNoSensor compIdx
sn <- mkDefaultSensorNode par
composingDevice . composingSensors . at compIdx %= const (Just sn)
onControllerDef compDef compIdx = do
assertNoController compIdx
cn <- mkDefaultControllerNode
composingDevice . composingController .= Just (compIdx, cn)
instance HndlInterpreter SimCompilerState where
onDeviceDef pa hdl d = do
interpretHdl hdl
m <- use $ composingDevice . composingSensors
let m' = M.mapKeys (\compIdx -> (pa, compIdx)) m
simulationModel . sensorsModel %= M.union m'
composingDevice .= emptyComposingDevice
return $ mkDeviceInterface pa
onTerminalUnitDef pa d = return $ mkTerminalUnitInterface pa
onLogicControlDef pa d = return $ mkInterface pa
onLinkedDeviceDef (DeviceInterface rdi) (TerminalUnitInterface tui) = return ()
onLinkDef interf tui = return ()
compileSimModel :: Hndl () -> IO SimulationModel
compileSimModel hndl = do
let compiler = interpretHndl hndl
(CompilerState m _) <- S.execStateT compiler emptyCompilerState
return m
|
graninas/Andromeda
|
src/Andromeda/Simulator/Internal/SimulationCompiler.hs
|
bsd-3-clause
| 2,754 | 0 | 14 | 423 | 721 | 364 | 357 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
module Strip (strip) where
import Data.List (tails, minimumBy)
import Data.Ord (comparing)
import Control.Monad (forM_, when)
import qualified Data.ByteString.Lazy.Char8 as B
import Options
import Fasta
-- Find best match starting within 'shift' elements from the beginning of 'y'
-- that minimizes the edit distance to 'x'. If edit distance exceeds 'maxErr'
-- then abort the search.
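--
-- A purely illustrative example (not from the original file):
-- @bestMatch 1 2 "abc" "xabc" == (0, 1, 3)@, i.e. skipping one leading
-- element of 'y' yields an exact (zero-error) match of all three primer
-- elements.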
bestMatch :: Eq a => Int -> Int -> [a] -> [a] -> (Int, Int, Int)
bestMatch maxErr shift primer y = (nerr, nshift, nmatch)
where
(nshift,(nerr,nmatch)) = minimumBy (comparing (fst . snd)) $ zip [0..] shifts
shifts = map (editDist e primer) (take k $ tails y)
e = max 0 maxErr
k = max 1 (shift + 1)
-- Calculate edit distance to 'primer'. If edit distance exceeds 'maxErr' then
-- abort the search. The time complexity in 'maxErr' is exponential in the
-- current implementation, so choose it small (e.g. 2).
-- Return '(nerr,nmatch)', where 'nerr' is the number of errors and 'nmatch' is
-- the number of elements that "matched" the primer (up to
-- substitution/insertion/deletion).
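--
-- For instance (illustrative only): @editDist 2 "abc" "axc" == (1, 3)@,
-- i.e. one substitution, with all three primer elements counted as matched.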
editDist :: Eq a => Int -> [a] -> [a] -> (Int, Int)
editDist maxErr primer = go 0 0 primer
where
go !nerr !nmatch [] _ = (nerr, nmatch)
go !nerr !nmatch xs [] = (nerr + length xs, nmatch)
go !nerr !nmatch xa@(x:xs) ya@(y:ys)
| nerr > maxErr = (nerr, nmatch) -- give up
| x == y = go nerr (nmatch+1) xs ys -- exact match
| otherwise = minimumBy (comparing fst) [
go (nerr+1) (nmatch+1) xs ys -- substitution
, go (nerr+1) nmatch xs ya -- insertion
, go (nerr+1) (nmatch+1) xa ys ] -- deletion
strip :: IseqOptions -> IO ()
strip opt = do
let lopt = optCommand opt
opath = optInput lopt
oshift = optShift lopt
oerr = optErrors lopt
oskip = max 0 $ optSkip lopt
orev = optReverse lopt
order = if orev then reverse else id
oprimer = order $ optPrimer lopt
keyval k v | null v = B.empty
| orev = B.pack $ 'r':k ++ '=':v
| otherwise = B.pack $ k ++ '=':v
entries <- readFasta opath
forM_ entries $ \entry -> do
let sequence = order $ B.unpack $ fastaSequence entry
quality = fmap (order . B.unpack) $ fastaQuality entry
(nerr, nshift, nmatch) = bestMatch oerr oshift oprimer
$ drop oskip sequence
when (nerr <= oerr) $ do
let hdr = B.unwords $ filter (not . B.null) [
fastaHeader entry
, keyval "shift" $ order $ take nshift $ drop oskip sequence
, keyval "primer" $ order $ take nmatch
$ drop (oskip+nshift) sequence
, keyval "primer_err" $ show nerr ]
sqn = B.pack $ order $ dropAt oskip (nshift+nmatch) sequence
qual = fmap (B.pack . order . dropAt oskip (nshift+nmatch)) quality
B.putStr $ showFasta $ Fasta hdr sqn qual
dropAt :: Int -> Int -> [a] -> [a]
dropAt n m xs = take n xs ++ drop (n+m) xs
|
b4winckler/iseq
|
src/Strip.hs
|
bsd-3-clause
| 3,142 | 0 | 23 | 941 | 1,073 | 556 | 517 | 56 | 3 |
{-# LANGUAGE LambdaCase #-}
module Main where
import Control.Monad.Error
import Search.Types
import Search.Graph
import qualified Data.List as List
import qualified Data.List.Split as List
import System.Environment (getArgs)
import Mote.Debug
import Mote.LoadFile
import Mote.Search
import qualified Data.Map as M
import Data.Function (on)
main :: IO ()
main = do
(filePath:nStr:fromStr:toStr:_) <- getArgs
let n = read nStr :: Int
from = List.splitOn "," fromStr
to = List.splitOn "," toStr
void . runWithTestRef' $ \r -> runErrorT $ do
loadFile r filePath
ts <- transesInScope
liftIO $ print (length ts)
gs <- search from to n
liftIO $
mapM (\(t, g) ->
print (renderAnnotatedTerm t, lex (t,g)) )
. List.sortBy (compare `on` lex)
. map (\g -> (toTerm g, g))
. map deleteStrayVertices
$ gs
where
lex (t, g) = (numHoles t, M.size (digraph g), length $ connectedComponents g)
|
imeckler/mote
|
Search/Test.hs
|
bsd-3-clause
| 964 | 0 | 23 | 226 | 370 | 199 | 171 | 32 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Data.UpperBoundedPred
( UpperBoundedPred
, applyUpperBoundedPred
, OC (..)
, comparator
, Interval (..)
, isIn
, lcro
, fromInterval
, intervalUpperBound
  ) where

import Data.Data (Data, Typeable)
data UpperBoundedPred a = UpperBoundedPred (a -> Bool) (Maybe a) deriving (Typeable)
applyUpperBoundedPred :: UpperBoundedPred a -> a -> Bool
applyUpperBoundedPred (UpperBoundedPred a _) = a
-- | @
-- if upperBound p < x then applyUBP p x == True
-- @
upperBound :: UpperBoundedPred a -> Maybe a
upperBound (UpperBoundedPred _ a) = a
-- | alias
applyUBP :: UpperBoundedPred a -> a -> Bool
applyUBP = applyUpperBoundedPred
data OC = Open | Closed deriving (Eq, Ord, Enum, Bounded, Show, Read, Data, Typeable)
comparator :: Ord a => OC -> a -> a -> Bool
comparator Closed = (<=)
comparator Open = (<)
data Interval a = L OC a | R OC a | LR OC a OC a deriving (Eq, Ord, Show, Data, Typeable)
-- | >>> filter (`isIn` LR Closed 3 Open 7) [0..9]
-- [3,4,5,6]
isIn :: Ord a => a -> Interval a -> Bool
x `isIn` (L oc l) = comparator oc l x
x `isIn` (R oc r) = comparator oc x r
x `isIn` (LR loc l roc r) = (x `isIn` L loc l) && (x `isIn` R roc r)
-- | left-closed right-open interval
lcro :: Ord a => a -> a -> Interval a
lcro l r = LR Closed l Open r
fromInterval :: Ord a => Interval a -> UpperBoundedPred a
fromInterval x = UpperBoundedPred (`isIn` x) (fmap snd (intervalUpperBound x))
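-- A purely illustrative check (not from the original file), in the style of
-- the 'isIn' example above:
--
-- >>> applyUpperBoundedPred (fromInterval (lcro 3 7)) 5
-- True
-- >>> intervalUpperBound (lcro 3 7)
-- Just (Open,7)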
intervalUpperBound :: Ord a => Interval a -> Maybe (OC, a)
intervalUpperBound (R oc r) = Just (oc, r)
intervalUpperBound (LR _ _ oc r) = Just (oc, r)
intervalUpperBound _ = Nothing
|
kmyk/proof-haskell
|
Data/UpperBoundedPred.hs
|
mit
| 1,576 | 0 | 8 | 347 | 623 | 333 | 290 | 34 | 1 |
solve a b l | a * b > l = 0
| b * c > l = (l `div` a - b) `div` a + 1
| otherwise = 1 + (solve a c l) + (solve b c l)
where c = a + b
main = print $ solve 1 100 l + 49
where l = (10 ^ 8) `div` 2
|
EdisonAlgorithms/ProjectEuler
|
vol4/198.hs
|
mit
| 228 | 0 | 10 | 99 | 155 | 80 | 75 | 6 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.WebKit.Output
-- Copyright : 2007-2011 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module IDE.Pane.WebKit.Output (
IDEOutput(..)
, OutputState(..)
, getOutputPane
, setOutput
, loadOutputUri
) where
import Graphics.UI.Frame.Panes
(RecoverablePane(..), PanePath, RecoverablePane, Pane(..))
import Graphics.UI.Gtk
(scrolledWindowSetShadowType, entryGetText, entryActivated,
boxPackStart, entrySetText, Entry, VBox, entryNew, vBoxNew,
postGUISync, scrolledWindowSetPolicy, scrolledWindowNew,
castToWidget, ScrolledWindow)
import IDE.Utils.GUIUtils
import Data.Typeable (Typeable)
import IDE.Core.Types (IDEAction, IDEM, IDE(..))
import Control.Monad.IO.Class (MonadIO(..))
import Graphics.UI.Frame.ViewFrame (getNotebook)
import IDE.Core.State
(modifyIDE_, postSyncIDE, reifyIDE, leksahOrPackageDir)
import Graphics.UI.Gtk.General.Enums
(ShadowType(..), Packing(..), PolicyType(..))
#ifdef WEBKITGTK
import Graphics.UI.Gtk
(toggleActionActive, castToMenuItem, actionCreateMenuItem,
toggleActionNew, menuShellAppend, toggleActionSetActive,
menuItemActivate, menuItemNewWithLabel, eventModifier,
eventKeyName, keyPressEvent, focusInEvent, containerAdd,
Modifier(..), after)
import Graphics.UI.Gtk.WebKit.Types (WebView(..))
import Graphics.UI.Gtk.WebKit.WebView
(populatePopup, webViewGoBack, webViewZoomOut, webViewZoomIn,
webViewLoadString, webViewZoomLevel, webViewReload, webViewNew,
webViewLoadUri)
import System.Glib.Attributes (AttrOp(..), set, get)
import System.Glib.Signals (on)
import IDE.Core.State (reflectIDE)
import Graphics.UI.Editor.Basics (Connection(..))
import Text.Show.Pretty
(HtmlOpts(..), defaultHtmlOpts, valToHtmlPage, parseValue, getDataDir)
import System.FilePath ((</>))
import IDE.Pane.WebKit.Inspect (getInspectPane, IDEInspect(..))
import Graphics.UI.Gtk.WebKit.WebSettings
(webSettingsEnableDeveloperExtras)
import Graphics.UI.Gtk.WebKit.WebInspector (inspectWebView)
#endif
import Data.IORef (writeIORef, newIORef, readIORef, IORef)
import Control.Applicative ((<$>))
import System.Log.Logger (debugM)
import Graphics.UI.Gtk.WebKit.WebView
(webViewSetWebSettings, webViewGetWebSettings, webViewGetInspector,
loadCommitted, webViewGetUri)
import Graphics.UI.Gtk.WebKit.WebFrame (webFrameGetUri)
import Data.Text (Text)
import qualified Data.Text as T (unpack, pack)
data IDEOutput = IDEOutput {
vbox :: VBox
, uriEntry :: Entry
#ifdef WEBKITGTK
, webView :: WebView
, alwaysHtmlRef :: IORef Bool
#else
, outState :: IORef OutputState
#endif
} deriving Typeable
data OutputState = OutputState {
zoom :: Float
, alwaysHtml :: Bool
} deriving(Eq,Ord,Read,Show,Typeable)
instance Pane IDEOutput IDEM
where
primPaneName _ = "Out"
getAddedIndex _ = 0
getTopWidget = castToWidget . vbox
paneId b = "*Out"
instance RecoverablePane IDEOutput OutputState IDEM where
saveState p = liftIO $
#ifdef WEBKITGTK
do zoom <- webView p `get` webViewZoomLevel
alwaysHtml <- readIORef $ alwaysHtmlRef p
return (Just OutputState{..})
#else
Just <$> readIORef (outState p)
#endif
recoverState pp OutputState {..} = do
nb <- getNotebook pp
mbPane <- buildPane pp nb builder
case mbPane of
Nothing -> return ()
Just p -> liftIO $
#ifdef WEBKITGTK
do webView p `set` [webViewZoomLevel := zoom]
writeIORef (alwaysHtmlRef p) alwaysHtml
#else
writeIORef (outState p) OutputState {..}
#endif
return mbPane
builder pp nb windows = reifyIDE $ \ ideR -> do
vbox <- vBoxNew False 0
uriEntry <- entryNew
entrySetText uriEntry ("http://" :: Text)
scrolledView <- scrolledWindowNew Nothing Nothing
scrolledWindowSetShadowType scrolledView ShadowIn
boxPackStart vbox uriEntry PackNatural 0
boxPackStart vbox scrolledView PackGrow 0
#ifdef WEBKITGTK
webView <- webViewNew
alwaysHtmlRef <- newIORef False
containerAdd scrolledView webView
#else
        outState <- newIORef OutputState {zoom = 1.0, alwaysHtml = False}
#endif
scrolledWindowSetPolicy scrolledView PolicyAutomatic PolicyAutomatic
let out = IDEOutput {..}
#ifdef WEBKITGTK
cid1 <- after webView focusInEvent $ do
liftIO $ reflectIDE (makeActive out) ideR
return True
webView `set` [webViewZoomLevel := 2.0]
cid2 <- on webView keyPressEvent $ do
key <- eventKeyName
mod <- eventModifier
liftIO $ case (key, mod) of
("plus", [Shift,Control]) -> webViewZoomIn webView >> return True
("minus",[Control]) -> webViewZoomOut webView >> return True
("BackSpace", [Shift]) -> webViewGoBack webView >> return True
_ -> return False
cid3 <- on webView populatePopup $ \ menu -> do
alwaysHtml <- readIORef alwaysHtmlRef
action <- toggleActionNew "AlwaysHTML" (__"Always HTML") Nothing Nothing
item <- castToMenuItem <$> actionCreateMenuItem action
item `on` menuItemActivate $ writeIORef alwaysHtmlRef $ not alwaysHtml
toggleActionSetActive action alwaysHtml
menuShellAppend menu item
return ()
cid4 <- on uriEntry entryActivated $ do
uri <- entryGetText uriEntry
webViewLoadUri webView uri
(`reflectIDE` ideR) $ modifyIDE_ (\ide -> ide {autoURI = Just uri})
cid5 <- on webView loadCommitted $ \ frame -> do
mbUri <- webFrameGetUri frame
valueUri <- getValueUri
case mbUri of
Just uri | uri /= valueUri -> do
entrySetText uriEntry uri
(`reflectIDE` ideR) $ modifyIDE_ (\ide -> ide {autoURI = Just uri})
Just _ -> do
(`reflectIDE` ideR) $ modifyIDE_ (\ide -> ide {autoURI = Nothing})
Nothing -> return ()
cid6 <- uriEntry `after` focusInEvent $ do
liftIO $ reflectIDE (makeActive out) ideR
return True
settings <- webViewGetWebSettings webView
settings `set` [webSettingsEnableDeveloperExtras := True]
webViewSetWebSettings webView settings
inspector <- webViewGetInspector webView
cid7 <- on inspector inspectWebView $ \view -> (`reflectIDE` ideR) $ do
inspectPane <- getInspectPane Nothing
displayPane inspectPane False
return $ inspectView inspectPane
return (Just out, [ConnectC cid1, ConnectC cid2, ConnectC cid3, ConnectC cid4, ConnectC cid5, ConnectC cid6])
#else
return (Just out, [])
#endif
getOutputPane :: Maybe PanePath -> IDEM IDEOutput
getOutputPane Nothing = forceGetPane (Right "*Out")
getOutputPane (Just pp) = forceGetPane (Left pp)
getValueUri :: IO Text
getValueUri = do
dataDir <- map fixSep <$> leksahOrPackageDir "pretty-show" getDataDir
return . T.pack $ "file://"
++ (case dataDir of
('/':_) -> dataDir
_ -> '/':dataDir)
++ "/value.html"
where
fixSep '\\' = '/'
fixSep x = x
setOutput :: Text -> Text -> IDEAction
setOutput command str =
#ifdef WEBKITGTK
do out <- getOutputPane Nothing
liftIO $ do
entrySetText (uriEntry out) (T.pack $ show command)
uri <- getValueUri
alwaysHtml <- readIORef $ alwaysHtmlRef out
let view = webView out
html = case (alwaysHtml, parseValue $ T.unpack str) of
(False, Just value) -> T.pack $ valToHtmlPage defaultHtmlOpts value
_ -> str
webViewLoadString view html Nothing uri
#else
return ()
#endif
loadOutputUri :: FilePath -> IDEAction
loadOutputUri uri =
#ifdef WEBKITGTK
do out <- getOutputPane Nothing
let view = webView out
liftIO $ do
entrySetText (uriEntry out) (T.pack uri)
currentUri <- webViewGetUri view
if Just (T.pack uri) == currentUri
then webViewReload view
else webViewLoadUri view (T.pack uri)
#else
return ()
#endif
|
ChrisLane/leksah
|
src/IDE/Pane/WebKit/Output.hs
|
gpl-2.0
| 9,042 | 0 | 27 | 2,416 | 2,255 | 1,216 | 1,039 | 94 | 3 |
module Char where
isSpace :: Char -> Bool
isSpace c =
i == ord ' ' || i == ord '\t' || i == ord '\n' ||
i == ord '\r' || i == ord '\f' || i == ord '\v'
where
i = ord c
isUpper :: Char -> Bool
isUpper c = ord c >= ord 'A' && ord c <= ord 'Z'
isLower :: Char -> Bool
isLower c = ord c >= ord 'a' && ord c <= ord 'z'
isDigit :: Char -> Bool
isDigit c = ord c >= ord '0' && ord c <= ord '9'
isAlpha :: Char -> Bool
isAlpha c = isUpper c || isLower c
isAlphaNum :: Char -> Bool
isAlphaNum c = isAlpha c || isDigit c
toUpper :: Char -> Char
toUpper c
| isLower c = chr ( ord c - ord 'a' + ord 'A' )
| otherwise = c
toLower :: Char -> Char
toLower c
| isUpper c = chr ( ord c - ord 'A' + ord 'a' )
| otherwise = c
|
roberth/uu-helium
|
lib/simple/Char.hs
|
gpl-3.0
| 756 | 0 | 16 | 230 | 386 | 180 | 206 | 24 | 1 |
module ErrEmptyWhere2 where
class X a where
y :: a -> a
|
roberth/uu-helium
|
test/typeClassesStatic/ErrEmptyWhere2.hs
|
gpl-3.0
| 59 | 0 | 5 | 15 | 22 | 12 | 10 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Lambda.UpdateFunctionConfiguration
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the configuration parameters for the specified Lambda function
-- by using the values provided in the request. You provide only the
-- parameters you want to change. This operation must only be used on an
-- existing Lambda function and cannot be used to update the function\'s
-- code.
--
-- This operation requires permission for the
-- 'lambda:UpdateFunctionConfiguration' action.
--
-- /See:/ <http://docs.aws.amazon.com/lambda/latest/dg/API_UpdateFunctionConfiguration.html AWS API Reference> for UpdateFunctionConfiguration.
module Network.AWS.Lambda.UpdateFunctionConfiguration
(
-- * Creating a Request
updateFunctionConfiguration
, UpdateFunctionConfiguration
-- * Request Lenses
, ufcMemorySize
, ufcRole
, ufcHandler
, ufcTimeout
, ufcDescription
, ufcFunctionName
-- * Destructuring the Response
, functionConfiguration
, FunctionConfiguration
-- * Response Lenses
, fcMemorySize
, fcRuntime
, fcFunctionARN
, fcRole
, fcFunctionName
, fcCodeSize
, fcHandler
, fcTimeout
, fcLastModified
, fcDescription
) where
import Network.AWS.Lambda.Types
import Network.AWS.Lambda.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'updateFunctionConfiguration' smart constructor.
data UpdateFunctionConfiguration = UpdateFunctionConfiguration'
{ _ufcMemorySize :: !(Maybe Nat)
, _ufcRole :: !(Maybe Text)
, _ufcHandler :: !(Maybe Text)
, _ufcTimeout :: !(Maybe Nat)
, _ufcDescription :: !(Maybe Text)
, _ufcFunctionName :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateFunctionConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ufcMemorySize'
--
-- * 'ufcRole'
--
-- * 'ufcHandler'
--
-- * 'ufcTimeout'
--
-- * 'ufcDescription'
--
-- * 'ufcFunctionName'
updateFunctionConfiguration
:: Text -- ^ 'ufcFunctionName'
-> UpdateFunctionConfiguration
updateFunctionConfiguration pFunctionName_ =
UpdateFunctionConfiguration'
{ _ufcMemorySize = Nothing
, _ufcRole = Nothing
, _ufcHandler = Nothing
, _ufcTimeout = Nothing
, _ufcDescription = Nothing
, _ufcFunctionName = pFunctionName_
}
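-- A hypothetical usage sketch (not part of the generated module), assuming the
-- usual lens operators (&) and (?~) are in scope:
--
-- @
-- updateFunctionConfiguration "myFunction" & ufcTimeout ?~ 10
-- @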
-- | The amount of memory, in MB, your Lambda function is given. AWS Lambda
-- uses this memory size to infer the amount of CPU allocated to your
-- function. Your function use-case determines your CPU and memory
-- requirements. For example, a database operation might need less memory
-- compared to an image processing function. The default value is 128 MB.
-- The value must be a multiple of 64 MB.
ufcMemorySize :: Lens' UpdateFunctionConfiguration (Maybe Natural)
ufcMemorySize = lens _ufcMemorySize (\ s a -> s{_ufcMemorySize = a}) . mapping _Nat;
-- | The Amazon Resource Name (ARN) of the IAM role that Lambda will assume
-- when it executes your function.
ufcRole :: Lens' UpdateFunctionConfiguration (Maybe Text)
ufcRole = lens _ufcRole (\ s a -> s{_ufcRole = a});
-- | The function that Lambda calls to begin executing your function. For
-- Node.js, it is the /module-name.export/ value in your function.
ufcHandler :: Lens' UpdateFunctionConfiguration (Maybe Text)
ufcHandler = lens _ufcHandler (\ s a -> s{_ufcHandler = a});
-- | The function execution time at which AWS Lambda should terminate the
-- function. Because the execution time has cost implications, we recommend
-- you set this value based on your expected execution time. The default is
-- 3 seconds.
ufcTimeout :: Lens' UpdateFunctionConfiguration (Maybe Natural)
ufcTimeout = lens _ufcTimeout (\ s a -> s{_ufcTimeout = a}) . mapping _Nat;
-- | A short user-defined function description. AWS Lambda does not use this
-- value. Assign a meaningful description as you see fit.
ufcDescription :: Lens' UpdateFunctionConfiguration (Maybe Text)
ufcDescription = lens _ufcDescription (\ s a -> s{_ufcDescription = a});
-- | The name of the Lambda function.
--
-- You can specify an unqualified function name (for example,
-- \"Thumbnail\") or you can specify Amazon Resource Name (ARN) of the
-- function (for example,
-- \"arn:aws:lambda:us-west-2:account-id:function:ThumbNail\"). AWS Lambda
-- also allows you to specify only the account ID qualifier (for example,
-- \"account-id:Thumbnail\"). Note that the length constraint applies only
-- to the ARN. If you specify only the function name, it is limited to 64
-- character in length.
ufcFunctionName :: Lens' UpdateFunctionConfiguration Text
ufcFunctionName = lens _ufcFunctionName (\ s a -> s{_ufcFunctionName = a});
instance AWSRequest UpdateFunctionConfiguration where
type Rs UpdateFunctionConfiguration =
FunctionConfiguration
request = putJSON lambda
response = receiveJSON (\ s h x -> eitherParseJSON x)
instance ToHeaders UpdateFunctionConfiguration where
toHeaders = const mempty
instance ToJSON UpdateFunctionConfiguration where
toJSON UpdateFunctionConfiguration'{..}
= object
(catMaybes
[("MemorySize" .=) <$> _ufcMemorySize,
("Role" .=) <$> _ufcRole,
("Handler" .=) <$> _ufcHandler,
("Timeout" .=) <$> _ufcTimeout,
("Description" .=) <$> _ufcDescription])
instance ToPath UpdateFunctionConfiguration where
toPath UpdateFunctionConfiguration'{..}
= mconcat
["/2015-03-31/functions/", toBS _ufcFunctionName,
"/versions/HEAD/configuration"]
instance ToQuery UpdateFunctionConfiguration where
toQuery = const mempty
|
fmapfmapfmap/amazonka
|
amazonka-lambda/gen/Network/AWS/Lambda/UpdateFunctionConfiguration.hs
|
mpl-2.0
| 6,508 | 0 | 11 | 1,320 | 821 | 499 | 322 | 101 | 1 |
-------------------------------------------------------------------------------------------
-- |
-- Module : Control.Morphism.Universal
-- Copyright : 2008 Edward Kmett
-- License : BSD3
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- Note the choice of which is universal and which is couniversal is chosen to
-- make the definitions consistent with limits and colimits.
-------------------------------------------------------------------------------------------
{-# LANGUAGE Rank2Types #-}
module Control.Morphism.Universal
( Couniversal(..), extractCouniversal, couniversalize
, couniversalIdentity
, Universal(..), extractUniversal, universalize
, universalIdentity
) where
import Control.Monad.Identity
data Couniversal a f x = Couniversal (a -> f x) (forall z. (a -> f z) -> x -> z)
extractCouniversal :: Couniversal a f x -> a -> f x
extractCouniversal (Couniversal f _) = f
couniversalize :: (a -> f z) -> Couniversal a f x -> x -> z
couniversalize f (Couniversal _ s) = s f
couniversalIdentity :: Couniversal a Identity a
couniversalIdentity = Couniversal Identity (runIdentity .)
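-- A purely illustrative example (not from the original module):
-- couniversalize (Identity . succ) couniversalIdentity 5 == 6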
data Universal a f x = Universal (f x -> a) (forall z. (f z -> a) -> z -> x)
extractUniversal :: Universal a f x -> f x -> a
extractUniversal (Universal f _) = f
universalize :: Universal a f x -> (f z -> a) -> z -> x
universalize (Universal _ s) f = s f
universalIdentity :: Universal a Identity a
universalIdentity = Universal runIdentity (. Identity)
|
urska19/MFP---Samodejno-racunanje-dvosmernih-preslikav
|
Control/Morphism/Universal.hs
|
apache-2.0
| 1,501 | 4 | 12 | 244 | 404 | 223 | 181 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedLists #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
module Database.DSH.VSL.Builtins where
import Control.Monad
import qualified Data.List as List
import qualified Data.List.NonEmpty as N
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.Sequence as S
import Database.Algebra.Dag.Common
import Database.DSH.Common.Impossible
import qualified Database.DSH.Common.Lang as L
import Database.DSH.Common.Nat
import Database.DSH.Common.QueryPlan
import qualified Database.DSH.Common.Type as T
import Database.DSH.Common.Vector
import Database.DSH.Common.VectorLang
import Database.DSH.VSL.Construct (VSLBuild)
import qualified Database.DSH.VSL.Construct as C
newtype DataNode = DataNode AlgNode
newtype MapNode = MapNode AlgNode
data SegMap = IDMap
| UnitMap RVec
| RMap RVec
deriving (Show)
data DelayedVec = DelayedVec
{ dvSegMap :: SegMap
, dvPhysVec :: DVec
} deriving (Show)
pattern MatVec :: DVec -> DelayedVec
pattern MatVec v = DelayedVec IDMap v
--------------------------------------------------------------------------------
-- Unary scalar operators
unOpL :: L.ScalarUnOp -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
unOpL o (VShape (DelayedVec m v) LCol) = do
vp <- C.project (TUnApp o TInput) v
return $ VShape (DelayedVec m vp) LCol
--------------------------------------------------------------------------------
-- Binary scalar operators
binOpL :: L.ScalarBinOp -> Shape DelayedVec -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
binOpL o (VShape dv1 LCol) (VShape dv2 LCol) = do
case (dvSegMap dv1, dvSegMap dv2) of
(RMap m1, RMap _) -> do
v <- C.align (dvPhysVec dv1) (dvPhysVec dv2)
v' <- C.project (TBinApp o (TTupElem First TInput) (TTupElem (Next First) TInput)) v
return $ VShape (DelayedVec (RMap m1) v') LCol
(UnitMap m1, UnitMap _) -> do
v <- C.align (dvPhysVec dv1) (dvPhysVec dv2)
v' <- C.project (TBinApp o (TTupElem First TInput) (TTupElem (Next First) TInput)) v
return $ VShape (DelayedVec (UnitMap m1) v') LCol
(IDMap, IDMap) -> do
v <- C.align (dvPhysVec dv1) (dvPhysVec dv2)
v' <- C.project (TBinApp o (TTupElem First TInput) (TTupElem (Next First) TInput)) v
return $ VShape (DelayedVec IDMap v') LCol
(RMap m1, IDMap) -> do
-- Materialize the left input
-- We do not need the replication vector because the layout is flat
(mv1, _) <- C.materialize m1 (dvPhysVec dv1)
v <- C.align mv1 (dvPhysVec dv2)
v' <- C.project (TBinApp o (TTupElem First TInput) (TTupElem (Next First) TInput)) v
return $ VShape (DelayedVec IDMap v') LCol
(IDMap, RMap m2) -> do
-- Materialize the right input
-- We do not need the replication vector because the layout is flat
(mv2, _) <- C.materialize m2 (dvPhysVec dv2)
v <- C.align (dvPhysVec dv1) mv2
v' <- C.project (TBinApp o (TTupElem First TInput) (TTupElem (Next First) TInput)) v
return $ VShape (DelayedVec IDMap v') LCol
--------------------------------------------------------------------------------
-- Tuple indexing
tupElemL :: TupleIndex -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
tupElemL i (VShape dv (LTuple ls)) = do
let l = ls !! (tupleIndex i - 1)
vp <- C.project (TTupElem i TInput) (dvPhysVec dv)
return $ VShape (dv { dvPhysVec = vp }) (l)
--------------------------------------------------------------------------------
-- Singleton list construction
sngL :: Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
sngL (VShape (DelayedVec m v) l) = do
vo <- C.project TIndex v
vi <- C.segment v
return $ VShape (DelayedVec m vo) (LNest (DelayedVec IDMap vi) l)
--------------------------------------------------------------------------------
-- Aggregation
aggrL :: (TExpr -> AggrFun TExpr) -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
aggrL afun (VShape dvo (LNest dvi _)) = do
let a = afun TInput
-- Aggregate the physical segments without considering the segment map.
va <- C.aggrseg a (dvPhysVec dvi)
-- To unbox, we need to materialize the inner vector. Crucially, we
-- materialize *after* aggregation.
(vm, _) <- materializeShape (dvi { dvPhysVec = va }) LCol
vu <- fst <$> defaultUnboxOp a (dvPhysVec dvo) vm
vp <- C.project (TTupElem (Next First) TInput) vu
return $ VShape (dvo { dvPhysVec = vp }) LCol
defaultUnboxOp :: AggrFun TExpr -> DVec -> DVec -> VSLBuild TExpr TExpr (DVec, RVec)
defaultUnboxOp (AggrSum t _) = C.unboxdefault (pure $ sumDefault t)
where
sumDefault T.IntT = L.IntV 0
sumDefault T.DecimalT = L.DecimalV 0
sumDefault T.DoubleT = L.DoubleV 0
defaultUnboxOp (AggrAny _) = C.unboxdefault (pure $ L.BoolV False)
defaultUnboxOp (AggrAll _) = C.unboxdefault (pure $ L.BoolV True)
defaultUnboxOp AggrCount = C.unboxdefault (pure $ L.IntV 0)
defaultUnboxOp (AggrCountDistinct _) = C.unboxdefault (pure $ L.IntV 0)
defaultUnboxOp _ = C.unboxsng
--------------------------------------------------------------------------------
-- Concatenation
concat :: Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
concat (VShape _ (LNest dv l)) = do
(v, l') <- materializeShape dv l
v' <- C.unsegment v
return $ VShape (MatVec v') l'
concatL :: Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
concatL (VShape dvo (LNest dvi l)) = do
-- Generate a segment map that merges segments of inner vectors and maps
-- them to the segment identifiers of 'dvi'
mm <- C.mergemap (dvPhysVec dvi)
-- Combine the segment map of the middle vector with the merge map
mm' <- case dvSegMap dvi of
IDMap -> return mm
RMap m -> C.updatemap m mm
UnitMap m -> C.updatemap m mm
-- Update the inner segment maps with the merge map to get rid of the middle
-- vector and align the outer with the inner vectors.
l' <- updateLayoutMaps (RMap mm') l
return $ VShape dvo l'
--------------------------------------------------------------------------------
-- Tuple Construction
tupleL :: [Shape DelayedVec] -> VSLBuild TExpr TExpr (Shape DelayedVec)
tupleL shapes =
case shapes of
(VShape (DelayedVec (UnitMap m) v) l) : shs
| all isUnitShape shs -> do
let (vs, ls) = unzip $ map (\(VShape (DelayedVec _ v') l') -> (v', l')) shs
(va, es) <- alignVectors $ v :| vs
vd <- C.project (TMkTuple es) va
return $ VShape (DelayedVec (UnitMap m) vd) (LTuple $ l:ls)
(VShape dv1 l1) : shs -> do
(v1, l1') <- materializeShape dv1 l1
(vs, ls) <- unzip <$> mapM (\(VShape dv l) -> materializeShape dv l) shs
(va, es) <- alignVectors $ v1 :| vs
vd <- C.project (TMkTuple es) va
return $ VShape (MatVec vd) (LTuple $ l1' : ls)
alignVectors :: NonEmpty DVec -> VSLBuild TExpr TExpr (DVec, NonEmpty TExpr)
alignVectors (v :| []) = pure (v, pure TInput)
alignVectors (v :| (v' : vs)) = do
(vsa, es) <- alignVectors $ v' :| vs
va <- C.align v vsa
pure (va, TInpFirst N.<| fmap (mergeExpr TInpSecond) es)
isUnitShape :: Shape DelayedVec -> Bool
isUnitShape (VShape (DelayedVec (UnitMap _) _) _) = True
isUnitShape _ = False
--------------------------------------------------------------------------------
-- Singleton list conversion
onlyL :: Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
onlyL (VShape dvo (LNest dvi li)) = do
(vim, lim) <- materializeShape dvi li
(vu, r) <- C.unboxsng (dvPhysVec dvo) vim
lim' <- updateLayoutMaps (RMap r) lim
return $ VShape (dvo { dvPhysVec = vu }) lim'
--------------------------------------------------------------------------------
-- Unary vectorization macros
type UnVectMacro = DelayedVec -> Layout DelayedVec -> VSLBuild TExpr TExpr (DelayedVec, Layout DelayedVec)
--------------------------------------------------------------------------------
-- number
number :: UnVectMacro
number dv l = do
v <- C.number (dvPhysVec dv)
return (dv { dvPhysVec = v }, LTuple [l, LCol])
--------------------------------------------------------------------------------
-- distinct
nub :: UnVectMacro
nub dv l = do
v <- C.distinct (dvPhysVec dv)
return (dv { dvPhysVec = v }, l)
--------------------------------------------------------------------------------
-- reverse
reverse :: UnVectMacro
reverse dv l = do
(v, r) <- C.reverse (dvPhysVec dv)
l' <- updateLayoutMaps (RMap r) l
return (dv { dvPhysVec = v }, l')
--------------------------------------------------------------------------------
-- sort
sort :: UnVectMacro
sort dv (LTuple [xl, _]) = do
-- Sort by all sorting columns from the right tuple component
(v', r) <- C.sort (TTupElem (Next First) TInput) (dvPhysVec dv)
-- After sorting, discard the sorting criteria columns
v'' <- C.project (TTupElem First TInput) v'
l' <- updateLayoutMaps (RMap r) xl
return (dv { dvPhysVec = v''}, l')
--------------------------------------------------------------------------------
-- group
group :: UnVectMacro
group dv (LTuple [xl, gl]) = do
(vo, vi, r) <- C.group (TTupElem (Next First) TInput) (dvPhysVec dv)
vi' <- C.project (TTupElem First TInput) vi
xl' <- updateLayoutMaps (RMap r) xl
return (dv { dvPhysVec = vo }, LTuple [gl, LNest (DelayedVec IDMap vi') xl'])
--------------------------------------------------------------------------------
-- ext
constLyt :: VecVal -> Layout a
constLyt (VVTuple vs) = LTuple $ map constLyt vs
constLyt (VVScalar _) = LCol
constLyt VTIndex = LCol
rep :: VecVal -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
rep val (VShape dv _) = do
v' <- C.project (valExpr val) (dvPhysVec dv)
pure $ VShape (dv { dvPhysVec = v'}) (constLyt val)
--------------------------------------------------------------------------------
-- Filtering
restrict :: UnVectMacro
restrict dv (LTuple [l, LCol]) = do
-- Filter the vector according to the boolean column
(v, r) <- C.select (TTupElem (Next First) TInput) (dvPhysVec dv)
v' <- C.project (TTupElem First TInput) v
l' <- updateLayoutMaps (RMap r) l
return (dv { dvPhysVec = v' }, l')
--------------------------------------------------------------------------------
-- Applying unary vectorization macros
unMacroL :: UnVectMacro -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
unMacroL macro (VShape dvo (LNest dvi l)) = VShape dvo <$> uncurry LNest <$> macro dvi l
--------------------------------------------------------------------------------
-- Binary Vectorization Macros
type BinVectMacro = DelayedVec -> Layout DelayedVec
-> DelayedVec -> Layout DelayedVec
-> VSLBuild TExpr TExpr (DelayedVec, Layout DelayedVec)
--------------------------------------------------------------------------------
-- Applying binary vectorization macros
binMacroL :: BinVectMacro -> Shape DelayedVec -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
binMacroL macro (VShape dvo (LNest dvi1 l1)) (VShape _ (LNest dvi2 l2)) =
VShape dvo <$> uncurry LNest <$> macro dvi1 l1 dvi2 l2
--------------------------------------------------------------------------------
-- Binary Vectorization Macros
append :: BinVectMacro
append dv1 l1 dv2 l2 = do
(v1, l1') <- materializeShape dv1 l1
(v2, l2') <- materializeShape dv2 l2
(v, r1, r2) <- C.append v1 v2
l1'' <- updateLayoutMaps (RMap r1) l1'
l2'' <- updateLayoutMaps (RMap r2) l2'
l <- appendLayouts l1'' l2''
return $ (MatVec v, l)
zip :: BinVectMacro
zip dv1 l1 dv2 l2 = do
(v1, l1') <- materializeShape dv1 l1
(v2, l2') <- materializeShape dv2 l2
(v, r1, r2) <- C.zip v1 v2
l1'' <- updateLayoutMaps (RMap r1) l1'
l2'' <- updateLayoutMaps (RMap r2) l2'
return (MatVec v, LTuple [l1'', l2''])
nestjoin :: L.JoinPredicate L.ScalarExpr -> BinVectMacro
nestjoin p dv1 l1 (DelayedVec m2 v2) l2 = do
(v1, l1') <- materializeShape dv1 l1
case m2 of
IDMap -> do
(v, r1, r2) <- C.nestjoinMM (scalarExpr <$> p) v1 v2
vo <- C.project (TMkTuple [TInput, TIndex]) v1
l1'' <- updateLayoutMaps (RMap r1) l1'
l2' <- updateLayoutMaps (RMap r2) l2
return ( DelayedVec IDMap vo
, LTuple [l1', LNest (DelayedVec IDMap v) (LTuple [l1'', l2'])])
UnitMap _ -> do
(v, r1, r2) <- C.nestjoinMU (scalarExpr <$> p) v1 v2
vo <- C.project (TMkTuple [TInput, TIndex]) v1
l1'' <- updateLayoutMaps (RMap r1) l1'
l2' <- updateLayoutMaps (RMap r2) l2
return ( DelayedVec IDMap vo
, LTuple [l1', LNest (DelayedVec IDMap v) (LTuple [l1'', l2'])])
RMap m -> do
(v2', l2') <- materializeShape (DelayedVec (RMap m) v2) l2
(v, r1, r2) <- C.nestjoinMM (scalarExpr <$> p) v1 v2'
vo <- C.project (TMkTuple [TInput, TIndex]) v1
l1'' <- updateLayoutMaps (RMap r1) l1'
l2'' <- updateLayoutMaps (RMap r2) l2'
return ( DelayedVec IDMap vo
, LTuple [l1', LNest (DelayedVec IDMap v) (LTuple [l1'', l2''])])
thetajoin :: L.JoinPredicate L.ScalarExpr -> BinVectMacro
thetajoin p dv1 l1 dv2 l2 =
case (dvSegMap dv1, dvSegMap dv2) of
(UnitMap m1, UnitMap _) -> do
(v, r1, r2) <- C.thetajoinMM (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
l2' <- updateLayoutMaps (RMap r2) l2
return (DelayedVec (UnitMap m1) v, LTuple [l1', l2'])
(IDMap, UnitMap _) -> do
(v, r1, r2) <- C.thetajoinMU (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
l2' <- updateLayoutMaps (RMap r2) l2
return (DelayedVec IDMap v, LTuple [l1', l2'])
(UnitMap _, IDMap) -> do
(v, r1, r2) <- C.thetajoinUM (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
l2' <- updateLayoutMaps (RMap r2) l2
return (DelayedVec IDMap v, LTuple [l1', l2'])
_ -> do
(v1', l1') <- materializeShape dv1 l1
(v2', l2') <- materializeShape dv2 l2
(v, r1, r2) <- C.thetajoinMM (scalarExpr <$> p) v1' v2'
l1'' <- updateLayoutMaps (RMap r1) l1'
l2'' <- updateLayoutMaps (RMap r2) l2'
return (DelayedVec IDMap v, LTuple [l1'', l2''])
cartproduct :: BinVectMacro
cartproduct dv1 l1 dv2 l2 = do
(v1', l1') <- materializeShape dv1 l1
(v2', l2') <- materializeShape dv2 l2
(v, r1, r2) <- C.cartproduct v1' v2'
l1'' <- updateLayoutMaps (RMap r1) l1'
l2'' <- updateLayoutMaps (RMap r2) l2'
return (DelayedVec IDMap v, LTuple [l1'', l2''])
antijoin :: L.JoinPredicate L.ScalarExpr -> BinVectMacro
antijoin p dv1 l1 dv2 l2 =
case (dvSegMap dv1, dvSegMap dv2) of
(UnitMap m1, UnitMap _) -> do
(v, r1) <- C.antijoinMM (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
return (DelayedVec (UnitMap m1) v, l1')
(IDMap, UnitMap _) -> do
(v, r1) <- C.antijoinMU (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
return (DelayedVec IDMap v, l1')
(UnitMap _, IDMap) -> do
(v, r1) <- C.antijoinUM (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
return (DelayedVec IDMap v, l1')
_ -> do
(v1', l1') <- materializeShape dv1 l1
(v2', _) <- materializeShape dv2 l2
(v, r1) <- C.antijoinMM (scalarExpr <$> p) v1' v2'
l1'' <- updateLayoutMaps (RMap r1) l1'
return (DelayedVec IDMap v, l1'')
semijoin :: L.JoinPredicate L.ScalarExpr -> BinVectMacro
semijoin p dv1 l1 dv2 l2 =
case (dvSegMap dv1, dvSegMap dv2) of
(UnitMap m1, UnitMap _) -> do
(v, r1) <- C.semijoinMM (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
return (DelayedVec (UnitMap m1) v, l1')
(IDMap, UnitMap _) -> do
(v, r1) <- C.semijoinMU (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
return (DelayedVec IDMap v, l1')
(UnitMap _, IDMap) -> do
(v, r1) <- C.semijoinUM (scalarExpr <$> p) (dvPhysVec dv1) (dvPhysVec dv2)
l1' <- updateLayoutMaps (RMap r1) l1
return (DelayedVec IDMap v, l1')
_ -> do
(v1', l1') <- materializeShape dv1 l1
(v2', _) <- materializeShape dv2 l2
(v, r1) <- C.semijoinMM (scalarExpr <$> p) v1' v2'
l1'' <- updateLayoutMaps (RMap r1) l1'
return (DelayedVec IDMap v, l1'')
groupjoin :: L.JoinPredicate L.ScalarExpr -> L.NE (AggrFun TExpr) -> BinVectMacro
groupjoin p as dv1 l1 dv2 l2 =
case (dvSegMap dv1, dvSegMap dv2) of
(UnitMap m1, UnitMap _) -> do
v <- C.groupjoinMM (scalarExpr <$> p) as (dvPhysVec dv1) (dvPhysVec dv2)
return (DelayedVec (UnitMap m1) v, mkLyt l1)
(IDMap, UnitMap _) -> do
v <- C.groupjoinMU (scalarExpr <$> p) as (dvPhysVec dv1) (dvPhysVec dv2)
return (DelayedVec IDMap v, mkLyt l1)
(UnitMap _, IDMap) -> do
v <- C.groupjoinUM (scalarExpr <$> p) as (dvPhysVec dv1) (dvPhysVec dv2)
return (DelayedVec IDMap v, mkLyt l1)
_ -> do
(v1', l1') <- materializeShape dv1 l1
(v2', _) <- materializeShape dv2 l2
v <- C.groupjoinMM (scalarExpr <$> p) as v1' v2'
return (DelayedVec IDMap v, mkLyt l1')
where
mkLyt leftLyt = LTuple $ leftLyt : map (const LCol) (N.toList $ L.getNE as)
--------------------------------------------------------------------------------
-- Ternary Vectorization Macros
type TerVectMacro = DelayedVec -> Layout DelayedVec
-> DelayedVec -> Layout DelayedVec
-> DelayedVec -> Layout DelayedVec
-> VSLBuild TExpr TExpr (DelayedVec, Layout DelayedVec)
terMacroL :: TerVectMacro
-> Shape DelayedVec
-> Shape DelayedVec
-> Shape DelayedVec
-> VSLBuild TExpr TExpr (Shape DelayedVec)
terMacroL macro (VShape dvo (LNest dvi1 l1)) (VShape _ (LNest dvi2 l2)) (VShape _ (LNest dvi3 l3)) =
VShape dvo <$> uncurry LNest <$> macro dvi1 l1 dvi2 l2 dvi3 l3
--------------------------------------------------------------------------------
-- Conditionals
combine :: TerVectMacro
combine dvb LCol dv1 l1 dv2 l2 = do
(vb, _) <- materializeShape dvb LCol
(v1, l1') <- materializeShape dv1 l1
(v2, l2') <- materializeShape dv2 l2
(v, r1, r2) <- C.combine vb v1 v2
l1'' <- updateLayoutMaps (RMap r1) l1'
l2'' <- updateLayoutMaps (RMap r2) l2'
l <- appendLayouts l1'' l2''
return (MatVec v, l)
--------------------------------------------------------------------------------
-- Distribution/Replication
dist :: Shape DelayedVec -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
dist (VShape (DelayedVec IDMap v1) l1) (VShape dv2 _) = do
outerVec <- C.project TIndex (dvPhysVec dv2)
innerMap <- UnitMap <$> C.unitmap (dvPhysVec dv2)
return $ VShape (dv2 { dvPhysVec = outerVec }) (LNest (DelayedVec innerMap v1) l1)
dist _ _ = error "VSL.Vectorize.dist"
distL :: Shape DelayedVec -> Shape DelayedVec -> VSLBuild TExpr TExpr (Shape DelayedVec)
distL (VShape dv1 l1) (VShape dvo (LNest dv2 l2)) = do
(v1, l1') <- materializeShape dv1 l1
(v2, l2') <- materializeShape dv2 l2
(v, r) <- C.replicateseg v1 v2
l1'' <- updateLayoutMaps (RMap r) l1'
-- Keep only payload columns from the left (outer vector)
let l = LTuple [l1'', l2']
VShape dv' lf <- tupElemL First $ VShape (MatVec v) l
return $ VShape dvo (LNest dv' lf)
--------------------------------------------------------------------------------
materializeShape :: DelayedVec -> Layout DelayedVec -> VSLBuild TExpr TExpr (DVec, Layout DelayedVec)
materializeShape (DelayedVec sm v) l =
case sm of
IDMap -> return (v, l)
UnitMap m -> do
(v', r) <- C.materialize m v
l' <- updateLayoutMaps (RMap r) l
return (v', l')
RMap m -> do
(v', r) <- C.materialize m v
l' <- updateLayoutMaps (RMap r) l
return (v', l')
updateLayoutMaps :: SegMap -> Layout DelayedVec -> VSLBuild TExpr TExpr (Layout DelayedVec)
updateLayoutMaps newMap = go
where
updateDelayedVec (DelayedVec oldMap v) = DelayedVec <$> updateSegMap newMap oldMap <*> pure v
go (LNest dv lyt) = LNest <$> updateDelayedVec dv <*> pure lyt
go (LTuple lyts) = LTuple <$> traverse (updateLayoutMaps newMap) lyts
go LCol = pure LCol
updateSegMap :: SegMap -> SegMap -> VSLBuild TExpr TExpr SegMap
updateSegMap (RMap mapUpdate) (RMap oldMap) = RMap <$> C.updatemap mapUpdate oldMap
updateSegMap (RMap mapUpdate) (UnitMap _) = UnitMap <$> C.updateunit mapUpdate
updateSegMap (RMap mapUpdate) IDMap = pure $ RMap mapUpdate
updateSegMap _ _ = error "updateSegMap"
appendLayouts :: Layout DelayedVec -> Layout DelayedVec -> VSLBuild TExpr TExpr (Layout DelayedVec)
appendLayouts LCol LCol = return LCol
appendLayouts (LNest dv1 l1) (LNest dv2 l2) = do
(v1, l1') <- materializeShape dv1 l1
(v2, l2') <- materializeShape dv2 l2
(v, r1, r2) <- C.append v1 v2
l1'' <- updateLayoutMaps (RMap r1) l1'
l2'' <- updateLayoutMaps (RMap r2) l2'
l <- appendLayouts l1'' l2''
return $ LNest (MatVec v) l
appendLayouts (LTuple ls1) (LTuple ls2) =
LTuple <$> zipWithM appendLayouts ls1 ls2
--------------------------------------------------------------------------------
-- Construction of base tables and literal tables
-- | Create an SL reference to a base table.
dbTable :: String -> L.BaseTableSchema -> VSLBuild TExpr TExpr (Shape DelayedVec)
dbTable n schema = do
tab <- C.tableref n schema
-- Single-column tables are represented by a flat list and map to
-- a flat one-column layout. Multi-column tables map to a list of
-- tuples and the corresponding tuple layout.
let lyt = case L.tableCols schema of
_ N.:| [] -> LCol
cs -> LTuple $ map (const LCol) $ N.toList cs
return $ VShape (DelayedVec IDMap tab) lyt
--------------------------------------------------------------------------------
-- Shredding literal values
shredList :: [L.Val] -> Layout (PType, S.Seq SegD) -> (S.Seq VecVal, Layout (PType, S.Seq SegD))
shredList vs lyt = List.foldl' go (S.empty, lyt) vs
where
go (vvs, l) v = let (vv, l') = shredValue v l
in (vvs S.|> vv, l')
shredValue :: L.Val -> Layout (PType, S.Seq SegD) -> (VecVal, Layout (PType, S.Seq SegD))
shredValue (L.ListV vs) (LNest (ty, segs) lyt) =
let (seg, lyt') = shredList vs lyt
in (VTIndex, LNest (ty, segs S.|> seg) lyt')
shredValue (L.TupleV vs) (LTuple lyts) =
let (vvs, ls) = unzip $ zipWith shredValue vs lyts
in (VVTuple vvs, LTuple ls)
shredValue (L.ScalarV v) LCol =
(VVScalar v, LCol)
shredValue _ _ = $impossible
shredType :: T.Type -> Layout (PType, S.Seq SegD)
shredType (T.ScalarT _) = LCol
shredType (T.TupleT ts) = LTuple $ map shredType ts
shredType (T.ListT t) = LNest (payloadType t, S.empty) (shredType t)
payloadType :: T.Type -> PType
payloadType (T.ScalarT t) = PScalarT t
payloadType (T.TupleT ts) = PTupleT (N.fromList $ map payloadType ts)
payloadType (T.ListT _) = PIndexT
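-- For illustration (informally): a literal of type [(Int, Decimal)] shreds to
-- a layout of the shape
--
-- > LNest (PTupleT [PScalarT IntT, PScalarT DecimalT], segs) (LTuple [LCol, LCol])
--
-- i.e. one nesting level whose payload is a two-column tuple.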
literalVectors :: Layout (PType, S.Seq SegD) -> VSLBuild TExpr TExpr (Layout DelayedVec)
literalVectors lyt = traverse go lyt
where
go (ty, segs) = DelayedVec IDMap <$> C.lit (ty, Segs segs)
literalShape :: Shape (PType, S.Seq SegD) -> VSLBuild TExpr TExpr (Shape DelayedVec)
literalShape (VShape (ty, segs) lyt) = do
let seg = if S.null segs then $impossible else S.index segs 0
shapeVec <- C.lit (ty, UnitSeg seg)
vecLyt <- literalVectors lyt
return $ VShape (DelayedVec IDMap shapeVec) vecLyt
shredLiteral :: T.Type -> L.Val -> VSLBuild TExpr TExpr (Shape DelayedVec)
shredLiteral (T.ListT t) (L.ListV vs) = literalShape $ VShape (payloadType t, S.singleton seg) lyt
where
initLyt = shredType t
(seg, lyt) = shredList vs initLyt
shredLiteral _ _ = $impossible
|
ulricha/dsh
|
src/Database/DSH/VSL/Builtins.hs
|
bsd-3-clause
| 25,491 | 0 | 21 | 6,476 | 8,961 | 4,466 | 4,495 | 431 | 5 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
{-| Use this module to either:
* match `Text` with light-weight backtracking patterns, or:
* parse structured values from `Text`.
Example usage:
>>> :set -XOverloadedStrings
>>> match ("can" <|> "cat") "cat"
["cat"]
>>> match ("can" <|> "cat") "dog"
[]
>>> match (decimal `sepBy` ",") "1,2,3"
[[1,2,3]]
This pattern has unlimited backtracking, and will return as many solutions
as possible:
>>> match (prefix (star anyChar)) "123"
["123","12","1",""]
Use @do@ notation to structure more complex patterns:
>>> :{
let bit = ("0" *> pure False) <|> ("1" *> pure True) :: Pattern Bool;
portableBitMap = do
{ "P1"
; width <- spaces1 *> decimal
; height <- spaces1 *> decimal
; count width (count height (spaces1 *> bit))
};
in match (prefix portableBitMap) "P1\n2 2\n0 0\n1 0\n"
:}
[[[False,False],[True,False]]]
-}
module Turtle.Pattern (
-- * Pattern
Pattern
, match
-- * Primitive patterns
, anyChar
, eof
-- * Character patterns
, dot
, satisfy
, char
, notChar
, text
, asciiCI
, oneOf
, noneOf
, space
, spaces
, spaces1
, tab
, newline
, crlf
, upper
, lower
, alphaNum
, letter
, digit
, hexDigit
, octDigit
-- * Numbers
, decimal
, signed
-- * Combinators
, prefix
, suffix
, has
, invert
, once
, star
, plus
, selfless
, choice
, count
, upperBounded
, bounded
, option
, between
, skip
, within
, fixed
, sepBy
, sepBy1
-- * High-efficiency primitives
, chars
, chars1
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State
import Data.Char
import Data.List (foldl')
import Data.Monoid (Monoid(..), (<>))
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as Text
-- | A fully backtracking pattern that parses an @\'a\'@ from some `Text`
newtype Pattern a = Pattern { runPattern :: StateT Text [] a }
deriving (Functor, Applicative, Monad, Alternative, MonadPlus)
instance Monoid a => Monoid (Pattern a) where
mempty = pure mempty
mappend = liftA2 mappend
-- | Pattern forms a semiring, this is the closest approximation
instance Monoid a => Num (Pattern a) where
fromInteger n = Pattern (lift (replicate (fromInteger n) mempty))
(+) = (<|>)
(*) = (<>)
instance (a ~ Text) => IsString (Pattern a) where
fromString str = text (Text.pack str)
{-| Match a `Pattern` against a `Text` input, returning all possible solutions
The `Pattern` must match the entire `Text`
-}
match :: Pattern a -> Text -> [a]
match p = evalStateT (runPattern (p <* eof))
{-| Match any character
>>> match anyChar "1"
"1"
>>> match anyChar ""
""
-}
anyChar :: Pattern Char
anyChar = Pattern (do
Just (c, cs) <- fmap Text.uncons get
put cs
return c )
{-| Matches the end of input
>>> match eof "1"
[]
>>> match eof ""
[()]
-}
eof :: Pattern ()
eof = Pattern (do
True <- fmap Text.null get
return () )
-- | Synonym for `anyChar`
dot :: Pattern Char
dot = anyChar
{-| Match any character that satisfies the given predicate
>>> match (satisfy (== '1')) "1"
"1"
>>> match (satisfy (== '2')) "1"
""
-}
satisfy :: (Char -> Bool) -> Pattern Char
satisfy predicate = do
c <- anyChar
guard (predicate c)
return c
{-| Match a specific character
>>> match (char '1') "1"
"1"
>>> match (char '2') "1"
""
-}
char :: Char -> Pattern Char
char c = satisfy (== c)
{-| Match any character except the given one
>>> match (notChar '2') "1"
"1"
>>> match (notChar '1') "1"
""
-}
notChar :: Char -> Pattern Char
notChar c = satisfy (/= c)
{-| Match a specific string
>>> match (text "123") "123"
["123"]
You can also omit the `text` function if you enable the @OverloadedStrings@
extension:
>>> match "123" "123"
["123"]
-}
text :: Text -> Pattern Text
text before' = Pattern (do
txt <- get
let (before, after) = Text.splitAt (Text.length before') txt
guard (before == before')
put after
return before)
{-| Match a specific string in a case-insensitive way
This only handles ASCII strings
>>> match (asciiCI "abc") "ABC"
["ABC"]
-}
asciiCI :: Text -> Pattern Text
asciiCI before' = Pattern (do
txt <- get
let (before, after) = Text.splitAt (Text.length before') txt
guard (lowerChars before == lowerChars before')
put after
return before )
where
lowerChars = Text.map lowerChar
lowerChar c | 'A' <= c && c <= 'Z' = chr (ord c + ord 'a' - ord 'A')
| otherwise = c
{-| Match any one of the given characters
>>> match (oneOf "1a") "1"
"1"
>>> match (oneOf "2a") "1"
""
-}
oneOf :: [Char] -> Pattern Char
oneOf cs = satisfy (`elem` cs)
{-| Match anything other than the given characters
>>> match (noneOf "2a") "1"
"1"
>>> match (noneOf "1a") "1"
""
-}
noneOf :: [Char] -> Pattern Char
noneOf cs = satisfy (`notElem` cs)
{-| Match a whitespace character
>>> match space " "
" "
>>> match space "1"
""
-}
space :: Pattern Char
space = satisfy isSpace
{-| Match zero or more whitespace characters
>>> match spaces " "
[" "]
>>> match spaces ""
[""]
-}
spaces :: Pattern Text
spaces = star space
{-| Match one or more whitespace characters
>>> match spaces1 " "
[" "]
>>> match spaces1 ""
[]
-}
spaces1 :: Pattern Text
spaces1 = plus space
{-| Match the tab character (@\'\t\'@)
>>> match tab "\t"
"\t"
>>> match tab " "
""
-}
tab :: Pattern Char
tab = char '\t'
{-| Match the newline character (@\'\n\'@)
>>> match newline "\n"
"\n"
>>> match newline " "
""
-}
newline :: Pattern Char
newline = char '\n'
{-| Matches a carriage return (@\'\r\'@) followed by a newline (@\'\n\'@)
>>> match crlf "\r\n"
["\r\n"]
>>> match crlf "\n\r"
[]
-}
crlf :: Pattern Text
crlf = text "\r\n"
{-| Match an uppercase letter
>>> match upper "A"
"A"
>>> match upper "a"
""
-}
upper :: Pattern Char
upper = satisfy isUpper
{-| Match a lowercase letter
>>> match lower "a"
"a"
>>> match lower "A"
""
-}
lower :: Pattern Char
lower = satisfy isLower
{-| Match a letter or digit
>>> match alphaNum "1"
"1"
>>> match alphaNum "a"
"a"
>>> match alphaNum "A"
"A"
>>> match alphaNum "."
""
-}
alphaNum :: Pattern Char
alphaNum = satisfy isAlphaNum
{-| Match a letter
>>> match letter "A"
"A"
>>> match letter "a"
"a"
>>> match letter "1"
""
-}
letter :: Pattern Char
letter = satisfy isLetter
{-| Match a digit
>>> match digit "1"
"1"
>>> match digit "a"
""
-}
digit :: Pattern Char
digit = satisfy isDigit
{-| Match a hexadecimal digit
>>> match hexDigit "1"
"1"
>>> match hexDigit "A"
"A"
>>> match hexDigit "a"
"a"
>>> match hexDigit "g"
""
-}
hexDigit :: Pattern Char
hexDigit = satisfy isHexDigit
{-| Match an octal digit
>>> match octDigit "1"
"1"
>>> match octDigit "9"
""
-}
octDigit :: Pattern Char
octDigit = satisfy isOctDigit
{-| Match an unsigned decimal number
>>> match decimal "123"
[123]
>>> match decimal "-123"
[]
-}
decimal :: Num n => Pattern n
decimal = do
ds <- some digit
return (foldl' step 0 ds)
where
step n d = n * 10 + fromIntegral (ord d - ord '0')
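    -- e.g. "123" folds as ((0*10 + 1)*10 + 2)*10 + 3 == 123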
{-| Transform a numeric parser to accept an optional leading @\'+\'@ or @\'-\'@
sign
>>> match (signed decimal) "+123"
[123]
>>> match (signed decimal) "-123"
[-123]
>>> match (signed decimal) "123"
[123]
-}
signed :: Num a => Pattern a -> Pattern a
signed p = do
sign <- (char '+' *> pure id) <|> (char '-' *> pure negate) <|> (pure id)
fmap sign p
{-| @(`invert` p)@ succeeds if @p@ fails and fails if @p@ succeeds
>>> match (invert "A") "A"
[]
>>> match (invert "A") "B"
[()]
-}
invert :: Pattern a -> Pattern ()
invert p = Pattern (StateT (\str -> case runStateT (runPattern p) str of
[] -> [((), "")]
_ -> [] ))
{-| Match a `Char`, but return `Text`
>>> match (once (char '1')) "1"
["1"]
>>> match (once (char '1')) ""
[]
-}
once :: Pattern Char -> Pattern Text
once p = fmap Text.singleton p
{-| Use this to match the prefix of a string
>>> match "A" "ABC"
[]
>>> match (prefix "A") "ABC"
["A"]
-}
prefix :: Pattern a -> Pattern a
prefix p = p <* chars
{-| Use this to match the suffix of a string
>>> match "C" "ABC"
[]
>>> match (suffix "C") "ABC"
["C"]
-}
suffix :: Pattern a -> Pattern a
suffix p = chars *> p
{-| Use this to match the interior of a string
>>> match "B" "ABC"
[]
>>> match (has "B") "ABC"
["B"]
-}
has :: Pattern a -> Pattern a
has p = chars *> p <* chars
{-| Parse 0 or more occurrences of the given character
>>> match (star anyChar) "123"
["123"]
>>> match (star anyChar) ""
[""]
See also: `chars`
-}
star :: Pattern Char -> Pattern Text
star p = fmap Text.pack (many p)
{-| Parse 1 or more occurrences of the given character
>>> match (plus digit) "123"
["123"]
>>> match (plus digit) ""
[]
See also: `chars1`
-}
plus :: Pattern Char -> Pattern Text
plus p = fmap Text.pack (some p)
{-| Patterns that match multiple times are greedy by default, meaning that they
try to match as many times as possible. The `selfless` combinator makes a
pattern match as few times as possible
This only changes the order in which solutions are returned, by prioritizing
less greedy solutions
>>> match (prefix (selfless (some anyChar))) "123"
["1","12","123"]
>>> match (prefix (some anyChar) ) "123"
["123","12","1"]
-}
selfless :: Pattern a -> Pattern a
selfless p = Pattern (StateT (\s -> reverse (runStateT (runPattern p) s)))
{-| Apply the patterns in the list in order, until one of them succeeds
>>> match (choice ["cat", "dog", "egg"]) "egg"
["egg"]
>>> match (choice ["cat", "dog", "egg"]) "cat"
["cat"]
>>> match (choice ["cat", "dog", "egg"]) "fan"
[]
-}
choice :: [Pattern a] -> Pattern a
choice = msum
{-| Apply the given pattern a fixed number of times, collecting the results
>>> match (count 3 anyChar) "123"
["123"]
>>> match (count 4 anyChar) "123"
[]
-}
count :: Int -> Pattern a -> Pattern [a]
count = replicateM
{-| Apply the given pattern 0 or more times, up to a given bound,
collecting the results
>>> match (upperBounded 5 dot) "123"
["123"]
>>> match (upperBounded 2 dot) "123"
[]
>>> match ((,) <$> upperBounded 2 dot <*> chars) "123"
[("12","3"),("1","23")]
-}
upperBounded :: Int -> Pattern a -> Pattern [a]
upperBounded n p
| n <= 0 = mempty
| n == 1 = fmap pure p
| otherwise = (:) <$> p <*> option (upperBounded (n - 1) p)
{-| Apply the given pattern a number of times restricted by given
lower and upper bounds, collecting the results
>>> match (bounded 2 5 "cat") "catcatcat"
[["cat","cat","cat"]]
>>> match (bounded 2 5 "cat") "cat"
[]
>>> match (bounded 2 5 "cat") "catcatcatcatcatcat"
[]
`bounded` could be implemented naively as follows:
> bounded m n p = do
> x <- choice (map pure [m..n])
> count x p
-}
bounded :: Int -> Int -> Pattern a -> Pattern [a]
bounded m n p
| m == n = count m p
| m < n = (++) <$> count m p <*> option (upperBounded (n - m) p)
| otherwise = mzero
{-| Transform a parser so that it succeeds with an empty value instead of failing
See also: `optional`
>>> match (option "1" <> "2") "12"
["12"]
>>> match (option "1" <> "2") "2"
["2"]
-}
option :: Monoid a => Pattern a -> Pattern a
option p = p <|> mempty
{-| @(between open close p)@ matches @\'p\'@ in between @\'open\'@ and
@\'close\'@
>>> match (between (char '(') (char ')') (star anyChar)) "(123)"
["123"]
>>> match (between (char '(') (char ')') (star anyChar)) "(123"
[]
-}
between :: Pattern a -> Pattern b -> Pattern c -> Pattern c
between open close p = open *> p <* close
{-| Discard the pattern's result
>>> match (skip anyChar) "1"
[()]
>>> match (skip anyChar) ""
[]
-}
skip :: Pattern a -> Pattern ()
skip = void
{-| Restrict the pattern to consume no more than the given number of characters
>>> match (within 2 decimal) "12"
[12]
>>> match (within 2 decimal) "1"
[1]
>>> match (within 2 decimal) "123"
[]
-}
within :: Int -> Pattern a -> Pattern a
within n p = Pattern (do
txt <- get
let (before, after) = Text.splitAt n txt
put before
a <- runPattern p
modify (<> after)
return a )
{-| Require the pattern to consume exactly the given number of characters
>>> match (fixed 2 decimal) "12"
[12]
>>> match (fixed 2 decimal) "1"
[]
-}
fixed :: Int -> Pattern a -> Pattern a
fixed n p = do
txt <- Pattern get
guard (Text.length txt >= n)
within n (p <* eof)
{-| @p `sepBy` sep@ matches zero or more occurrences of @p@ separated by @sep@
>>> match (decimal `sepBy` char ',') "1,2,3"
[[1,2,3]]
>>> match (decimal `sepBy` char ',') ""
[[]]
-}
sepBy :: Pattern a -> Pattern b -> Pattern [a]
p `sepBy` sep = (p `sepBy1` sep) <|> pure []
{-| @p `sepBy1` sep@ matches one or more occurrences of @p@ separated by @sep@
>>> match (decimal `sepBy1` ",") "1,2,3"
[[1,2,3]]
>>> match (decimal `sepBy1` ",") ""
[]
-}
sepBy1 :: Pattern a -> Pattern b -> Pattern [a]
p `sepBy1` sep = (:) <$> p <*> many (sep *> p)
-- | Like @star dot@ or @star anyChar@, except more efficient
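--
-- For example (following the module's doctest style):
--
-- >>> match chars "abc"
-- ["abc"]
--
-- >>> match chars ""
-- [""]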
chars :: Pattern Text
chars = Pattern (StateT (\txt ->
reverse (zip (Text.inits txt) (Text.tails txt)) ))
-- | Like @plus dot@ or @plus anyChar@, except more efficient
chars1 :: Pattern Text
chars1 = Text.cons <$> dot <*> chars
|
bitemyapp/Haskell-Turtle-Library
|
src/Turtle/Pattern.hs
|
bsd-3-clause
| 13,573 | 0 | 15 | 3,134 | 2,507 | 1,299 | 1,208 | 212 | 2 |
import System.Plugins
import API
-- an example where we just want to load an object and run it
main = do
m_v <- load_ "../Null.o" ["../api",".."] "resource"
t <- load_ "../Dep.o" ["../api"] "resource"
case m_v of
LoadFailure err -> error (unlines err)
LoadSuccess m v -> do putStrLn ( show (a v) ) ; unloadAll m -- unloads Null.o but not Dep.o since we're still using it.
case t of
LoadFailure err -> error (unlines err)
LoadSuccess m v -> do putStrLn ( show (a v) ) ; unloadAll m
|
abuiles/turbinado-blog
|
tmp/dependencies/hs-plugins-1.3.1/testsuite/unloadAll/null/prog/Main.hs
|
bsd-3-clause
| 539 | 0 | 16 | 148 | 175 | 82 | 93 | 11 | 3 |
-- Example taken from Lee Pike's paper on SmartCheck:
-- https://github.com/leepike/SmartCheck/blob/master/paper/paper.pdf
--
-- An enumerative testing library, using a standard enumeration for integers,
-- won't be able to find a counter-example.
--
-- However, if we tweak the enumeration to include extremely large integers
-- (32767, 32766, 32765...) intercalated with extremely small integers a
-- counter-example is found quickly.
--
-- See the 'X' type from "Test.LeanCheck.Utils.Types" for more details.
--
-- The technique shown here could be applied in other enumerative property-based
-- testing tools.
import Test.LeanCheck
import Test.LeanCheck.Utils
import Data.Int
type I = [Int16]
data T = T I I I I I
deriving Show
toList :: T -> [[Int16]]
toList (T i j k l m) = [i,j,k,l,m]
pre :: T -> Bool
pre t = all ((< 256) . sum) (toList t)
post :: T -> Bool
post t = (sum . concat) (toList t) < 5 * 256
prop :: T -> Bool
prop t = pre t ==> post t
instance Listable T where
tiers = cons5 makeT
where
makeT (Xs i) (Xs j) (Xs k) (Xs l) (Xs m) = T i j k l m
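    -- 'Xs' (see Test.LeanCheck.Utils.Types) makes the element enumeration
    -- interleave extreme values (e.g. 32767 = maxBound :: Int16) with the
    -- usual small ones, which is what makes the counter-example reachable.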
main :: IO ()
main = do
checkFor 10000 $ prop
|
rudymatela/llcheck
|
eg/overflow.hs
|
bsd-3-clause
| 1,138 | 0 | 10 | 235 | 319 | 175 | 144 | 20 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.ListEntitiesForPolicy
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Lists all users, groups, and roles that the specified managed policy is
-- attached to.
--
-- You can use the optional 'EntityFilter' parameter to limit the results to a
-- particular type of entity (users, groups, or roles). For example, to list
-- only the roles that are attached to the specified policy, set 'EntityFilter' to 'Role'.
--
-- You can paginate the results using the 'MaxItems' and 'Marker' parameters.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_ListEntitiesForPolicy.html>
module Network.AWS.IAM.ListEntitiesForPolicy
(
-- * Request
ListEntitiesForPolicy
-- ** Request constructor
, listEntitiesForPolicy
-- ** Request lenses
, lefpEntityFilter
, lefpMarker
, lefpMaxItems
, lefpPathPrefix
, lefpPolicyArn
-- * Response
, ListEntitiesForPolicyResponse
-- ** Response constructor
, listEntitiesForPolicyResponse
-- ** Response lenses
, lefprIsTruncated
, lefprMarker
, lefprPolicyGroups
, lefprPolicyRoles
, lefprPolicyUsers
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
data ListEntitiesForPolicy = ListEntitiesForPolicy
{ _lefpEntityFilter :: Maybe EntityType
, _lefpMarker :: Maybe Text
, _lefpMaxItems :: Maybe Nat
, _lefpPathPrefix :: Maybe Text
, _lefpPolicyArn :: Text
} deriving (Eq, Read, Show)
-- | 'ListEntitiesForPolicy' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lefpEntityFilter' @::@ 'Maybe' 'EntityType'
--
-- * 'lefpMarker' @::@ 'Maybe' 'Text'
--
-- * 'lefpMaxItems' @::@ 'Maybe' 'Natural'
--
-- * 'lefpPathPrefix' @::@ 'Maybe' 'Text'
--
-- * 'lefpPolicyArn' @::@ 'Text'
--
listEntitiesForPolicy :: Text -- ^ 'lefpPolicyArn'
-> ListEntitiesForPolicy
listEntitiesForPolicy p1 = ListEntitiesForPolicy
{ _lefpPolicyArn = p1
, _lefpEntityFilter = Nothing
, _lefpPathPrefix = Nothing
, _lefpMarker = Nothing
, _lefpMaxItems = Nothing
}
-- | The entity type to use for filtering the results.
--
-- For example, when 'EntityFilter' is 'Role', only the roles that are attached to
-- the specified policy are returned. This parameter is optional. If it is not
-- included, all attached entities (users, groups, and roles) are returned.
lefpEntityFilter :: Lens' ListEntitiesForPolicy (Maybe EntityType)
lefpEntityFilter = lens _lefpEntityFilter (\s a -> s { _lefpEntityFilter = a })
-- | Use this only when paginating results, and only in a subsequent request after
-- you've received a response where the results are truncated. Set it to the
-- value of the 'Marker' element in the response you just received.
lefpMarker :: Lens' ListEntitiesForPolicy (Maybe Text)
lefpMarker = lens _lefpMarker (\s a -> s { _lefpMarker = a })
-- | Use this only when paginating results to indicate the maximum number of
-- entities you want in the response. If there are additional entities beyond
-- the maximum you specify, the 'IsTruncated' response element is 'true'. This
-- parameter is optional. If you do not include it, it defaults to 100.
lefpMaxItems :: Lens' ListEntitiesForPolicy (Maybe Natural)
lefpMaxItems = lens _lefpMaxItems (\s a -> s { _lefpMaxItems = a }) . mapping _Nat
-- | The path prefix for filtering the results. This parameter is optional. If it
-- is not included, it defaults to a slash (/), listing all entities.
lefpPathPrefix :: Lens' ListEntitiesForPolicy (Maybe Text)
lefpPathPrefix = lens _lefpPathPrefix (\s a -> s { _lefpPathPrefix = a })
lefpPolicyArn :: Lens' ListEntitiesForPolicy Text
lefpPolicyArn = lens _lefpPolicyArn (\s a -> s { _lefpPolicyArn = a })
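-- A minimal request sketch (hypothetical policy ARN), using the constructor
-- and lenses above together with the standard lens operators '&' and '?~':
--
-- > listEntitiesForPolicy "arn:aws:iam::123456789012:policy/MyPolicy"
-- >     & lefpMaxItems ?~ 100
-- >     & lefpPathPrefix ?~ "/division_abc/"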
data ListEntitiesForPolicyResponse = ListEntitiesForPolicyResponse
{ _lefprIsTruncated :: Maybe Bool
, _lefprMarker :: Maybe Text
, _lefprPolicyGroups :: List "member" PolicyGroup
, _lefprPolicyRoles :: List "member" PolicyRole
, _lefprPolicyUsers :: List "member" PolicyUser
} deriving (Eq, Read, Show)
-- | 'ListEntitiesForPolicyResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lefprIsTruncated' @::@ 'Maybe' 'Bool'
--
-- * 'lefprMarker' @::@ 'Maybe' 'Text'
--
-- * 'lefprPolicyGroups' @::@ ['PolicyGroup']
--
-- * 'lefprPolicyRoles' @::@ ['PolicyRole']
--
-- * 'lefprPolicyUsers' @::@ ['PolicyUser']
--
listEntitiesForPolicyResponse :: ListEntitiesForPolicyResponse
listEntitiesForPolicyResponse = ListEntitiesForPolicyResponse
{ _lefprPolicyGroups = mempty
, _lefprPolicyUsers = mempty
, _lefprPolicyRoles = mempty
, _lefprIsTruncated = Nothing
, _lefprMarker = Nothing
}
-- | A flag that indicates whether there are more entities to list. If your
-- results were truncated, you can make a subsequent pagination request using
-- the 'Marker' request parameter to retrieve more entities in the list.
lefprIsTruncated :: Lens' ListEntitiesForPolicyResponse (Maybe Bool)
lefprIsTruncated = lens _lefprIsTruncated (\s a -> s { _lefprIsTruncated = a })
-- | If 'IsTruncated' is 'true', this element is present and contains the value to use
-- for the 'Marker' parameter in a subsequent pagination request.
lefprMarker :: Lens' ListEntitiesForPolicyResponse (Maybe Text)
lefprMarker = lens _lefprMarker (\s a -> s { _lefprMarker = a })
-- | A list of groups that the policy is attached to.
lefprPolicyGroups :: Lens' ListEntitiesForPolicyResponse [PolicyGroup]
lefprPolicyGroups =
lens _lefprPolicyGroups (\s a -> s { _lefprPolicyGroups = a })
. _List
-- | A list of roles that the policy is attached to.
lefprPolicyRoles :: Lens' ListEntitiesForPolicyResponse [PolicyRole]
lefprPolicyRoles = lens _lefprPolicyRoles (\s a -> s { _lefprPolicyRoles = a }) . _List
-- | A list of users that the policy is attached to.
lefprPolicyUsers :: Lens' ListEntitiesForPolicyResponse [PolicyUser]
lefprPolicyUsers = lens _lefprPolicyUsers (\s a -> s { _lefprPolicyUsers = a }) . _List
instance ToPath ListEntitiesForPolicy where
toPath = const "/"
instance ToQuery ListEntitiesForPolicy where
toQuery ListEntitiesForPolicy{..} = mconcat
[ "EntityFilter" =? _lefpEntityFilter
, "Marker" =? _lefpMarker
, "MaxItems" =? _lefpMaxItems
, "PathPrefix" =? _lefpPathPrefix
, "PolicyArn" =? _lefpPolicyArn
]
instance ToHeaders ListEntitiesForPolicy
instance AWSRequest ListEntitiesForPolicy where
type Sv ListEntitiesForPolicy = IAM
type Rs ListEntitiesForPolicy = ListEntitiesForPolicyResponse
request = post "ListEntitiesForPolicy"
response = xmlResponse
instance FromXML ListEntitiesForPolicyResponse where
parseXML = withElement "ListEntitiesForPolicyResult" $ \x -> ListEntitiesForPolicyResponse
<$> x .@? "IsTruncated"
<*> x .@? "Marker"
<*> x .@? "PolicyGroups" .!@ mempty
<*> x .@? "PolicyRoles" .!@ mempty
<*> x .@? "PolicyUsers" .!@ mempty
|
romanb/amazonka
|
amazonka-iam/gen/Network/AWS/IAM/ListEntitiesForPolicy.hs
|
mpl-2.0
| 8,080 | 0 | 20 | 1,639 | 1,041 | 622 | 419 | 103 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Snap.Internal.Http.Server.Session
( httpAcceptLoop
, httpSession
, snapToServerHandler
, BadRequestException(..)
, LengthRequiredException(..)
, TerminateSessionException(..)
) where
------------------------------------------------------------------------------
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
import Control.Arrow (first, second)
import Control.Concurrent (MVar, newEmptyMVar, putMVar, readMVar)
import Control.Exception (AsyncException, Exception, Handler (..), SomeException (..))
import qualified Control.Exception as E
import Control.Monad (join, unless, void, when, (>=>))
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Unsafe as S
import qualified Data.CaseInsensitive as CI
import Data.Int (Int64)
import Data.IORef (IORef, newIORef, readIORef, writeIORef)
import Data.List (foldl')
import qualified Data.Map as Map
import Data.Maybe (fromJust, fromMaybe, isNothing)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (mconcat)
#endif
import Data.Monoid ((<>))
import Data.Time.Format (formatTime)
import Data.Typeable (Typeable)
import Data.Version (showVersion)
import Data.Word (Word64, Word8)
import Foreign.Marshal.Utils (copyBytes)
import Foreign.Ptr (Ptr, castPtr, plusPtr)
import Foreign.Storable (pokeByteOff)
#if MIN_VERSION_time(1,5,0)
import Data.Time.Format (defaultTimeLocale)
#else
import System.Locale (defaultTimeLocale)
#endif
------------------------------------------------------------------------------
import Data.ByteString.Builder (Builder, byteString, char8, stringUtf8)
import Data.ByteString.Builder.Extra (flush)
import Data.ByteString.Builder.Internal (Buffer, defaultChunkSize, newBuffer)
import Data.ByteString.Builder.Prim (FixedPrim, primFixed, (>$<), (>*<))
import Data.ByteString.Builder.Prim.Internal (fixedPrim, size)
import System.IO.Streams (InputStream, OutputStream)
import qualified System.IO.Streams as Streams
------------------------------------------------------------------------------
import qualified Paths_snap_server as V
import Snap.Core (EscapeSnap (..))
import Snap.Core (Snap, runSnap)
import Snap.Internal.Core (fixupResponse)
import Snap.Internal.Http.Server.Clock (getClockTime)
import Snap.Internal.Http.Server.Common (eatException)
import Snap.Internal.Http.Server.Date (getDateString)
import Snap.Internal.Http.Server.Parser (IRequest (..), getStdConnection, getStdContentLength, getStdContentType, getStdCookie, getStdHost, getStdTransferEncoding, parseCookie, parseRequest, parseUrlEncoded, readChunkedTransferEncoding, writeChunkedTransferEncoding)
import Snap.Internal.Http.Server.Thread (SnapThread)
import qualified Snap.Internal.Http.Server.Thread as Thread
import Snap.Internal.Http.Server.TimeoutManager (TimeoutManager)
import qualified Snap.Internal.Http.Server.TimeoutManager as TM
import Snap.Internal.Http.Server.Types (AcceptFunc (..), PerSessionData (..), SendFileHandler, ServerConfig (..), ServerHandler)
import Snap.Internal.Http.Types (Cookie (..), HttpVersion, Method (..), Request (..), Response (..), ResponseBody (..), StreamProc, getHeader, headers, rspBodyToEnum, updateHeaders)
import Snap.Internal.Parsing (unsafeFromNat)
import Snap.Types.Headers (Headers)
import qualified Snap.Types.Headers as H
import System.IO.Unsafe (unsafePerformIO)
------------------------------------------------------------------------------
data TerminateSessionException = TerminateSessionException SomeException
deriving (Typeable, Show)
instance Exception TerminateSessionException
data BadRequestException = BadRequestException
deriving (Typeable, Show)
instance Exception BadRequestException
data LengthRequiredException = LengthRequiredException
deriving (Typeable, Show)
instance Exception LengthRequiredException
------------------------------------------------------------------------------
snapToServerHandler :: Snap a -> ServerHandler hookState
snapToServerHandler !snap !serverConfig !perSessionData !req =
runSnap snap logErr tickle req
where
logErr = _logError serverConfig . byteString
tickle = _twiddleTimeout perSessionData
------------------------------------------------------------------------------
mAX_HEADERS_SIZE :: Int64
mAX_HEADERS_SIZE = 256 * 1024
------------------------------------------------------------------------------
-- | For each cpu, we store:
-- * An accept thread
-- * A TimeoutManager
data EventLoopCpu = EventLoopCpu
{ _acceptThread :: SnapThread
, _timeoutManager :: TimeoutManager
}
------------------------------------------------------------------------------
-- | The main Snap webserver loop. Given a server handler, configuration, and a
-- function to accept new connections, runs an HTTP loop forever over N
-- threads, until a ThreadKilled exception is received.
httpAcceptLoop :: forall hookState .
ServerHandler hookState -- ^ server handler
-> ServerConfig hookState -- ^ server config
-> AcceptFunc -- ^ accept function
-> IO ()
httpAcceptLoop serverHandler serverConfig acceptFunc = runLoops
where
--------------------------------------------------------------------------
logError = _logError serverConfig
nLoops = _numAcceptLoops serverConfig
defaultTimeout = _defaultTimeout serverConfig
--------------------------------------------------------------------------
logException :: Exception e => e -> IO ()
logException e =
logError $
mconcat [ byteString "got exception in httpAcceptFunc: "
, fromShow e
]
--------------------------------------------------------------------------
runLoops = E.bracket (mapM newLoop [0 .. (nLoops - 1)])
(mapM_ killLoop)
(mapM_ waitLoop)
--------------------------------------------------------------------------
loop :: TimeoutManager
-> (forall a. IO a -> IO a)
-> IO ()
loop tm loopRestore = eatException go
where
----------------------------------------------------------------------
handlers =
[ Handler $ \(e :: AsyncException) -> loopRestore (E.throwIO $! e)
, Handler $ \(e :: SomeException) -> logException e >> go
]
go = do
(sendFileHandler, localAddress, localPort, remoteAddress,
remotePort, readEnd, writeEnd,
cleanup) <- runAcceptFunc acceptFunc loopRestore
`E.catches` handlers
let threadLabel = S.concat [ "snap-server: client "
, remoteAddress
, ":"
, S.pack $ show remotePort
]
thMVar <- newEmptyMVar
th <- TM.register tm threadLabel $ \restore ->
eatException $
prep thMVar sendFileHandler localAddress localPort remoteAddress
remotePort readEnd writeEnd cleanup restore
putMVar thMVar th
go
prep :: MVar TM.TimeoutThread
-> SendFileHandler
-> ByteString
-> Int
-> ByteString
-> Int
-> InputStream ByteString
-> OutputStream ByteString
-> IO ()
-> (forall a . IO a -> IO a)
-> IO ()
prep thMVar sendFileHandler localAddress localPort remoteAddress
remotePort readEnd writeEnd cleanup restore =
do
connClose <- newIORef False
newConn <- newIORef True
let twiddleTimeout = unsafePerformIO $ do
th <- readMVar thMVar
return $! TM.modify th
let cleanupTimeout = readMVar thMVar >>= TM.cancel
let !psd = PerSessionData connClose
twiddleTimeout
newConn
sendFileHandler
localAddress
localPort
remoteAddress
remotePort
readEnd
writeEnd
restore (session psd)
`E.finally` cleanup
`E.finally` cleanupTimeout
--------------------------------------------------------------------------
session psd = do
buffer <- newBuffer defaultChunkSize
httpSession buffer serverHandler serverConfig psd
--------------------------------------------------------------------------
newLoop cpu = E.mask_ $ do
-- TODO(greg): move constant into config
tm <- TM.initialize (fromIntegral defaultTimeout) 2 getClockTime
let threadLabel = S.concat [ "snap-server: accept loop #"
, S.pack $ show cpu
]
tid <- Thread.forkOn threadLabel cpu $ loop tm
return $! EventLoopCpu tid tm
--------------------------------------------------------------------------
waitLoop (EventLoopCpu tid _) = Thread.wait tid
--------------------------------------------------------------------------
killLoop ev = E.uninterruptibleMask_ $ do
Thread.cancelAndWait tid
TM.stop tm
where
tid = _acceptThread ev
tm = _timeoutManager ev
------------------------------------------------------------------------------
httpSession :: forall hookState .
Buffer
-> ServerHandler hookState
-> ServerConfig hookState
-> PerSessionData
-> IO ()
httpSession !buffer !serverHandler !config !sessionData = loop
where
--------------------------------------------------------------------------
defaultTimeout = _defaultTimeout config
isSecure = _isSecure config
localHostname = _localHostname config
logAccess = _logAccess config
logError = _logError config
newRequestHook = _onNewRequest config
parseHook = _onParse config
userHandlerFinishedHook = _onUserHandlerFinished config
dataFinishedHook = _onDataFinished config
exceptionHook = _onException config
escapeHook = _onEscape config
--------------------------------------------------------------------------
forceConnectionClose = _forceConnectionClose sessionData
isNewConnection = _isNewConnection sessionData
localAddress = _localAddress sessionData
localPort = _localPort sessionData
remoteAddress = _remoteAddress sessionData
remotePort = _remotePort sessionData
readEnd = _readEnd sessionData
tickle f = _twiddleTimeout sessionData f
writeEnd = _writeEnd sessionData
sendfileHandler = _sendfileHandler sessionData
--------------------------------------------------------------------------
mkBuffer :: IO (OutputStream Builder)
mkBuffer = Streams.unsafeBuilderStream (return buffer) writeEnd
--------------------------------------------------------------------------
-- Begin HTTP session processing.
loop :: IO ()
loop = do
-- peek first to ensure startHook gets generated at the right time.
readEndAtEof >>= (flip unless $ do
hookState <- newRequestHook sessionData >>= newIORef
-- parse HTTP request
req <- receiveRequest
parseHook hookState req
processRequest hookState req)
------------------------------------------------------------------------------
readEndAtEof = Streams.read readEnd >>=
maybe (return True)
(\c -> if S.null c
then readEndAtEof
else Streams.unRead c readEnd >> return False)
{-# INLINE readEndAtEof #-}
--------------------------------------------------------------------------
-- Read the HTTP request from the socket, parse it, and pre-process it.
receiveRequest :: IO Request
receiveRequest = {-# SCC "httpSession/receiveRequest" #-} do
readEnd' <- Streams.throwIfProducesMoreThan mAX_HEADERS_SIZE readEnd
parseRequest readEnd' >>= toRequest
{-# INLINE receiveRequest #-}
--------------------------------------------------------------------------
toRequest :: IRequest -> IO Request
toRequest !ireq = {-# SCC "httpSession/toRequest" #-} do
-- HTTP spec section 14.23: "All Internet-based HTTP/1.1 servers MUST
-- respond with a 400 (Bad Request) status code to any HTTP/1.1 request
-- message which lacks a Host header field."
--
-- Here we interpret this slightly more liberally: if an absolute URI
-- including a hostname is given in the request line, we'll take that
-- if there's no Host header.
--
-- For HTTP/1.0 requests, we pick the configured local hostname by
-- default.
host <- maybe (if isHttp11
then badRequestWithNoHost
else return localHostname)
return mbHost
-- Call setupReadEnd, which handles transfer-encoding: chunked or
-- content-length restrictions, etc
!readEnd' <- setupReadEnd
-- Parse an application/x-www-form-urlencoded form, if it was sent
(!readEnd'', postParams) <- parseForm readEnd'
let allParams = Map.unionWith (++) queryParams postParams
-- Decide whether the connection should be closed after the response is
-- sent (stored in the forceConnectionClose IORef).
checkConnectionClose version $ getStdConnection stdHdrs
-- The request is now ready for processing.
return $! Request host
remoteAddress
remotePort
localAddress
localPort
localHost
isSecure
hdrs
readEnd''
mbCL
method
version
cookies
pathInfo
contextPath
uri
queryString
allParams
queryParams
postParams
where
----------------------------------------------------------------------
!method = iMethod ireq
!version = iHttpVersion ireq
!stdHdrs = iStdHeaders ireq
!hdrs = iRequestHeaders ireq
!isHttp11 = version >= (1, 1)
!mbHost = getStdHost stdHdrs
!localHost = fromMaybe localHostname mbHost
mbCL = unsafeFromNat <$>
getStdContentLength stdHdrs
!isChunked = (CI.mk <$> getStdTransferEncoding stdHdrs)
== Just "chunked"
cookies = fromMaybe [] (getStdCookie stdHdrs >>= parseCookie)
contextPath = "/"
!uri = iRequestUri ireq
queryParams = parseUrlEncoded queryString
emptyParams = Map.empty
----------------------------------------------------------------------
(pathInfo, queryString) = first dropLeadingSlash . second (S.drop 1)
$ S.break (== '?') uri
----------------------------------------------------------------------
dropLeadingSlash s = if S.null s
then s
else let !a = S.unsafeIndex s 0
in if a == 47 -- 47 == '/'
then S.unsafeDrop 1 s
else s
{-# INLINE dropLeadingSlash #-}
----------------------------------------------------------------------
-- | We have to transform the read end of the socket, to limit the
-- number of bytes read to the content-length, to decode chunked
-- transfer encoding, or to immediately yield EOF if the request body
-- is empty.
setupReadEnd :: IO (InputStream ByteString)
setupReadEnd =
if isChunked
then readChunkedTransferEncoding readEnd
else maybe (const noContentLength)
(Streams.takeBytes . fromIntegral) mbCL readEnd
{-# INLINE setupReadEnd #-}
----------------------------------------------------------------------
-- | If a request is not in chunked transfer encoding and lacks a
-- content-length, the request body is empty.
noContentLength :: IO (InputStream ByteString)
noContentLength = do
when (method == POST || method == PUT) return411
Streams.fromList []
----------------------------------------------------------------------
return411 = do
let (major, minor) = version
let resp = mconcat [ byteString "HTTP/"
, fromShow major
, char8 '.'
, fromShow minor
, byteString " 411 Length Required\r\n\r\n"
, byteString "411 Length Required\r\n"
, flush
]
writeEndB <- mkBuffer
Streams.write (Just resp) writeEndB
Streams.write Nothing writeEndB
terminateSession LengthRequiredException
----------------------------------------------------------------------
parseForm readEnd' = if hasForm
then getForm
else return (readEnd', emptyParams)
where
trimIt = fst . S.spanEnd (== ' ') . S.takeWhile (/= ';')
. S.dropWhile (== ' ')
mbCT = trimIt <$> getStdContentType stdHdrs
hasForm = mbCT == Just "application/x-www-form-urlencoded"
mAX_POST_BODY_SIZE = 1024 * 1024
getForm = do
readEnd'' <- Streams.throwIfProducesMoreThan
mAX_POST_BODY_SIZE readEnd'
contents <- S.concat <$> Streams.toList readEnd''
let postParams = parseUrlEncoded contents
finalReadEnd <- Streams.fromList [contents]
return (finalReadEnd, postParams)
----------------------------------------------------------------------
checkConnectionClose version connection = do
-- For HTTP/1.1: if there is an explicit Connection: close, we'll close
-- the socket later.
--
-- For HTTP/1.0: if there is no explicit Connection: Keep-Alive,
-- close the socket later.
let v = CI.mk <$> connection
when ((version == (1, 1) && v == Just "close") ||
(version == (1, 0) && v /= Just "keep-alive")) $
writeIORef forceConnectionClose True
--------------------------------------------------------------------------
{-# INLINE badRequestWithNoHost #-}
badRequestWithNoHost :: IO a
badRequestWithNoHost = do
let msg = mconcat [
byteString "HTTP/1.1 400 Bad Request\r\n\r\n"
, byteString "400 Bad Request: HTTP/1.1 request with no "
, byteString "Host header\r\n"
, flush
]
writeEndB <- mkBuffer
Streams.write (Just msg) writeEndB
Streams.write Nothing writeEndB
terminateSession BadRequestException
--------------------------------------------------------------------------
{-# INLINE checkExpect100Continue #-}
checkExpect100Continue req =
when (getHeader "expect" req == Just "100-continue") $ do
let v = if rqVersion req == (1,1) then "HTTP/1.1" else "HTTP/1.0"
let hl = byteString v <>
byteString " 100 Continue\r\n\r\n" <>
flush
os <- mkBuffer
Streams.write (Just hl) os
--------------------------------------------------------------------------
{-# INLINE processRequest #-}
processRequest !hookState !req = {-# SCC "httpSession/processRequest" #-} do
-- successfully parsed a request, so restart the timer
tickle $ max defaultTimeout
-- check for Expect: 100-continue
checkExpect100Continue req
b <- runServerHandler hookState req
`E.catches` [ Handler $ escapeSnapHandler hookState
, Handler $
catchUserException hookState "user handler" req
]
if b
then do writeIORef isNewConnection False
-- the timer resets to its default value here.
loop
else return $! ()
--------------------------------------------------------------------------
{-# INLINE runServerHandler #-}
runServerHandler !hookState !req = {-# SCC "httpSession/runServerHandler" #-} do
(req0, rsp0) <- serverHandler config sessionData req
userHandlerFinishedHook hookState req rsp0
-- check whether we should close the connection after sending the
-- response
let v = rqVersion req
let is_1_0 = (v == (1,0))
cc <- if is_1_0 && (isNothing $ rspContentLength rsp0)
then return $! True
else readIORef forceConnectionClose
-- skip unread portion of request body if rspTransformingRqBody is not
-- true
unless (rspTransformingRqBody rsp0) $ Streams.skipToEof (rqBody req)
!date <- getDateString
rsp1 <- fixupResponse req rsp0
let (!hdrs, !cc') = addDateAndServerHeaders is_1_0 date cc $
headers rsp1
let rsp = updateHeaders (const hdrs) rsp1
writeIORef forceConnectionClose cc'
bytesSent <- sendResponse req rsp `E.catch`
catchUserException hookState "sending-response" req
dataFinishedHook hookState req rsp
logAccess req0 rsp bytesSent
return $! not cc'
--------------------------------------------------------------------------
addDateAndServerHeaders !is1_0 !date !cc !hdrs =
{-# SCC "addDateAndServerHeaders" #-}
let (!hdrs', !newcc) = go [("date",date)] False cc
$ H.unsafeToCaseFoldedList hdrs
in (H.unsafeFromCaseFoldedList hdrs', newcc)
where
-- N.B.: here we know the date header has already been removed by
-- "fixupResponse".
go !l !seenServer !connClose [] =
let !l1 = if seenServer then l else (("server", sERVER_HEADER):l)
!l2 = if connClose then (("connection", "close"):l1) else l1
in (l2, connClose)
go l _ c (x@("server",_):xs) = go (x:l) True c xs
go l seenServer c (x@("connection", v):xs)
| c = go l seenServer c xs
| v == "close" || (is1_0 && v /= "keep-alive") =
go l seenServer True xs
| otherwise = go (x:l) seenServer c xs
go l seenServer c (x:xs) = go (x:l) seenServer c xs
--------------------------------------------------------------------------
escapeSnapHandler hookState (EscapeHttp escapeHandler) = do
escapeHook hookState
mkBuffer >>= escapeHandler tickle readEnd
return False
escapeSnapHandler _ (TerminateConnection e) = terminateSession e
--------------------------------------------------------------------------
catchUserException :: IORef hookState
-> ByteString
-> Request
-> SomeException
-> IO a
catchUserException hookState phase req e = do
logError $ mconcat [
byteString "Exception leaked to httpSession during phase '"
, byteString phase
, byteString "': \n"
, requestErrorMessage req e
]
-- Note: the handler passed to httpSession needs to catch its own
-- exceptions if it wants to avoid an ungracious exit here.
eatException $ exceptionHook hookState e
terminateSession e
--------------------------------------------------------------------------
sendResponse :: Request -> Response -> IO Word64
sendResponse !req !rsp = {-# SCC "httpSession/sendResponse" #-} do
let !v = rqVersion req
let !hdrs' = renderCookies rsp (headers rsp)
let !code = rspStatus rsp
let body = rspBody rsp
let needChunked = rqMethod req /= HEAD
&& isNothing (rspContentLength rsp)
&& code /= 204
&& code /= 304
let (hdrs'', body', shouldClose) = if needChunked
then noCL req hdrs' body
else (hdrs', body, False)
when shouldClose $ writeIORef forceConnectionClose $! True
let hdrPrim = mkHeaderPrim v rsp hdrs''
let hlen = size hdrPrim
let headerBuilder = primFixed hdrPrim $! ()
nBodyBytes <- case body' of
Stream s ->
whenStream headerBuilder hlen rsp s
SendFile f Nothing ->
whenSendFile headerBuilder rsp f 0
-- ignore end length here because we know we had a
-- content-length, use that instead.
SendFile f (Just (st, _)) ->
whenSendFile headerBuilder rsp f st
return $! nBodyBytes
--------------------------------------------------------------------------
noCL :: Request
-> Headers
-> ResponseBody
-> (Headers, ResponseBody, Bool)
noCL req hdrs body =
if v == (1,1)
then let origBody = rspBodyToEnum body
body' = \os -> do
os' <- writeChunkedTransferEncoding os
origBody os'
in ( H.set "transfer-encoding" "chunked" hdrs
, Stream body'
, False)
else
-- We've already noted that we have to close the socket earlier in
-- runServerHandler.
(hdrs, body, True)
where
v = rqVersion req
{-# INLINE noCL #-}
--------------------------------------------------------------------------
-- | If the response contains a content-length, make sure the response body
-- StreamProc doesn't yield more (or fewer) than the given number of bytes.
limitRspBody :: Int -- ^ header length
-> Response -- ^ response
-> OutputStream ByteString -- ^ write end of socket
-> IO (OutputStream ByteString)
limitRspBody hlen rsp os = maybe (return os) f $ rspContentLength rsp
where
f cl = Streams.giveExactly (fromIntegral hlen + fromIntegral cl) os
{-# INLINE limitRspBody #-}
--------------------------------------------------------------------------
whenStream :: Builder -- ^ headers
-> Int -- ^ header length
-> Response -- ^ response
-> StreamProc -- ^ output body
-> IO Word64 -- ^ returns number of bytes written
whenStream headerString hlen rsp body = do
-- note:
--
-- * precondition here is that we have a content-length and that we're
-- not using chunked transfer encoding.
--
-- * "headerString" includes http status line.
--
-- If you're transforming the request body, you have to manage your own
-- timeouts.
let t = if rspTransformingRqBody rsp
then return $! ()
else tickle $ max defaultTimeout
writeEnd0 <- Streams.ignoreEof writeEnd
(writeEnd1, getCount) <- Streams.countOutput writeEnd0
writeEnd2 <- limitRspBody hlen rsp writeEnd1
writeEndB <- Streams.unsafeBuilderStream (return buffer) writeEnd2 >>=
Streams.contramapM (\x -> t >> return x)
Streams.write (Just headerString) writeEndB
writeEnd' <- body writeEndB
Streams.write Nothing writeEnd'
-- Just in case the user handler didn't.
Streams.write Nothing writeEnd1
n <- getCount
return $! fromIntegral n - fromIntegral hlen
{-# INLINE whenStream #-}
--------------------------------------------------------------------------
whenSendFile :: Builder -- ^ headers
-> Response -- ^ response
-> FilePath -- ^ file to serve
-> Word64 -- ^ file start offset
-> IO Word64 -- ^ returns number of bytes written
whenSendFile headerString rsp filePath offset = do
let !cl = fromJust $ rspContentLength rsp
sendfileHandler buffer headerString filePath offset cl
return cl
{-# INLINE whenSendFile #-}
--------------------------------------------------------------------------
mkHeaderLine :: HttpVersion -> Response -> FixedPrim ()
mkHeaderLine outVer r =
case outCode of
200 | outVer == (1, 1) ->
-- typo in bytestring here
fixedPrim 17 $ const (void . cpBS "HTTP/1.1 200 OK\r\n")
200 | otherwise ->
fixedPrim 17 $ const (void . cpBS "HTTP/1.0 200 OK\r\n")
_ -> fixedPrim len $ const (void . line)
where
outCode = rspStatus r
v = if outVer == (1,1) then "HTTP/1.1 " else "HTTP/1.0 "
outCodeStr = S.pack $ show outCode
space !op = do
pokeByteOff op 0 (32 :: Word8)
return $! plusPtr op 1
line = cpBS v >=> cpBS outCodeStr >=> space >=> cpBS reason
>=> crlfPoke
reason = rspStatusReason r
len = 12 + S.length outCodeStr + S.length reason
------------------------------------------------------------------------------
mkHeaderPrim :: HttpVersion -> Response -> Headers -> FixedPrim ()
mkHeaderPrim v r hdrs = mkHeaderLine v r <+> headersToPrim hdrs
------------------------------------------------------------------------------
infixl 4 <+>
(<+>) :: FixedPrim () -> FixedPrim () -> FixedPrim ()
p1 <+> p2 = ignore >$< p1 >*< p2
where
ignore = join (,)
------------------------------------------------------------------------------
{-# INLINE headersToPrim #-}
headersToPrim :: Headers -> FixedPrim ()
headersToPrim hdrs = fixedPrim len (const copy)
where
len = H.foldedFoldl' f 0 hdrs + 2
where
f l k v = l + S.length k + S.length v + 4
copy = go $ H.unsafeToCaseFoldedList hdrs
go [] !op = void $ crlfPoke op
go ((k,v):xs) !op = do
!op' <- cpBS k op
pokeByteOff op' 0 (58 :: Word8) -- colon
pokeByteOff op' 1 (32 :: Word8) -- space
!op'' <- cpBS v $ plusPtr op' 2
crlfPoke op'' >>= go xs
{-# INLINE cpBS #-}
cpBS :: ByteString -> Ptr Word8 -> IO (Ptr Word8)
cpBS s !op = S.unsafeUseAsCStringLen s $ \(cstr, clen) -> do
let !cl = fromIntegral clen
copyBytes op (castPtr cstr) cl
return $! plusPtr op cl
{-# INLINE crlfPoke #-}
crlfPoke :: Ptr Word8 -> IO (Ptr Word8)
crlfPoke !op = do
pokeByteOff op 0 (13 :: Word8) -- cr
pokeByteOff op 1 (10 :: Word8) -- lf
return $! plusPtr op 2
------------------------------------------------------------------------------
sERVER_HEADER :: ByteString
sERVER_HEADER = S.concat ["Snap/", snapServerVersion]
------------------------------------------------------------------------------
snapServerVersion :: ByteString
snapServerVersion = S.pack $ showVersion $ V.version
------------------------------------------------------------------------------
terminateSession :: Exception e => e -> IO a
terminateSession = E.throwIO . TerminateSessionException . SomeException
------------------------------------------------------------------------------
requestErrorMessage :: Request -> SomeException -> Builder
requestErrorMessage req e =
mconcat [ byteString "During processing of request from "
, byteString $ rqClientAddr req
, byteString ":"
, fromShow $ rqClientPort req
, byteString "\nrequest:\n"
, fromShow $ show req
, byteString "\n"
, msgB
]
where
msgB = mconcat [
byteString "A web handler threw an exception. Details:\n"
, fromShow e
]
------------------------------------------------------------------------------
-- | Convert 'Cookie' into 'ByteString' for output.
cookieToBS :: Cookie -> ByteString
cookieToBS (Cookie k v mbExpTime mbDomain mbPath isSec isHOnly) = cookie
where
cookie = S.concat [k, "=", v, path, exptime, domain, secure, hOnly]
path = maybe "" (S.append "; path=") mbPath
domain = maybe "" (S.append "; domain=") mbDomain
exptime = maybe "" (S.append "; expires=" . fmt) mbExpTime
secure = if isSec then "; Secure" else ""
hOnly = if isHOnly then "; HttpOnly" else ""
fmt = S.pack . formatTime defaultTimeLocale
"%a, %d-%b-%Y %H:%M:%S GMT"
------------------------------------------------------------------------------
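-- For reference, a single cookie rendered by 'cookieToBS' above looks like
-- this (values here are hypothetical, not taken from any test):
-- @Cookie "sid" "42" Nothing Nothing (Just "/") True False@ becomes
-- @"sid=42; path=/; Secure"@; the expiry, domain and HttpOnly fragments
-- contribute nothing when absent.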
renderCookies :: Response -> Headers -> Headers
renderCookies r hdrs
| null cookies = hdrs
| otherwise = foldl' (\m v -> H.unsafeInsert "set-cookie" v m) hdrs cookies
where
cookies = fmap cookieToBS . Map.elems $ rspCookies r
------------------------------------------------------------------------------
fromShow :: Show a => a -> Builder
fromShow = stringUtf8 . show
|
sopvop/snap-server
|
src/Snap/Internal/Http/Server/Session.hs
|
bsd-3-clause
| 36,542 | 0 | 22 | 12,455 | 6,560 | 3,414 | 3,146 | 569 | 21 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
module Stack.Options.GlobalParser where
import Options.Applicative
import Options.Applicative.Builder.Extra
import Path.IO (getCurrentDir, resolveDir', resolveFile')
import qualified Stack.Docker as Docker
import Stack.Init
import Stack.Prelude
import Stack.Options.ConfigParser
import Stack.Options.LogLevelParser
import Stack.Options.ResolverParser
import Stack.Options.Utils
import Stack.Types.Config
import Stack.Types.Docker
-- | Parser for global command-line options.
globalOptsParser :: FilePath -> GlobalOptsContext -> Maybe LogLevel -> Parser GlobalOptsMonoid
globalOptsParser currentDir kind defLogLevel =
GlobalOptsMonoid <$>
optionalFirst (strOption (long Docker.reExecArgName <> hidden <> internal)) <*>
optionalFirst (option auto (long dockerEntrypointArgName <> hidden <> internal)) <*>
(First <$> logLevelOptsParser hide0 defLogLevel) <*>
firstBoolFlagsTrue
"time-in-log"
"inclusion of timings in logs, for the purposes of using diff with logs"
hide <*>
configOptsParser currentDir kind <*>
optionalFirst (abstractResolverOptsParser hide0) <*>
pure (First Nothing) <*> -- resolver root is only set via the script command
optionalFirst (compilerOptsParser hide0) <*>
firstBoolFlagsNoDefault
"terminal"
"overriding terminal detection in the case of running in a false terminal"
hide <*>
option readStyles
(long "stack-colors" <>
long "stack-colours" <>
metavar "STYLES" <>
value mempty <>
help "Specify stack's output styles; STYLES is a colon-delimited \
\sequence of key=value, where 'key' is a style name and 'value' \
\is a semicolon-delimited list of 'ANSI' SGR (Select Graphic \
\Rendition) control codes (in decimal). Use 'stack ls \
\stack-colors --basic' to see the current sequence. In shells \
\where a semicolon is a command separator, enclose STYLES in \
\quotes." <>
hide) <*>
optionalFirst (option auto
(long "terminal-width" <>
metavar "INT" <>
help "Specify the width of the terminal, used for pretty-print messages" <>
hide)) <*>
optionalFirst
(strOption
(long "stack-yaml" <>
metavar "STACK-YAML" <>
completer (fileExtCompleter [".yaml"]) <>
help ("Override project stack.yaml file " <>
"(overrides any STACK_YAML environment variable)") <>
hide)) <*>
optionalFirst (option readLockFileBehavior
(long "lock-file" <>
help "Specify how to interact with lock files. Default: read/write. If resolver is overridden: read-only" <>
hide))
where
hide = hideMods hide0
hide0 = kind /= OuterGlobalOpts
-- | Create GlobalOpts from GlobalOptsMonoid.
globalOptsFromMonoid :: MonadIO m => Bool -> GlobalOptsMonoid -> m GlobalOpts
globalOptsFromMonoid defaultTerminal GlobalOptsMonoid{..} = do
resolver <- for (getFirst globalMonoidResolver) $ \ur -> do
root <-
case globalMonoidResolverRoot of
First Nothing -> getCurrentDir
First (Just dir) -> resolveDir' dir
resolvePaths (Just root) ur
stackYaml <-
case getFirst globalMonoidStackYaml of
Nothing -> pure SYLDefault
Just fp -> SYLOverride <$> resolveFile' fp
pure GlobalOpts
{ globalReExecVersion = getFirst globalMonoidReExecVersion
, globalDockerEntrypoint = getFirst globalMonoidDockerEntrypoint
, globalLogLevel = fromFirst defaultLogLevel globalMonoidLogLevel
, globalTimeInLog = fromFirstTrue globalMonoidTimeInLog
, globalConfigMonoid = globalMonoidConfigMonoid
, globalResolver = resolver
, globalCompiler = getFirst globalMonoidCompiler
, globalTerminal = fromFirst defaultTerminal globalMonoidTerminal
, globalStylesUpdate = globalMonoidStyles
, globalTermWidth = getFirst globalMonoidTermWidth
, globalStackYaml = stackYaml
, globalLockFileBehavior =
let defLFB =
case getFirst globalMonoidResolver of
Nothing -> LFBReadWrite
_ -> LFBReadOnly
in fromFirst defLFB globalMonoidLockFileBehavior
}
initOptsParser :: Parser InitOpts
initOptsParser =
InitOpts <$> searchDirs
<*> omitPackages
<*> overwrite <*> fmap not ignoreSubDirs
where
searchDirs =
many (textArgument
(metavar "DIR" <>
completer dirCompleter <>
help "Directories to include, default is current directory."))
ignoreSubDirs = switch (long "ignore-subdirs" <>
help "Do not search for .cabal files in sub directories")
overwrite = switch (long "force" <>
help "Force overwriting an existing stack.yaml")
omitPackages = switch (long "omit-packages" <>
help "Exclude conflicting or incompatible user packages")
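-- A rough sketch of how the flags combine (the InitOpts field names are not
-- visible in this module, so this is stated positionally): a command line
-- such as "--force --ignore-subdirs pkgs" parses to search directories
-- ["pkgs"], omit-packages False, force-overwrite True and
-- include-subdirectories False, since the --ignore-subdirs switch is
-- negated above.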
|
juhp/stack
|
src/Stack/Options/GlobalParser.hs
|
bsd-3-clause
| 5,213 | 0 | 25 | 1,426 | 898 | 453 | 445 | 106 | 4 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="az-AZ">
<title>Custom Payloads Add-on</title>
<maps>
<homeID>custompayloads</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/custompayloads/src/main/javahelp/org/zaproxy/zap/extension/custompayloads/resources/help_az_AZ/helpset_az_AZ.hs
|
apache-2.0
| 978 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Stg to C--: heap management functions
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmHeap (
getVirtHp, setVirtHp, setRealHp,
getHpRelOffset,
entryHeapCheck, altHeapCheck, noEscapeHeapCheck, altHeapCheckReturnsTo,
heapStackCheckGen,
entryHeapCheck',
mkStaticClosureFields, mkStaticClosure,
allocDynClosure, allocDynClosureCmm, allocHeapClosure,
emitSetDynHdr
) where
#include "HsVersions.h"
import StgSyn
import CLabel
import StgCmmLayout
import StgCmmUtils
import StgCmmMonad
import StgCmmProf (profDynAlloc, dynProfHdr, staticProfHdr)
import StgCmmTicky
import StgCmmClosure
import StgCmmEnv
import MkGraph
import Hoopl
import SMRep
import Cmm
import CmmUtils
import CostCentre
import IdInfo( CafInfo(..), mayHaveCafRefs )
import Id ( Id )
import Module
import DynFlags
import FastString( mkFastString, fsLit )
#if __GLASGOW_HASKELL__ >= 709
import Prelude hiding ((<*>))
#endif
import Control.Monad (when)
import Data.Maybe (isJust)
-----------------------------------------------------------
-- Initialise dynamic heap objects
-----------------------------------------------------------
allocDynClosure
:: Maybe Id
-> CmmInfoTable
-> LambdaFormInfo
-> CmmExpr -- Cost Centre to stick in the object
-> CmmExpr -- Cost Centre to blame for this alloc
-- (usually the same; sometimes "OVERHEAD")
-> [(NonVoid StgArg, VirtualHpOffset)] -- Offsets from start of object
-- ie Info ptr has offset zero.
-- No void args in here
-> FCode CmmExpr -- returns Hp+n
allocDynClosureCmm
:: Maybe Id -> CmmInfoTable -> LambdaFormInfo -> CmmExpr -> CmmExpr
-> [(CmmExpr, ByteOff)]
-> FCode CmmExpr -- returns Hp+n
-- allocDynClosure allocates the thing in the heap,
-- and modifies the virtual Hp to account for this.
-- The second return value is the graph that sets the value of the
-- returned LocalReg, which should point to the closure after executing
-- the graph.
-- allocDynClosure returns an (Hp+8) CmmExpr, and hence the result is
-- only valid until Hp is changed. The caller should assign the
-- result to a LocalReg if it is required to remain live.
--
-- The reason we don't assign it to a LocalReg here is that the caller
-- is often about to call regIdInfo, which immediately assigns the
-- result of allocDynClosure to a new temp in order to add the tag.
-- So by not generating a LocalReg here we avoid a common source of
-- new temporaries and save some compile time. This can be quite
-- significant - see test T4801.
allocDynClosure mb_id info_tbl lf_info use_cc _blame_cc args_w_offsets = do
let (args, offsets) = unzip args_w_offsets
cmm_args <- mapM getArgAmode args -- No void args
allocDynClosureCmm mb_id info_tbl lf_info
use_cc _blame_cc (zip cmm_args offsets)
allocDynClosureCmm mb_id info_tbl lf_info use_cc _blame_cc amodes_w_offsets = do
-- SAY WHAT WE ARE ABOUT TO DO
let rep = cit_rep info_tbl
tickyDynAlloc mb_id rep lf_info
let info_ptr = CmmLit (CmmLabel (cit_lbl info_tbl))
allocHeapClosure rep info_ptr use_cc amodes_w_offsets
-- | Low-level heap object allocation.
allocHeapClosure
:: SMRep -- ^ representation of the object
-> CmmExpr -- ^ info pointer
-> CmmExpr -- ^ cost centre
-> [(CmmExpr,ByteOff)] -- ^ payload
-> FCode CmmExpr -- ^ returns the address of the object
allocHeapClosure rep info_ptr use_cc payload = do
profDynAlloc rep use_cc
virt_hp <- getVirtHp
-- Find the offset of the info-ptr word
let info_offset = virt_hp + 1
-- info_offset is the VirtualHpOffset of the first
-- word of the new object
-- Remember, virtHp points to last allocated word,
-- ie 1 *before* the info-ptr word of new object.
base <- getHpRelOffset info_offset
emitComment $ mkFastString "allocHeapClosure"
emitSetDynHdr base info_ptr use_cc
-- Fill in the fields
hpStore base payload
-- Bump the virtual heap pointer
dflags <- getDynFlags
setVirtHp (virt_hp + heapClosureSizeW dflags rep)
return base
emitSetDynHdr :: CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
emitSetDynHdr base info_ptr ccs
= do dflags <- getDynFlags
hpStore base (zip (header dflags) [0, wORD_SIZE dflags ..])
where
header :: DynFlags -> [CmmExpr]
header dflags = [info_ptr] ++ dynProfHdr dflags ccs
-- ToDo: Parallel stuff
-- No ticky header
-- Store the item (expr,off) in base[off]
hpStore :: CmmExpr -> [(CmmExpr, ByteOff)] -> FCode ()
hpStore base vals = do
dflags <- getDynFlags
sequence_ $
[ emitStore (cmmOffsetB dflags base off) val | (val,off) <- vals ]
-----------------------------------------------------------
-- Layout of static closures
-----------------------------------------------------------
-- Make a static closure, adding on any extra padding needed for CAFs,
-- and adding a static link field if necessary.
mkStaticClosureFields
:: DynFlags
-> CmmInfoTable
-> CostCentreStack
-> CafInfo
-> [CmmLit] -- Payload
-> [CmmLit] -- The full closure
mkStaticClosureFields dflags info_tbl ccs caf_refs payload
= mkStaticClosure dflags info_lbl ccs payload padding
static_link_field saved_info_field
where
info_lbl = cit_lbl info_tbl
-- CAFs must have consistent layout, regardless of whether they
-- are actually updatable or not. The layout of a CAF is:
--
-- 3 saved_info
-- 2 static_link
-- 1 indirectee
-- 0 info ptr
--
-- the static_link and saved_info fields must always be in the
-- same place. So we use isThunkRep rather than closureUpdReqd
-- here:
is_caf = isThunkRep (cit_rep info_tbl)
padding
| is_caf && null payload = [mkIntCLit dflags 0]
| otherwise = []
static_link_field
| is_caf || staticClosureNeedsLink (mayHaveCafRefs caf_refs) info_tbl
= [static_link_value]
| otherwise
= []
saved_info_field
| is_caf = [mkIntCLit dflags 0]
| otherwise = []
-- For a static constructor which has NoCafRefs, we set the
-- static link field to a non-zero value so the garbage
-- collector will ignore it.
static_link_value
| mayHaveCafRefs caf_refs = mkIntCLit dflags 0
| otherwise = mkIntCLit dflags 1 -- No CAF refs
mkStaticClosure :: DynFlags -> CLabel -> CostCentreStack -> [CmmLit]
-> [CmmLit] -> [CmmLit] -> [CmmLit] -> [CmmLit]
mkStaticClosure dflags info_lbl ccs payload padding static_link_field saved_info_field
= [CmmLabel info_lbl]
++ staticProfHdr dflags ccs
++ concatMap (padLitToWord dflags) payload
++ padding
++ static_link_field
++ saved_info_field
-- JD: Simon had elided this padding, but without it the C back end asserts
-- failure. Maybe it's a bad assertion, and this padding is indeed unnecessary?
padLitToWord :: DynFlags -> CmmLit -> [CmmLit]
padLitToWord dflags lit = lit : padding pad_length
where width = typeWidth (cmmLitType dflags lit)
pad_length = wORD_SIZE dflags - widthInBytes width :: Int
padding n | n <= 0 = []
| n `rem` 2 /= 0 = CmmInt 0 W8 : padding (n-1)
| n `rem` 4 /= 0 = CmmInt 0 W16 : padding (n-2)
| n `rem` 8 /= 0 = CmmInt 0 W32 : padding (n-4)
| otherwise = CmmInt 0 W64 : padding (n-8)
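-- A worked example (assuming a 64-bit target, i.e. wORD_SIZE dflags == 8):
-- for a W32 literal, pad_length is 8 - 4 = 4, and padding 4 yields a single
-- CmmInt 0 W32, so the literal is padded out to exactly one word.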
-----------------------------------------------------------
-- Heap overflow checking
-----------------------------------------------------------
{- Note [Heap checks]
~~~~~~~~~~~~~~~~~~
Heap checks come in various forms. We provide the following entry
points to the runtime system, all of which use the native C-- entry
convention.
* gc() performs garbage collection and returns
nothing to its caller
* A series of canned entry points like
r = gc_1p( r )
where r is a pointer. This performs gc, and
then returns its argument r to its caller.
* A series of canned entry points like
gcfun_2p( f, x, y )
where f is a function closure of arity 2
This performs garbage collection, keeping alive the
three argument ptrs, and then tail-calls f(x,y)
These are used in the following circumstances
* entryHeapCheck: Function entry
(a) With a canned GC entry sequence
f( f_clo, x:ptr, y:ptr ) {
Hp = Hp+8
if Hp > HpLim goto L
...
L: HpAlloc = 8
jump gcfun_2p( f_clo, x, y ) }
Note the tail call to the garbage collector;
it should do no register shuffling
(b) No canned sequence
f( f_clo, x:ptr, y:ptr, ...etc... ) {
T: Hp = Hp+8
if Hp > HpLim goto L
...
L: HpAlloc = 8
call gc() -- Needs an info table
goto T }
* altHeapCheck: Immediately following an eval
Started as
case f x y of r { (p,q) -> rhs }
(a) With a canned sequence for the results of f
(which is the very common case since
all boxed cases return just one pointer
...
r = f( x, y )
K: -- K needs an info table
Hp = Hp+8
if Hp > HpLim goto L
...code for rhs...
L: r = gc_1p( r )
goto K }
Here, the info table needed by the call
to gc_1p should be the *same* as the
one for the call to f; the C-- optimiser
spots this sharing opportunity)
(b) No canned sequence for results of f
Note second info table
...
(r1,r2,r3) = call f( x, y )
K:
Hp = Hp+8
if Hp > HpLim goto L
...code for rhs...
L: call gc() -- Extra info table here
goto K
* generalHeapCheck: Anywhere else
e.g. entry to thunk
case branch *not* following eval,
or let-no-escape
Exactly the same as the previous case:
K: -- K needs an info table
Hp = Hp+8
if Hp > HpLim goto L
...
L: call gc()
goto K
-}
--------------------------------------------------------------
-- A heap/stack check at a function or thunk entry point.
entryHeapCheck :: ClosureInfo
-> Maybe LocalReg -- Function (closure environment)
-> Int -- Arity -- not same as len args b/c of voids
-> [LocalReg] -- Non-void args (empty for thunk)
-> FCode ()
-> FCode ()
entryHeapCheck cl_info nodeSet arity args code
= entryHeapCheck' is_fastf node arity args code
where
node = case nodeSet of
Just r -> CmmReg (CmmLocal r)
Nothing -> CmmLit (CmmLabel $ staticClosureLabel cl_info)
is_fastf = case closureFunInfo cl_info of
Just (_, ArgGen _) -> False
_otherwise -> True
-- | lower-level version for CmmParse
entryHeapCheck' :: Bool -- is a known function pattern
-> CmmExpr -- expression for the closure pointer
-> Int -- Arity -- not same as len args b/c of voids
-> [LocalReg] -- Non-void args (empty for thunk)
-> FCode ()
-> FCode ()
entryHeapCheck' is_fastf node arity args code
= do dflags <- getDynFlags
let is_thunk = arity == 0
args' = map (CmmReg . CmmLocal) args
stg_gc_fun = CmmReg (CmmGlobal GCFun)
stg_gc_enter1 = CmmReg (CmmGlobal GCEnter1)
{- Thunks: jump stg_gc_enter_1
Function (fast): call (NativeNode) stg_gc_fun(fun, args)
Function (slow): call (slow) stg_gc_fun(fun, args)
-}
gc_call upd
| is_thunk
= mkJump dflags NativeNodeCall stg_gc_enter1 [node] upd
| is_fastf
= mkJump dflags NativeNodeCall stg_gc_fun (node : args') upd
| otherwise
= mkJump dflags Slow stg_gc_fun (node : args') upd
updfr_sz <- getUpdFrameOff
loop_id <- newLabelC
emitLabel loop_id
heapCheck True True (gc_call updfr_sz <*> mkBranch loop_id) code
-- ------------------------------------------------------------
-- A heap/stack check in a case alternative
-- If there are multiple alts and we need to GC, but don't have a
-- continuation already (the scrut was simple), then we should
-- pre-generate the continuation. (if there are multiple alts it is
-- always a canned GC point).
-- altHeapCheck:
-- If we have a return continuation,
-- then if it is a canned GC pattern,
-- then we do mkJumpReturnsTo
-- else we do a normal call to stg_gc_noregs
-- else if it is a canned GC pattern,
-- then generate the continuation and do mkCallReturnsTo
-- else we do a normal call to stg_gc_noregs
altHeapCheck :: [LocalReg] -> FCode a -> FCode a
altHeapCheck regs code = altOrNoEscapeHeapCheck False regs code
altOrNoEscapeHeapCheck :: Bool -> [LocalReg] -> FCode a -> FCode a
altOrNoEscapeHeapCheck checkYield regs code = do
dflags <- getDynFlags
case cannedGCEntryPoint dflags regs of
Nothing -> genericGC checkYield code
Just gc -> do
lret <- newLabelC
let (off, _, copyin) = copyInOflow dflags NativeReturn (Young lret) regs []
lcont <- newLabelC
tscope <- getTickScope
emitOutOfLine lret (copyin <*> mkBranch lcont, tscope)
emitLabel lcont
cannedGCReturnsTo checkYield False gc regs lret off code
altHeapCheckReturnsTo :: [LocalReg] -> Label -> ByteOff -> FCode a -> FCode a
altHeapCheckReturnsTo regs lret off code
= do dflags <- getDynFlags
case cannedGCEntryPoint dflags regs of
Nothing -> genericGC False code
Just gc -> cannedGCReturnsTo False True gc regs lret off code
-- noEscapeHeapCheck is implemented identically to altHeapCheck (which
-- is more efficient), but cannot be optimized away in the non-allocating
-- case because it may occur in a loop
noEscapeHeapCheck :: [LocalReg] -> FCode a -> FCode a
noEscapeHeapCheck regs code = altOrNoEscapeHeapCheck True regs code
cannedGCReturnsTo :: Bool -> Bool -> CmmExpr -> [LocalReg] -> Label -> ByteOff
-> FCode a
-> FCode a
cannedGCReturnsTo checkYield cont_on_stack gc regs lret off code
= do dflags <- getDynFlags
updfr_sz <- getUpdFrameOff
heapCheck False checkYield (gc_call dflags gc updfr_sz) code
where
reg_exprs = map (CmmReg . CmmLocal) regs
-- Note [stg_gc arguments]
-- NB. we use the NativeReturn convention for passing arguments
-- to the canned heap-check routines, because we are in a case
-- alternative and hence the [LocalReg] was passed to us in the
-- NativeReturn convention.
gc_call dflags label sp
| cont_on_stack
= mkJumpReturnsTo dflags label NativeReturn reg_exprs lret off sp
| otherwise
= mkCallReturnsTo dflags label NativeReturn reg_exprs lret off sp []
genericGC :: Bool -> FCode a -> FCode a
genericGC checkYield code
= do updfr_sz <- getUpdFrameOff
lretry <- newLabelC
emitLabel lretry
call <- mkCall generic_gc (GC, GC) [] [] updfr_sz []
heapCheck False checkYield (call <*> mkBranch lretry) code
cannedGCEntryPoint :: DynFlags -> [LocalReg] -> Maybe CmmExpr
cannedGCEntryPoint dflags regs
= case map localRegType regs of
[] -> Just (mkGcLabel "stg_gc_noregs")
[ty]
| isGcPtrType ty -> Just (mkGcLabel "stg_gc_unpt_r1")
| isFloatType ty -> case width of
W32 -> Just (mkGcLabel "stg_gc_f1")
W64 -> Just (mkGcLabel "stg_gc_d1")
_ -> Nothing
| width == wordWidth dflags -> Just (mkGcLabel "stg_gc_unbx_r1")
| width == W64 -> Just (mkGcLabel "stg_gc_l1")
| otherwise -> Nothing
where
width = typeWidth ty
[ty1,ty2]
| isGcPtrType ty1
&& isGcPtrType ty2 -> Just (mkGcLabel "stg_gc_pp")
[ty1,ty2,ty3]
| isGcPtrType ty1
&& isGcPtrType ty2
&& isGcPtrType ty3 -> Just (mkGcLabel "stg_gc_ppp")
[ty1,ty2,ty3,ty4]
| isGcPtrType ty1
&& isGcPtrType ty2
&& isGcPtrType ty3
&& isGcPtrType ty4 -> Just (mkGcLabel "stg_gc_pppp")
_otherwise -> Nothing
-- Note [stg_gc arguments]
-- It might seem that we could avoid passing the arguments to the
-- stg_gc function, because they are already in the right registers.
-- While this is usually the case, it isn't always. Sometimes the
-- code generator has cleverly avoided the eval in a case, e.g. in
-- ffi/should_run/4221.hs we found
--
-- case a_r1mb of z
-- FunPtr x y -> ...
--
-- where a_r1mb is bound a top-level constructor, and is known to be
-- evaluated. The codegen just assigns x, y and z, and continues;
-- R1 is never assigned.
--
-- So we'll have to rely on optimisations to eliminate these
-- assignments where possible.
-- | The generic GC procedure; no params, no results
generic_gc :: CmmExpr
generic_gc = mkGcLabel "stg_gc_noregs"
-- | Create a CLabel for calling a garbage collector entry point
mkGcLabel :: String -> CmmExpr
mkGcLabel s = CmmLit (CmmLabel (mkCmmCodeLabel rtsPackageKey (fsLit s)))
-------------------------------
heapCheck :: Bool -> Bool -> CmmAGraph -> FCode a -> FCode a
heapCheck checkStack checkYield do_gc code
= getHeapUsage $ \ hpHw ->
-- Emit heap checks, but be sure to do it lazily so
-- that the conditionals on hpHw don't cause a black hole
do { dflags <- getDynFlags
; let mb_alloc_bytes
| hpHw > 0 = Just (mkIntExpr dflags (hpHw * (wORD_SIZE dflags)))
| otherwise = Nothing
stk_hwm | checkStack = Just (CmmLit CmmHighStackMark)
| otherwise = Nothing
; codeOnly $ do_checks stk_hwm checkYield mb_alloc_bytes do_gc
; tickyAllocHeap True hpHw
; setRealHp hpHw
; code }
heapStackCheckGen :: Maybe CmmExpr -> Maybe CmmExpr -> FCode ()
heapStackCheckGen stk_hwm mb_bytes
= do updfr_sz <- getUpdFrameOff
lretry <- newLabelC
emitLabel lretry
call <- mkCall generic_gc (GC, GC) [] [] updfr_sz []
do_checks stk_hwm False mb_bytes (call <*> mkBranch lretry)
-- Note [Single stack check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
-- When compiling a function we can determine how much stack space it
-- will use. We therefore need to perform only a single stack check at
-- the beginning of a function to see if we have enough stack space.
--
-- The check boils down to comparing Sp-N with SpLim, where N is the
-- amount of stack space needed (see Note [Stack usage] below). *BUT*
-- at this stage of the pipeline we are not supposed to refer to Sp
-- itself, because the stack is not yet manifest, so we don't quite
-- know where Sp is pointing.
-- So instead of referring directly to Sp - as we used to do in the
-- past - the code generator uses (old + 0) in the stack check. That
-- is the address of the first word of the old area, so if we add N
-- we'll get the address of highest used word.
--
-- This makes the check robust. For example, while we need to perform
-- only one stack check for each function, we could in theory place
-- more stack checks later in the function. They would be redundant,
-- but not incorrect (in a sense that they should not change program
-- behaviour). We need to make sure however that a stack check
-- inserted after incrementing the stack pointer checks for a
-- respectively smaller stack space. This would not be the case if the
-- code generator produced direct references to Sp. By referencing
-- (old + 0) we make sure that we always check for a correct amount of
-- stack: when converting (old + 0) to Sp the stack layout phase takes
-- into account changes already made to stack pointer. The idea for
-- this change came from observations made while debugging #8275.
-- Note [Stack usage]
-- ~~~~~~~~~~~~~~~~~~
-- At the moment we convert from STG to Cmm we don't know N, the
-- number of bytes of stack that the function will use, so we use a
-- special late-bound CmmLit, namely
-- CmmHighStackMark
-- to stand for the number of bytes needed. When the stack is made
-- manifest, the number of bytes needed is calculated, and used to
-- replace occurrences of CmmHighStackMark
--
-- The (Maybe CmmExpr) passed to do_checks is usually
-- Just (CmmLit CmmHighStackMark)
-- but can also (in certain hand-written RTS functions) be
-- Just (CmmLit 8) or some other fixed value
-- If it is Nothing, we don't generate a stack check at all.
do_checks :: Maybe CmmExpr -- Should we check the stack?
-- See Note [Stack usage]
-> Bool -- Should we check for preemption?
-> Maybe CmmExpr -- Heap headroom (bytes)
-> CmmAGraph -- What to do on failure
-> FCode ()
do_checks mb_stk_hwm checkYield mb_alloc_lit do_gc = do
dflags <- getDynFlags
gc_id <- newLabelC
let
Just alloc_lit = mb_alloc_lit
bump_hp = cmmOffsetExprB dflags (CmmReg hpReg) alloc_lit
-- Sp overflow if ((old + 0) - CmmHighStack < SpLim)
-- At the beginning of a function old + 0 = Sp
-- See Note [Single stack check]
sp_oflo sp_hwm =
CmmMachOp (mo_wordULt dflags)
[CmmMachOp (MO_Sub (typeWidth (cmmRegType dflags spReg)))
[CmmStackSlot Old 0, sp_hwm],
CmmReg spLimReg]
-- Hp overflow if (Hp > HpLim)
-- (Hp has been incremented by now)
-- HpLim points to the LAST WORD of valid allocation space.
hp_oflo = CmmMachOp (mo_wordUGt dflags)
[CmmReg hpReg, CmmReg (CmmGlobal HpLim)]
alloc_n = mkAssign (CmmGlobal HpAlloc) alloc_lit
case mb_stk_hwm of
Nothing -> return ()
Just stk_hwm -> tickyStackCheck >> (emit =<< mkCmmIfGoto (sp_oflo stk_hwm) gc_id)
-- Emit new label that might potentially be a header
-- of a self-recursive tail call.
-- See Note [Self-recursive loop header].
self_loop_info <- getSelfLoop
case self_loop_info of
Just (_, loop_header_id, _)
| checkYield && isJust mb_stk_hwm -> emitLabel loop_header_id
_otherwise -> return ()
if (isJust mb_alloc_lit)
then do
tickyHeapCheck
emitAssign hpReg bump_hp
emit =<< mkCmmIfThen hp_oflo (alloc_n <*> mkBranch gc_id)
else do
when (checkYield && not (gopt Opt_OmitYields dflags)) $ do
-- Yielding if HpLim == 0
let yielding = CmmMachOp (mo_wordEq dflags)
[CmmReg (CmmGlobal HpLim),
CmmLit (zeroCLit dflags)]
emit =<< mkCmmIfGoto yielding gc_id
tscope <- getTickScope
emitOutOfLine gc_id
(do_gc, tscope) -- this is expected to jump back somewhere
-- Test for stack pointer exhaustion, then
-- bump heap pointer, and test for heap exhaustion
-- Note that we don't move the heap pointer unless the
-- stack check succeeds. Otherwise we might end up
-- with slop at the end of the current block, which can
-- confuse the LDV profiler.
-- Note [Self-recursive loop header]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Self-recursive loop header is required by loopification optimization (See
-- Note [Self-recursive tail calls] in StgCmmExpr). We emit it if:
--
-- 1. There is information about self-loop in the FCode environment. We don't
-- check the binder (first component of the self_loop_info) because we are
-- certain that if the self-loop info is present then we are compiling the
-- binder body. Reason: the only possible way to get here with the
-- self_loop_info present is from closureCodeBody.
--
-- 2. checkYield && isJust mb_stk_hwm. checkYield tells us that it is possible
-- to preempt the heap check (see #367 for motivation behind this check). It
-- is True for heap checks placed at the entry to a function and
-- let-no-escape heap checks but false for other heap checks (eg. in case
-- alternatives or created from hand-written high-level Cmm). The second
-- check (isJust mb_stk_hwm) is true for heap checks at the entry to a
-- function and some heap checks created in hand-written Cmm. Otherwise it
-- is Nothing. In other words the only situation when both conditions are
-- true is when compiling stack and heap checks at the entry to a
-- function. This is the only situation when we want to emit a self-loop
-- label.
|
gcampax/ghc
|
compiler/codeGen/StgCmmHeap.hs
|
bsd-3-clause
| 25,474 | 0 | 20 | 7,157 | 3,737 | 1,937 | 1,800 | 298 | 8 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Concurrent.STM
import qualified Control.Exception as E
main = do
x <- atomically (newTVar 0)
y <- atomically (newTVar 1)
atomically (always $ invariant x y)
updates x y `E.catch` \(e :: E.SomeException) -> print e
print =<< atomically (readTVar x)
print =<< atomically (readTVar y)
updates x y = do
putStrLn "first"
atomically (writeTVar x 25) -- this should add `y' to the watched variables for the invariant
putStrLn "second"
atomically (writeTVar y 10) -- this should fail
putStrLn "third"
atomically (writeTVar x 25)
putStrLn "fourth"
-- check that x*y < 100
invariant :: TVar Integer -> TVar Integer -> STM Bool
invariant x y = do
xv <- readTVar x
if xv == 0 then return True
else do
yv <- readTVar y
return (xv*yv < 100)
|
seereason/ghcjs
|
test/conc/stm004.hs
|
mit
| 875 | 0 | 13 | 214 | 305 | 144 | 161 | 26 | 2 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
{-# LANGUAGE TypeFamilies, EmptyCase, LambdaCase #-}
{-# LANGUAGE UndecidableInstances #-}
-- Check some DataFamilies, warning appearance and other stuff
module EmptyCase005 where
data Void
newtype Void2 = Void2 Void
data Void3 = Void3 Void
-- Exhaustive
f1 :: Void2 -> Bool
f1 x = case x of {}
-- > f1 undefined
-- *** Exception: Prelude.undefined
--
-- > f1 (Void2 undefined)
-- *** Exception: Prelude.undefined
-- Non-exhaustive: missing (Void3 _)
f2 :: Void3 -> Bool
f2 x = case x of {}
-- > f2 undefined
-- *** Exception: Prelude.undefined
--
-- > f2 (Void3 undefined)
-- *** Exception: Void.hs:31:7-10: Non-exhaustive patterns in case
newtype V1 = V1 Void
newtype V2 = V2 V1
newtype V3 = V3 V2
newtype V4 = V4 V3
-- Exhaustive
f3 :: V4 -> Bool
f3 x = case x of {}
-- > v undefined
-- *** Exception: Prelude.undefined
--
-- > v (V4 undefined)
-- *** Exception: Prelude.undefined
--
-- > v (V4 (V3 undefined))
-- *** Exception: Prelude.undefined
--
-- > v (V4 (V3 (V2 undefined)))
-- *** Exception: Prelude.undefined
--
-- > v (V4 (V3 (V2 (V1 undefined))))
-- *** Exception: Prelude.undefined
-- Exhaustive
type family A a
type instance A Bool = V4
f4 :: A Bool -> Bool
f4 x = case x of {}
data family T a
data instance T () = T1 | T2
-- Non-exhaustive: missing both T1 & T2
f5 :: T () -> Bool
f5 x = case x of {}
newtype instance T Bool = MkTBool Bool
-- Non-exhaustive: missing both (MkTBool True) & (MkTBool False)
f6 :: T Bool -> Bool
f6 x = case x of {}
newtype instance T Int = MkTInt Char
-- Non-exhaustive: missing (MkTInt _)
f7 :: T Int -> Bool
f7 x = case x of {}
newtype V = MkV Bool
type family F a
type instance F Bool = V
type family G a
type instance G Int = F Bool
-- Non-exhaustive: missing MkV True & MkV False
f8 :: G Int -> Bool
f8 x = case x of {}
type family H a
type instance H Int = H Bool
type instance H Bool = H Char
-- Non-exhaustive: missing (_ :: H Char)
-- (H Int), (H Bool) and (H Char) are all the same and stuck, but we want to
-- show the latest rewrite, that is, (H Char) and not (H Int) or (H Bool).
f9 :: H Int -> Bool
f9 x = case x of {}
|
ezyang/ghc
|
testsuite/tests/pmcheck/should_compile/EmptyCase005.hs
|
bsd-3-clause
| 2,154 | 0 | 7 | 453 | 461 | 279 | 182 | -1 | -1 |
module Main where
import UnitTesting.UnitTests
import Test.QuickCheck
main :: IO ()
main = quickCheck (prop_idempotent :: [Integer] -> Bool)
|
iduhetonas/haskell-projects
|
main.hs
|
mit
| 142 | 0 | 8 | 20 | 45 | 26 | 19 | 5 | 1 |
{-
Copyright (c) 2006-2011 John Goerzen <[email protected]>
All rights reserved.
For license and copyright information, see the file LICENSE
-}
{- |
Module : Data.Quantity
Copyright : Copyright (C) 2006-2011 John Goerzen
License : BSD3
Maintainer : John Goerzen <[email protected]>
Stability : provisional
Portability: portable
Tools for rendering sizes
Written by John Goerzen, jgoerzen\@complete.org -}
module Data.Quantity (
renderNum,
renderNums,
parseNum,
parseNumInt,
quantifyNum,
quantifyNums,
SizeOpts(..),
binaryOpts,
siOpts
)
where
import Data.List
import Text.Printf
import Data.Char
{- | The options for 'quantifyNum' and 'renderNum' -}
data SizeOpts = SizeOpts { base :: Int, -- ^ The base from which calculations are made
powerIncr :: Int, -- ^ The increment to the power for each new suffix
firstPower :: Int, -- ^ The first power for which suffixes are given
suffixes :: String -- ^ The suffixes themselves
}
{- | Predefined definitions for byte measurement in groups of 1024, from 0 to
2**80 -}
binaryOpts :: SizeOpts
binaryOpts = SizeOpts {base = 2,
firstPower = 0,
suffixes = " KMGTPEZY",
powerIncr = 10}
{- | Predefined definitions for SI measurement, from 10**-24 to 10**24. -}
siOpts :: SizeOpts
siOpts = SizeOpts {base = 10,
firstPower = -24,
suffixes = "yzafpnum kMGTPEZY",
powerIncr = 3}
{- | Takes a number and returns a new (quantity, suffix) combination.
The space character is used as the suffix for items around 0. -}
quantifyNum :: (Ord a, Real a, Floating b, Ord b) => SizeOpts -> a -> (b, Char)
quantifyNum opts n = (\(x, s) -> (head x, s)) $ quantifyNums opts [n]
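-- For example (a hypothetical GHCi session, mirroring the 'renderNum'
-- examples below):
--
-- > Data.Quantity> quantifyNum binaryOpts 1048576 :: (Double, Char)
-- > (1.0,'M')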
{- | Like 'quantifyNum', but takes a list of numbers. The first number in
the list will be evaluated for the suffix. The same suffix and scale will
be used for the remaining items in the list. Please see 'renderNums' for
an example of how this works.
It is invalid to use this function on an empty list. -}
quantifyNums :: (Ord a, Real a, Floating b, Ord b) => SizeOpts -> [a] -> ([b], Char)
quantifyNums _ [] = error "Attempt to use quantifyNums on an empty list"
quantifyNums opts (headnum:xs) =
(map (\n -> procnum n) (headnum:xs), suffix)
where number = case fromRational . toRational $ headnum of
0 -> 1
x -> x
incrList = map idx2pwr [0..length (suffixes opts) - 1]
incrIdxList = zip incrList [0..]
idx2pwr i = i * powerIncr opts + firstPower opts
finderfunc (x, _) = (fromIntegral $ base opts) ** (fromIntegral x)
<= (abs number)
-- Find the largest item that does not exceed the number given.
-- If the number is larger than the larger item in the list,
-- that's fine; we'll just write it in terms of what we have.
(usedexp, expidx) =
case find finderfunc (reverse incrIdxList) of
Just x -> x
Nothing -> head incrIdxList -- If not found, it's smaller than the first
suffix = (suffixes opts !! (fromIntegral expidx))
procnum n = (fromRational . toRational $ n) /
((fromIntegral (base opts) ** (fromIntegral usedexp)))
--(posres, possuf) = quantifyNum opts (headnum * (-1))
{- | Render a number into a string, based on the given quantities. This is
useful for displaying quantities in terms of bytes or in SI units. Give this
function the 'SizeOpts' for the desired output, and a precision (number of
digits to the right of the decimal point), and you get a string output.
Here are some examples:
> Data.Quantity> renderNum binaryOpts 0 1048576
> "1M"
> Data.Quantity> renderNum binaryOpts 2 10485760
> "10.00M"
> Data.Quantity> renderNum binaryOpts 3 1048576
> "1.000M"
> Data.Quantity> renderNum binaryOpts 3 1500000
> "1.431M"
> Data.Quantity> renderNum binaryOpts 2 (1500 ** 3)
> "3.14G"
> Data.Quantity> renderNum siOpts 2 1024
> "1.02k"
> Data.Quantity> renderNum siOpts 2 1048576
> "1.05M"
> Data.Quantity> renderNum siOpts 2 0.001
> "1.00m"
> Data.Quantity> renderNum siOpts 2 0.0001
> "100.00u"
If you want more control over the output, see 'quantifyNum'. -}
renderNum :: (Ord a, Real a) =>
SizeOpts
-> Int -- ^ Precision of the result
-> a -- ^ The number to examine
-> String
renderNum opts prec number =
(printf ("%." ++ show prec ++ "g") num) ++ [suffix]
where (num, suffix) = (quantifyNum opts number)::(Double, Char)
{- | Like 'renderNum', but operates on a list of numbers. The first number
in the list will be evaluated for the suffix. The same suffix and scale will
be used for the remaining items in the list. See 'renderNum' for more
examples.
Also, unlike 'renderNum', the %f instead of %g printf format is used so that
\"scientific\" notation is avoided in the output.
Examples:
> *Data.Quantity> renderNums binaryOpts 3 [1500000, 10240, 104857600]
> ["1.431M","0.010M","100.000M"]
> *Data.Quantity> renderNums binaryOpts 3 [1500, 10240, 104857600]
> ["1.465K","10.000K","102400.000K"]
-}
renderNums :: (Ord a, Real a) =>
SizeOpts
-> Int -- ^ Precision of the result
-> [a] -- ^ The numbers to examine
-> [String] -- ^ Result
renderNums opts prec numbers =
map printit convnums
where printit num =
(printf ("%." ++ show prec ++ "f") num) ++ [suffix]
(convnums, suffix) =
(quantifyNums opts numbers)::([Double], Char)
{- | Parses a String, possibly generated by 'renderNum'. Parses the suffix
and applies it to the number, which is read via the Read class.
Returns Left "error message" on error, or Right number on successful parse.
If you want an Integral result, the convenience function 'parseNumInt' is for
you.
-}
parseNum :: (Read a, Fractional a) =>
SizeOpts -- ^ Information on how to parse this data
-> Bool -- ^ Whether to perform a case-insensitive match
-> String -- ^ The string to parse
-> Either String a
parseNum opts insensitive inp =
case reads inp of
[] -> Left "Couldn't parse numeric component of input"
[(num, "")] -> Right num -- No suffix; pass number unhindered
[(num, [suffix])] ->
case lookup (caseTransformer suffix) suffixMap of
Nothing -> Left $ "Unrecognized suffix " ++ show suffix
Just power -> Right $ num * multiplier power
[(_, suffix)] -> Left $ "Multi-character suffix " ++ show suffix
_ -> Left "Multiple parses for input"
where suffixMap = zip (map caseTransformer . suffixes $ opts)
(iterate (+ (powerIncr opts)) (firstPower opts))
caseTransformer x
| insensitive = toLower x
| otherwise = x
multiplier :: (Read a, Fractional a) => Int -> a
multiplier power =
fromRational . toRational $
fromIntegral (base opts) ** fromIntegral power
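{- A couple of illustrative parses (hypothetical GHCi session, not part of
the original documentation):
> Data.Quantity> parseNum siOpts True "1.5k" :: Either String Double
> Right 1500.0
> Data.Quantity> parseNum siOpts True "10q" :: Either String Double
> Left "Unrecognized suffix 'q'"
-}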
{- | Parse a number as with 'parseNum', but return the result as
an 'Integral'. Any type such as Integer, Int, etc. can be used for the
result type.
This function simply calls 'round' on the result of 'parseNum'. A
'Double' is used internally for the parsing of the numeric component.
By using this function, a user can still say something like 1.5M and get an
integral result. -}
parseNumInt :: (Read a, Integral a) =>
SizeOpts -- ^ Information on how to parse this data
-> Bool -- ^ Whether to perform a case-insensitive match
-> String -- ^ The string to parse
-> Either String a
parseNumInt opts insensitive inp =
case (parseNum opts insensitive inp)::Either String Double of
Left x -> Left x
Right n -> Right (round n)
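{- For instance (hypothetical GHCi session; binaryOpts treats the M suffix
as 2**20, and the fractional part is applied before rounding):
> Data.Quantity> parseNumInt binaryOpts True "1.5M" :: Either String Integer
> Right 1572864
-}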
|
haskellbr/missingh
|
missingh-all/src/Data/Quantity.hs
|
mit
| 8,470 | 0 | 13 | 2,628 | 1,302 | 708 | 594 | 100 | 6 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.HTMLBaseFontElement
(js_setColor, setColor, js_getColor, getColor, js_setFace, setFace,
js_getFace, getFace, js_setSize, setSize, js_getSize, getSize,
HTMLBaseFontElement, castToHTMLBaseFontElement,
gTypeHTMLBaseFontElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"color\"] = $2;" js_setColor
:: JSRef HTMLBaseFontElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLBaseFontElement.color Mozilla HTMLBaseFontElement.color documentation>
setColor ::
(MonadIO m, ToJSString val) => HTMLBaseFontElement -> val -> m ()
setColor self val
= liftIO
(js_setColor (unHTMLBaseFontElement self) (toJSString val))
foreign import javascript unsafe "$1[\"color\"]" js_getColor ::
JSRef HTMLBaseFontElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLBaseFontElement.color Mozilla HTMLBaseFontElement.color documentation>
getColor ::
(MonadIO m, FromJSString result) => HTMLBaseFontElement -> m result
getColor self
= liftIO
(fromJSString <$> (js_getColor (unHTMLBaseFontElement self)))
foreign import javascript unsafe "$1[\"face\"] = $2;" js_setFace ::
JSRef HTMLBaseFontElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLBaseFontElement.face Mozilla HTMLBaseFontElement.face documentation>
setFace ::
(MonadIO m, ToJSString val) => HTMLBaseFontElement -> val -> m ()
setFace self val
= liftIO (js_setFace (unHTMLBaseFontElement self) (toJSString val))
foreign import javascript unsafe "$1[\"face\"]" js_getFace ::
JSRef HTMLBaseFontElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLBaseFontElement.face Mozilla HTMLBaseFontElement.face documentation>
getFace ::
(MonadIO m, FromJSString result) => HTMLBaseFontElement -> m result
getFace self
= liftIO
(fromJSString <$> (js_getFace (unHTMLBaseFontElement self)))
foreign import javascript unsafe "$1[\"size\"] = $2;" js_setSize ::
JSRef HTMLBaseFontElement -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLBaseFontElement.size Mozilla HTMLBaseFontElement.size documentation>
setSize :: (MonadIO m) => HTMLBaseFontElement -> Int -> m ()
setSize self val
= liftIO (js_setSize (unHTMLBaseFontElement self) val)
foreign import javascript unsafe "$1[\"size\"]" js_getSize ::
JSRef HTMLBaseFontElement -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLBaseFontElement.size Mozilla HTMLBaseFontElement.size documentation>
getSize :: (MonadIO m) => HTMLBaseFontElement -> m Int
getSize self = liftIO (js_getSize (unHTMLBaseFontElement self))
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/HTMLBaseFontElement.hs
|
mit
| 3,576 | 42 | 11 | 506 | 831 | 473 | 358 | 56 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Oczor.Converter.CodeGenAst (module Oczor.Converter.CodeGenAst) where
import Data.Functor.Foldable.TH
import Data.Functor.Foldable
import ClassyPrelude
import Oczor.Utl
type Name = String
data Lits =
LitNull |
LitBool Bool |
LitChar Char |
LitDouble Double |
LitInt Int |
LitString String
deriving (Eq, Ord, Show)
data Ast =
None |
Lit Lits |
UniqObject String |
Ident Name |
NotEqual Ast Ast |
Operator String [Ast] |
Equal Ast Ast |
Var Name Ast |
Set Ast Ast |
Throw String |
Scope [Ast] Ast |
StmtList [Ast] |
BoolAnds [Ast] |
Array [Ast] |
Return Ast |
HasField Ast Name |
Label Name Ast |
Field Ast Name |
ConditionOperator Ast Ast Ast |
Code String |
Call Ast [Ast] |
Parens Ast |
If Ast [Ast] [Ast] |
Object [(Name, Ast)] |
Function [String] [Ast]
deriving (Show, Eq, Ord)
makeBaseFunctor ''Ast
scopeToFunc (ScopeF [] y) = y
scopeToFunc (ScopeF x y) = CallF (Parens (Function [] (embed <$> x <> [ReturnF $ embed y]))) []
-- pattern Scope x <- Function _ x
getVarName (Var x _) = Just x
getVarName _ = Nothing
isFunction Function{} = True
isFunction _ = False
astToList (StmtList x) = x
astToList x = [x]
litString = Lit . LitString
setField obj label expr = Set (Field obj label) expr
emptyObject = Object []
containsIdents :: [String] -> Ast -> [String]
containsIdents list = cata $ \case
IdentF x | oelem x list -> [x]
x -> ffold x
|
ptol/oczor
|
src/Oczor/Converter/CodeGenAst.hs
|
mit
| 1,496 | 0 | 14 | 335 | 571 | 316 | 255 | -1 | -1 |
module Parser.Reader where
import Text.ParserCombinators.Parsec (parse)
import Parser.Parser
import Parser.Types.LispVal
readExpr :: String -> LispVal
readExpr input = case parse parseExpr "lisp" input of
Left err -> String $ "No match: " ++ show err
Right val -> val
|
slogsdon/haskell-exercises
|
write-a-scheme/evaluation-part1/src/Parser/Reader.hs
|
mit
| 310 | 0 | 9 | 81 | 85 | 45 | 40 | 8 | 2 |
module Data.TargetPlatform
( allTargetPlatforms
, TargetPlatform (..)
) where
import Data.Char
import Data.Maybe
import Text.Read
import qualified Text.Read.Lex as L
data TargetPlatform = IOS | MacOS | TVOS | WatchOS
deriving (Ord, Eq)
instance Show TargetPlatform where
show IOS = "iOS"
show MacOS = "macOS"
show TVOS = "tvOS"
show WatchOS = "watchOS"
allTargetPlatforms :: [TargetPlatform]
allTargetPlatforms = [IOS, MacOS, TVOS, WatchOS]
instance Read TargetPlatform where
readPrec = parens $ do
L.Ident s <- lexP
case map toLower s of
"ios" -> return IOS
"macos" -> return MacOS
"tvos" -> return TVOS
"watchos" -> return WatchOS
a -> error $ "Unrecognized platform " ++ a
|
r-peck/Rome
|
src/Data/TargetPlatform.hs
|
mit
| 824 | 0 | 13 | 253 | 225 | 122 | 103 | 25 | 1 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
module TypeCheck where
import Foundation (($),(.),Maybe(Just,Nothing),pure,(<>))
import Foundation.Collection (reverse)
import Prelude (Show)
import qualified Prelude as P (show,error)
import GHC.Stack (HasCallStack)
import Control.Monad.Logger (MonadLogger)
import Control.Monad.Logger.CallStack (logDebug)
import Control.Monad.Except (withExceptT)
import Control.Monad (foldM)
import Data.Text (Text,pack,unpack)
import Data.Maybe (maybe)
import Syntax (EType,Term(Var,Type,Pi,Lam,App,Ann,Pos,Paren,Let,Sig,Def,Sigma,Prod),ETerm(EVar,EType,EPi,ELam,EApp,ELet,ESig,EDef,ESigma,EProd,EAnn,ECase))
import Environment (Env,lookupSig,extendCtxSig,extendCtxDef,extendSourceLocation)
import Equal (Whnf(Whnf),whnf,equate,ensurePi)
import Substitution (subst)
import Error (ResultM,throwErr,err,Error(NotInScope,NotEqual,LambdaMustHaveFunctionType,TypesDontMatch,AppTypesDontMatch,CouldNotInferType,ExpectedType,MustAnnotateLambda))
show :: Show a => a -> Text
show = pack . P.show
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> import Environment (emptyEnv)
inferType :: (MonadLogger m, HasCallStack) => Env -> Term -> ResultM m (ETerm,EType)
inferType env term = do
logDebug $ "inferType: " <> show term
tcTerm env term Nothing
checkType :: (MonadLogger m, HasCallStack) => Env -> Term -> EType -> ResultM m (ETerm,EType)
checkType env term expectedType = do
logDebug $ "checkType: " <> show term <> "\n " <> show expectedType
tcTerm env term $ Just $ whnf env expectedType
tcType :: (MonadLogger m, HasCallStack) => Env -> Term -> ResultM m (ETerm,EType)
tcType env term = withExceptT ((err $ ExpectedType env term) <>) $ do
logDebug $ "tcType: " <> show term
checkType env term EType
logAndReturn :: MonadLogger m => ETerm -> EType -> ResultM m (ETerm, EType)
logAndReturn eTerm eType = do
logDebug $ "tcTerm " <> typeName eTerm <> ": returning elaborated term and type:\n " <> show eTerm <> "\n " <> show eType
pure (eTerm, eType)
where typeName eTerm = case eTerm of
EType{} -> "Type"
EVar{} -> "Var"
ELam{} -> "Lam"
EApp{} -> "App"
EPi{} -> "Pi"
EAnn{} -> "Ann"
ELet{} -> "Let"
ESig{} -> "Sig"
EDef{} -> "Def"
ESigma{} -> "Sigma"
EProd{} -> "Prod"
ECase{} -> "Case"
tcTerm :: (MonadLogger m, HasCallStack) => Env -> Term -> Maybe Whnf -> ResultM m (ETerm,EType)
tcTerm env term@(Var name) Nothing = do
logDebug $ "tcTerm: " <> show term
case lookupSig env name of
Just sig -> logAndReturn (EVar term sig) sig
Nothing -> throwErr $ NotInScope env name
tcTerm _ term@Type Nothing = do
logDebug $ "tcTerm: " <> show term
logAndReturn EType EType
tcTerm env term@(Pi mname a b) Nothing = do
logDebug $ "tcTerm: " <> show term
(eTermA,_) <- tcType env a
(eTermB,_) <- tcType (maybe env (\name -> extendCtxSig name eTermA env) mname) b
logAndReturn (EPi term eTermA eTermB) EType
tcTerm env term@(Lam name mtype body) w@(Just (Whnf expected@(EPi _ a b))) = do
logDebug $ "tcTerm: " <> show term <> "\n " <> show w
aa <- case mtype of
Just typeAnnot -> do
(eTerm,_) <- tcType env typeAnnot
res <- equate env a eTerm
if res then pure a
else throwErr $ NotEqual env a typeAnnot
Nothing -> pure a
(eBody, tBody) <- checkType (extendCtxSig name aa env) body b
logAndReturn (ELam term aa eBody tBody) expected
tcTerm env term@Lam{} (Just (Whnf expected)) = throwErr $ LambdaMustHaveFunctionType env term expected
tcTerm env term@(Lam _ Nothing _) Nothing = throwErr $ MustAnnotateLambda env term
tcTerm env term@(Lam name (Just typeAnnot) body) Nothing = do
logDebug $ "tcTerm: " <> show term
(eTerm,_) <- tcType env typeAnnot
(eBody, tBody) <- inferType (extendCtxSig name eTerm env) body
logAndReturn (ELam term eTerm eBody tBody) (EPi Type eTerm tBody)
tcTerm env term@(App f arg) Nothing = do
logDebug $ "tcTerm: " <> show term
(eF, tF) <- inferType env f
(mname, eA, eB) <- ensurePi env tF
logDebug $ "Checking if application parameter (" <> show arg <> ") matches function argument type (" <> show eA <> ")"
(eArg, _) <- withExceptT ((<>) $ err $ AppTypesDontMatch env term eA arg) $ checkType env arg eA
let substitutedB = subst mname eArg eB
logAndReturn (EApp term eF eArg substitutedB) substitutedB
tcTerm env term@(Ann annotatedTerm typeAnnot) Nothing = do
logDebug $ "tcTerm: " <> show term
(eTypeAnnot,_) <- tcType env typeAnnot
(eAnnotatedTerm, tAnnotatedTerm) <- checkType env annotatedTerm eTypeAnnot
logAndReturn eAnnotatedTerm tAnnotatedTerm
tcTerm env (Pos sourcePos term) expected = tcTerm (extendSourceLocation sourcePos env) term expected
tcTerm env (Paren term) expected = tcTerm env term expected
tcTerm env term@(Let exprs body) expected = do
logDebug $ "tcTerm: " <> show term <> "\n " <> show expected
let
tcLetExpr :: MonadLogger m => (Env, [ETerm]) -> Term -> ResultM m (Env,[ETerm])
tcLetExpr a (Pos _ trm) = tcLetExpr a trm
tcLetExpr (env,eExprs) orig@(Sig name typeSig) = do
(eTypeSig,_) <- inferType env typeSig
pure (extendCtxSig name eTypeSig env, ESig orig eTypeSig : eExprs)
tcLetExpr (env,eExprs) orig@(Def name trm) = case lookupSig env name of
Nothing -> throwErr $ NotInScope env name
Just typeSig -> do
(eTerm, tTerm) <- checkType env trm typeSig
pure (extendCtxDef name eTerm env, EDef orig eTerm tTerm : eExprs)
(newEnv, eExprs) <- foldM tcLetExpr (env,[]) exprs
(eBody, tBody) <- tcTerm newEnv body expected
logAndReturn (ELet term (reverse eExprs) eBody tBody) tBody
tcTerm env term@(Sig _ typeSig) expected = do
logDebug $ "tcTerm: " <> show term <> "\n " <> show expected
(eTypeSig, _) <- inferType env typeSig
logAndReturn (ESig term eTypeSig) eTypeSig
tcTerm env term@(Def _ trm) expected = do
logDebug $ "tcTerm: " <> show term <> "\n " <> show expected
(eTrm, tTrm) <- tcTerm env trm expected
logAndReturn (EDef term eTrm tTrm) tTrm
tcTerm env term@(Sigma (Just name) (Just aType) (Just bType)) Nothing = do
logDebug $ "tcTerm: " <> show term
(eAType,_) <- tcType env aType
(eBType,_) <- tcType (extendCtxSig name eAType env) bType
logAndReturn (ESigma term (Just eAType) (Just eBType)) EType
tcTerm env term@(Sigma _ (Just aType) Nothing) Nothing = do
logDebug $ "tcTerm: " <> show term
(eAType,_) <- tcType env aType
logAndReturn (ESigma term (Just eAType) Nothing) EType
tcTerm _ term@(Sigma Nothing Nothing Nothing) Nothing = do
logDebug $ "tcTerm: " <> show term
logAndReturn (ESigma term Nothing Nothing) EType
tcTerm _ term@(Prod Nothing Nothing) Nothing = do
logDebug $ "tcTerm: " <> show term
let eType = ESigma (Sigma Nothing Nothing Nothing) Nothing Nothing
logAndReturn (EProd term Nothing Nothing eType) eType
tcTerm env term@(Prod (Just a) mb) Nothing = do
logDebug $ "tcTerm: " <> show term
(eA,tA) <- inferType env a
(meB, mtB) <- case mb of
Nothing -> pure (Nothing, Nothing)
Just b -> do
(eB,tB) <- inferType env b
pure (Just eB, Just tB)
let eType = ESigma Type (Just tA) mtB
logAndReturn (EProd term (Just eA) meB eType) eType
tcTerm env term@(Prod (Just a) mb) (Just (Whnf expected@(ESigma _ (Just aType) mbType))) = do
logDebug $ "tcTerm: " <> show term <> "\n " <> show expected
(eA,tA) <- checkType env a aType
(meB, mtB) <- case (mb,mbType) of
(Just b, Just bType) -> do
(eB, tB) <- checkType env b bType
pure (Just eB, Just tB)
(Nothing, Nothing) -> pure (Nothing,Nothing)
_ -> throwErr $ NotEqual env expected term
let eType = ESigma Type (Just tA) mtB
logAndReturn (EProd term (Just eA) meB eType) eType
tcTerm env term (Just (Whnf expected)) = do
logDebug $ "tcTerm: " <> show term <> "\n " <> show expected
withExceptT ((<>) $ err $ CouldNotInferType env term) $ do
(eTerm, tTerm) <- inferType env term
res <- equate env tTerm expected
if res then logAndReturn eTerm expected
else throwErr $ TypesDontMatch env term expected tTerm
tcTerm _ term expected = P.error $ unpack $ "(" <> show term <> ") (" <> show expected <> ")"
|
jyrimatti/alava
|
src-lib/TypeCheck.hs
|
mit
| 8,683 | 0 | 18 | 2,143 | 3,386 | 1,726 | 1,660 | -1 | -1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Language.C.Inline.ParseSpec (spec) where
import Control.Exception (evaluate)
import Control.Monad (void)
import Control.Monad.Trans.Class (lift)
import qualified Data.HashSet as HashSet
import Data.Monoid ((<>))
import qualified Test.Hspec as Hspec
import Text.Parser.Char
import Text.Parser.Combinators
import Text.RawString.QQ (r)
import Text.Regex.Posix ((=~))
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<*), (*>))
#endif
import Language.C.Inline.Context
import Language.C.Inline.HaskellIdentifier
import Language.C.Inline.Internal
import qualified Language.C.Types as C
spec :: Hspec.SpecWith ()
spec = do
Hspec.describe "parsing" $ do
Hspec.it "parses simple C expression" $ do
(retType, params, cExp) <- goodParse [r|
int { (int) ceil($(double x) + ((double) $(float y))) }
|]
retType `Hspec.shouldBe` (cty "int")
params `shouldMatchParameters` [(cty "double", Plain "x"), (cty "float", Plain "y")]
cExp `shouldMatchBody` " (int) ceil(x[a-z0-9_]+ \\+ ((double) y[a-z0-9_]+)) "
Hspec.it "accepts anti quotes" $ do
void $ goodParse [r| int { $(int x) } |]
Hspec.it "accepts anti quotes with pointer" $ do
void $ goodParse [r| int* { $(int* x) } |]
Hspec.it "rejects if bad braces (1)" $ do
badParse [r| int x |]
Hspec.it "rejects if bad braces (2)" $ do
badParse [r| int { x |]
Hspec.it "parses function pointers" $ do
void $ goodParse [r| int(int (*add)(int, int)) { add(3, 4) } |]
Hspec.it "parses returning function pointers" $ do
(retType, params, cExp) <-
goodParse [r| double (*)(double) { &cos } |]
retType `Hspec.shouldBe` (cty "double (*)(double)")
params `shouldMatchParameters` []
cExp `shouldMatchBody` " &cos "
Hspec.it "parses Haskell identifier (1)" $ do
(retType, params, cExp) <- goodParse [r| double { $(double x') } |]
retType `Hspec.shouldBe` (cty "double")
params `shouldMatchParameters` [(cty "double", Plain "x'")]
cExp `shouldMatchBody` " x[a-z0-9_]+ "
Hspec.it "parses Haskell identifier (2)" $ do
(retType, params, cExp) <- goodParse [r| double { $(double ä') } |]
retType `Hspec.shouldBe` (cty "double")
params `shouldMatchParameters` [(cty "double", Plain "ä'")]
cExp `shouldMatchBody` " [a-z0-9_]+ "
Hspec.it "parses Haskell identifier (3)" $ do
(retType, params, cExp) <- goodParse [r| int { $(int Foo.bar) } |]
retType `Hspec.shouldBe` (cty "int")
params `shouldMatchParameters` [(cty "int", Plain "Foo.bar")]
cExp `shouldMatchBody` " Foobar[a-z0-9_]+ "
Hspec.it "does not parse Haskell identifier in bad position" $ do
badParse [r| double (*)(double Foo.bar) { 3.0 } |]
where
ctx = baseCtx <> funCtx
assertParse ctxF p s =
case C.runCParser (ctxF HashSet.empty) "spec" s (lift spaces *> p <* lift eof) of
Left err -> error $ "Parse error (assertParse): " ++ show err
Right x -> x
-- We use show + length to fully evaluate the result -- there
-- might be exceptions hiding. TODO get rid of exceptions.
strictParse
:: String
-> IO (C.Type C.CIdentifier, [(C.CIdentifier, C.Type C.CIdentifier, ParameterType)], String)
strictParse s = do
let ParseTypedC retType pars body =
assertParse (haskellCParserContext True) (parseTypedC True (ctxAntiQuoters ctx)) s
void $ evaluate $ length $ show (retType, pars, body)
return (retType, pars, body)
goodParse = strictParse
badParse s = strictParse s `Hspec.shouldThrow` Hspec.anyException
cty :: String -> C.Type C.CIdentifier
cty s = C.parameterDeclarationType $
assertParse (C.cCParserContext True) C.parseParameterDeclaration s
shouldMatchParameters
:: [(C.CIdentifier, C.Type C.CIdentifier, ParameterType)]
-> [(C.Type C.CIdentifier, ParameterType)]
-> Hspec.Expectation
shouldMatchParameters pars pars' =
[(x, y) | (_, x, y) <- pars] `Hspec.shouldMatchList` pars'
shouldMatchBody :: String -> String -> Hspec.Expectation
shouldMatchBody x y = do
let f ch' = case ch' of
'(' -> "\\("
')' -> "\\)"
ch -> [ch]
(x =~ concatMap f y) `Hspec.shouldBe` True
|
fpco/inline-c
|
inline-c/test/Language/C/Inline/ParseSpec.hs
|
mit
| 4,700 | 0 | 16 | 1,137 | 1,258 | 689 | 569 | 97 | 4 |
module GHCJS.DOM.StorageErrorCallback (
) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/StorageErrorCallback.hs
|
mit
| 50 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
-- | Static tiles and collections
module Game.Mahjong.Static.Tiles (
-- ** Individual tiles
c1, c2, c3, c4, c5, c6, c7, c8, c9,
b1, b2, b3, b4, b5, b6, b7, b8, b9,
k1, k2, k3, k4, k5, k6, k7, k8, k9,
wn, ws, we, ww,
dr, dg, dw,
f1, f2, f3, f4,
s1, s2, s3, s4,
-- ** Tile collections
coins, bamboos, characters, winds, dragons, flowers, seasons,
simples, terminals, suits, honors, edges, bonuses,
reds, greens,
regulars, allTiles,
) where
import Game.Mahjong.Tile
-------------------------------------------------------------------------------
-- Tile Aliases
-------------------------------------------------------------------------------
c1, c2, c3, c4, c5, c6, c7, c8, c9 :: Tile
c1 = mkCoin One
c2 = mkCoin Two
c3 = mkCoin Three
c4 = mkCoin Four
c5 = mkCoin Five
c6 = mkCoin Six
c7 = mkCoin Seven
c8 = mkCoin Eight
c9 = mkCoin Nine
b1, b2, b3, b4, b5, b6, b7, b8, b9 :: Tile
b1 = mkBamboo One
b2 = mkBamboo Two
b3 = mkBamboo Three
b4 = mkBamboo Four
b5 = mkBamboo Five
b6 = mkBamboo Six
b7 = mkBamboo Seven
b8 = mkBamboo Eight
b9 = mkBamboo Nine
k1, k2, k3, k4, k5, k6, k7, k8, k9 :: Tile
k1 = mkCharacter One
k2 = mkCharacter Two
k3 = mkCharacter Three
k4 = mkCharacter Four
k5 = mkCharacter Five
k6 = mkCharacter Six
k7 = mkCharacter Seven
k8 = mkCharacter Eight
k9 = mkCharacter Nine
we, ws, ww, wn :: Tile
we = mkWind East
ws = mkWind South
ww = mkWind West
wn = mkWind North
dr, dg, dw :: Tile
dr = mkDragon Red
dg = mkDragon Green
dw = mkDragon White
f1, f2, f3, f4 :: Tile
f1 = mkFlower PlumBlossom
f2 = mkFlower Orchid
f3 = mkFlower Chrysanthemum
f4 = mkFlower BambooTree
s1, s2, s3, s4 :: Tile
s1 = mkSeason Spring
s2 = mkSeason Summer
s3 = mkSeason Autumn
s4 = mkSeason Winter
-------------------------------------------------------------------------------
-- Tile collections
-------------------------------------------------------------------------------
-- | List of coin tiles.
coins :: [Tile]
coins = map mkCoin [One ..]
-- | List of bamboo tiles.
bamboos :: [Tile]
bamboos = map mkBamboo [One ..]
-- | List of character tiles.
characters :: [Tile]
characters = map mkCharacter [One ..]
-- | List of wind tiles.
winds :: [Tile]
winds = map mkWind [East ..]
-- | List of dragon tiles.
dragons :: [Tile]
dragons = map mkDragon [Red ..]
-- | List of flower tiles.
flowers :: [Tile]
flowers = map mkFlower [PlumBlossom ..]
-- | List of season tiles.
seasons :: [Tile]
seasons = map mkSeason [Spring ..]
-- | List of simple tiles.
simples :: [Tile]
simples = [coins, bamboos, characters] >>= (\x -> tail . init $ x)
-- | List of terminal tiles.
terminals :: [Tile]
terminals = [coins, bamboos, characters] >>= (\x -> [head x, last x])
-- | List of suit tiles.
suits :: [Tile]
suits = coins ++ bamboos ++ characters
-- | List of honor tiles.
honors :: [Tile]
honors = winds ++ dragons
-- | List of edge tiles.
edges :: [Tile]
edges = terminals ++ honors
-- | List of bonus tiles.
bonuses :: [Tile]
bonuses = flowers ++ seasons
-- | List of red tiles.
reds :: [Tile]
reds = map mkBamboo [One, Five, Seven, Nine] ++ [mkDragon Red]
-- | List of green tiles.
greens :: [Tile]
greens = map mkBamboo [Two, Three, Four, Six, Eight] ++ [mkDragon Green]
-- | List of blue tiles.
blues :: [Tile]
blues = [mkCoin Eight] ++ winds ++ [mkDragon White]
-- | List of all regular tiles without bonus tiles.
regulars :: [Tile]
regulars = concat [coins, bamboos, characters, winds, dragons]
-- | List containing all tiles
allTiles :: [Tile]
allTiles = regulars ++ bonuses
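-- Quick sanity checks over the collections above (a standard set has 34
-- distinct regular tiles plus 8 bonus tiles):
--
-- >>> length regulars
-- 34
-- >>> length allTiles
-- 42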
|
gspindles/mj-score-eval
|
src/Game/Mahjong/Static/Tiles.hs
|
mit
| 3,681 | 0 | 9 | 830 | 1,140 | 696 | 444 | 98 | 1 |
module Common.Debug (
trace,
traceList
) where
import Data.List
import qualified Debug.Trace as Trace
trace :: Show a => String -> a -> a
trace t a = Trace.trace (t ++ " = " ++ show a) a
-- trace _ = id
traceList :: Show a => String -> [a] -> [a]
traceList t as = Trace.trace (t ++ " = [\n\t" ++ intercalate "\n\t" (map show as) ++ "\n]") as
--traceList _ = id
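-- Example usage: 'trace "total" (1 + 2)' emits "total = 3" via Debug.Trace
-- and returns 3, while 'traceList "xs" [1,2]' prints one element per line
-- and returns the list unchanged.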
|
Chatanga/kage
|
src/Common/Debug.hs
|
mit
| 372 | 0 | 11 | 87 | 151 | 81 | 70 | 9 | 1 |
module SpaceState.Space(runGame, Difficulty(..))
where
import System.Random
import Data.List
import Data.Maybe
import Control.Monad
import Control.Monad.State as State
import Prelude hiding (catch)
import Graphics.UI.SDL as SDL
import Graphics.Rendering.FTGL as FTGL
import Entity
import Camera
import AObject
import Combat
import Space
import Utils
import Mission
import Tree
import SpaceState.Common
import SpaceState.Input
import SpaceState.Game
import SpaceState.City
import SpaceState.Init
import SpaceState.Difficulty
import SpaceState.Combat
runGame :: String -> Difficulty -> Font -> Font -> IO Int
runGame plname d f f2 = do
let is = startState plname d f f2
setCamera (camera $ camstate is)
evalStateT (do
initState
loop)
is
loop :: StateT SpaceState IO Int
loop = untilDoneR $ do
liftIO $ delay 10
state <- State.get
drawSpace
dead <- if stopped state
then return False
else updateSpaceState
if not dead
then do
quits <- handleEvents
if quits
then gameOver "You decided to retire." "" >> die
else return Nothing
else die
updateSpaceState :: StateT SpaceState IO Bool
updateSpaceState = do
state <- State.get
modify $ modTri (updateEntity 1)
modify $ modAObjects $
mapInner (\(ang, rad) -> if ang == 0
then (ang, rad)
else (ang + 10 * recip rad, rad))
modify $ modAObjects $
fmap (\a -> if orbitRadius a == 0
then a
else modifyAngle (+ (10 * orbitalSpeedcoeff a * recip (orbitRadius a))) a)
modify $ modAObjects $ setupBarycenters
let mlanded = findCollisions (getShipBox $ tri state) (aobjects state)
case mlanded of
Nothing -> do
val <- liftIO $ randomRIO (0, 500 :: Int)
if (val == 0)
then startCombat Nothing >>= return . fst
else return False
Just lc -> do
if aobjName lc == "Star"
then lostLife "You flew too close to the star!" recoveryText
else do
diedInCity <- enteringCity lc
releaseKeys
return diedInCity
-- returns: nothing -> no police contact
-- or Just (gameover?, combatwon?)
survivedPolice :: AObject -> StateT SpaceState IO (Maybe (Bool, Bool))
survivedPolice lc = do
let alleg = getAllegiance lc
state <- State.get
let attid = allegAttitude alleg state
if attid >= pointsUntilThePoliceArrives
then return Nothing
else do
let s = concat ["Approacing the planet, you suddenly spot a police ship\"\n",
"approaching you! Your recent activities seem to have\n",
"alarmed the local authorities! What to do?\n\n",
"Press ENTER to fight your way to the starport\n",
"or ESCAPE to escape"]
pship <- liftIO $ randomPolice $ difficultyAIshift $ difficulty state
startCombat (Just (s, pship, getAllegiance lc)) >>= return . Just
enteringCity :: AObject -> StateT SpaceState IO Bool
enteringCity lc = do
n <- survivedPolice lc
case n of
Nothing -> gotoCity lc >> return False
Just (gameover, combatwon) -> do
if combatwon
then gotoCity lc
else catapult lc
return gameover
|
anttisalonen/starrover2
|
src/SpaceState/Space.hs
|
mit
| 3,264 | 0 | 18 | 913 | 928 | 472 | 456 | 99 | 6 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Auth
where
import Control.Lens
import Control.Monad.Catch (try)
import Control.Monad.Reader
import Data.Text
import Database.Persist
import DBTypes
import Domain.User
import Models
import Network.Wai
import Operation
import Servant
import Servant.Server.Experimental.Auth
import Servant.Server.Experimental.Auth.Cookie
import Types
type instance AuthCookieData = Either CookieError User
type family ProtectEndpoints a where
ProtectEndpoints (a :<|> b) = (ProtectEndpoints a) :<|> (ProtectEndpoints b)
ProtectEndpoints (a :> b) = a :> ProtectEndpoints b
ProtectEndpoints a = AppAuth :> a
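-- Illustrative expansion of the closed family above, for a hypothetical API
-- (the endpoint names here are examples only): the auth combinator is pushed
-- past every path fragment, directly in front of each verb.
--
-- ProtectEndpoints ("users" :> Get '[JSON] [User] :<|> "ping" :> Get '[JSON] ())
--   ~ ("users" :> AppAuth :> Get '[JSON] [User]) :<|> ("ping" :> AppAuth :> Get '[JSON] ())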
type AppAuth = AuthProtect "cookie-auth"
instance HasLink sub => HasLink (AppAuth :> sub) where
type MkLink (AppAuth :> sub) = MkLink sub
toLink _ = toLink (Proxy :: Proxy sub)
cookieAuthHandler :: Config -> AuthHandler Request (Either CookieError User)
cookieAuthHandler config@Config{..} = mkAuthHandler $ \request -> flip runReaderT config $ do
result <- lift $ try $ getSession authSettings serverKey request
case result :: Either AuthCookieException (Maybe Session) of
Left a -> return $ Left $ AuthError a
Right Nothing -> return (Left NotPresent)
Right (Just s) -> do
user <- runTransaction $ unsafeRunOperation $ dbGetUser (sessionUserID s)
case user of
Left _ -> return $ Left SessionInvalid
Right u@UserB{_userStatus=ActiveU} -> return $ Right u
Right _ -> return $ Left InactiveUser
data LoginError = UsernameNotFound Text
| TenantInactive TenantId
| WrongPassword
validateLogin :: LoginForm -> App (Either LoginError UserId)
validateLogin Login{..} =
runTransaction $ do
muser <- getBy (UniqueUsername loginUsername)
case muser of
Nothing -> return $ Left $ UsernameNotFound loginUsername
Just Entity{entityVal=user, entityKey=uid} -> do
mtenant <- get (view dBUserTenantID user)
case mtenant of
Nothing -> return $ Left $ TenantInactive (view dBUserTenantID user)
Just tenant -> case view dBTenantStatus tenant of
ActiveT -> if loginPassword == user ^. dBUserPassword
then return $ Right $ uid
else return $ Left WrongPassword
_ -> return $ Left $ TenantInactive (user ^. dBUserTenantID)
|
wz1000/haskell-webapps
|
ServantPersistent/src/Auth.hs
|
mit
| 2,898 | 0 | 22 | 926 | 729 | 374 | 355 | 60 | 5 |
module Day2 ( ) where
import Data.List.Split
input = readFile "input/day2.txt"
sideAreas l w h = [l * w, w * h, h* l]
area [l, w, h] = 2 * (sum $ sideAreas l w h) + (minimum $ sideAreas l w h)
toSides = map ((map read) . splitOn "x") . lines :: String -> [[Int]]
p1 = sum . map area . toSides <$> input
-- 1588178
sidePerims l w h = map (2*) [l+w, l+h, w+h]
bow l w h = l * w * h
ribbon [l, w, h] = (minimum $ sidePerims l w h) + bow l w h
p2 = sum . map ribbon . toSides <$> input
-- 3783758
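-- Worked example from the puzzle statement: a 2x3x4 present needs
-- 2*(6+12+8) + 6 = 58 square feet of paper and 2+2+3+3 + 2*3*4 = 34 feet of
-- ribbon, i.e. area [2,3,4] == 58 and ribbon [2,3,4] == 34.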
|
farrellm/advent
|
src/Day2.hs
|
mit
| 500 | 0 | 10 | 125 | 294 | 158 | 136 | 11 | 1 |
{-# htermination (elemOrdering :: Ordering -> (List Ordering) -> MyBool) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Ordering = LT | EQ | GT ;
map :: (b -> a) -> (List b) -> (List a);
map f Nil = Nil;
map f (Cons x xs) = Cons (f x) (map f xs);
foldr :: (b -> a -> a) -> a -> (List b) -> a;
foldr f z Nil = z;
foldr f z (Cons x xs) = f x (foldr f z xs);
pePe :: MyBool -> MyBool -> MyBool;
pePe MyFalse x = x;
pePe MyTrue x = MyTrue;
or :: (List MyBool) -> MyBool;
or = foldr pePe MyFalse;
pt :: (c -> a) -> (b -> c) -> b -> a;
pt f g x = f (g x);
any :: (a -> MyBool) -> (List a) -> MyBool;
any p = pt or (map p);
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;
elemOrdering :: Ordering -> (List Ordering) -> MyBool
elemOrdering = pt any esEsOrdering;
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/elem_7.hs
|
mit
| 1,140 | 0 | 9 | 296 | 523 | 284 | 239 | 31 | 1 |
module Language.TaPL.Boolean.Syntax (Term(..), isVal) where
import Test.QuickCheck (Arbitrary, arbitrary, sized, oneof)
import Control.Monad (liftM3)
import Control.Applicative ((<$>))
import Language.TaPL.ShowPretty (ShowPretty, showp)
data Term = TmTrue
| TmFalse
| TmIf Term Term Term
deriving (Eq, Read, Show)
isVal :: Term -> Bool
isVal TmTrue = True
isVal TmFalse = True
isVal _ = False
instance ShowPretty Term where
showp TmTrue = "true"
showp TmFalse = "false"
showp (TmIf t1 t2 t3) = "if " ++ showp t1 ++ " then " ++ showp t2 ++ " else " ++ showp t3
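-- For example:
--
-- >>> showp (TmIf TmTrue TmFalse TmTrue)
-- "if true then false else true"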
instance Arbitrary Term where
arbitrary = arbitraryTerm
arbitraryTerm = sized arbitraryTerm'
arbitraryTerm' 0 = oneof $ return <$> [ TmTrue
, TmFalse
]
arbitraryTerm' n | n > 0 = oneof [ return TmTrue
, return TmFalse
, liftM3 TmIf subterm subterm subterm
]
where subterm = arbitraryTerm' (n `div` 2)
|
zeckalpha/TaPL
|
src/Language/TaPL/Boolean/Syntax.hs
|
mit
| 1,081 | 0 | 11 | 361 | 320 | 174 | 146 | 26 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-basepathmapping.html
module Stratosphere.Resources.ApiGatewayBasePathMapping where
import Stratosphere.ResourceImports
-- | Full data type definition for ApiGatewayBasePathMapping. See
-- 'apiGatewayBasePathMapping' for a more convenient constructor.
data ApiGatewayBasePathMapping =
ApiGatewayBasePathMapping
{ _apiGatewayBasePathMappingBasePath :: Maybe (Val Text)
, _apiGatewayBasePathMappingDomainName :: Val Text
, _apiGatewayBasePathMappingRestApiId :: Maybe (Val Text)
, _apiGatewayBasePathMappingStage :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToResourceProperties ApiGatewayBasePathMapping where
toResourceProperties ApiGatewayBasePathMapping{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::ApiGateway::BasePathMapping"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ fmap (("BasePath",) . toJSON) _apiGatewayBasePathMappingBasePath
, (Just . ("DomainName",) . toJSON) _apiGatewayBasePathMappingDomainName
, fmap (("RestApiId",) . toJSON) _apiGatewayBasePathMappingRestApiId
, fmap (("Stage",) . toJSON) _apiGatewayBasePathMappingStage
]
}
-- | Constructor for 'ApiGatewayBasePathMapping' containing required fields as
-- arguments.
apiGatewayBasePathMapping
:: Val Text -- ^ 'agbpmDomainName'
-> ApiGatewayBasePathMapping
apiGatewayBasePathMapping domainNamearg =
ApiGatewayBasePathMapping
{ _apiGatewayBasePathMappingBasePath = Nothing
, _apiGatewayBasePathMappingDomainName = domainNamearg
, _apiGatewayBasePathMappingRestApiId = Nothing
, _apiGatewayBasePathMappingStage = Nothing
}
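-- Hypothetical usage sketch (assumes the 'IsString' literal for 'Val Text'
-- via OverloadedStrings and the lens operators from Control.Lens; the domain
-- and API id below are placeholders):
--
-- > apiGatewayBasePathMapping "api.example.com"
-- >   & agbpmBasePath ?~ "v1"
-- >   & agbpmRestApiId ?~ "my-rest-api-id"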
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-basepathmapping.html#cfn-apigateway-basepathmapping-basepath
agbpmBasePath :: Lens' ApiGatewayBasePathMapping (Maybe (Val Text))
agbpmBasePath = lens _apiGatewayBasePathMappingBasePath (\s a -> s { _apiGatewayBasePathMappingBasePath = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-basepathmapping.html#cfn-apigateway-basepathmapping-domainname
agbpmDomainName :: Lens' ApiGatewayBasePathMapping (Val Text)
agbpmDomainName = lens _apiGatewayBasePathMappingDomainName (\s a -> s { _apiGatewayBasePathMappingDomainName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-basepathmapping.html#cfn-apigateway-basepathmapping-restapiid
agbpmRestApiId :: Lens' ApiGatewayBasePathMapping (Maybe (Val Text))
agbpmRestApiId = lens _apiGatewayBasePathMappingRestApiId (\s a -> s { _apiGatewayBasePathMappingRestApiId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigateway-basepathmapping.html#cfn-apigateway-basepathmapping-stage
agbpmStage :: Lens' ApiGatewayBasePathMapping (Maybe (Val Text))
agbpmStage = lens _apiGatewayBasePathMappingStage (\s a -> s { _apiGatewayBasePathMappingStage = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/Resources/ApiGatewayBasePathMapping.hs
|
mit
| 3,169 | 0 | 15 | 353 | 461 | 262 | 199 | 40 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveFunctor #-}
module FVL.TypeAST
( Expr(..)
, FType(..)
, typeTransform
) where
import FVL.Algebra
import qualified FVL.FAST as FAST
data Expr a b
= CInt Integer
| CBool Bool
| CVar String
| Add b b
| Sub b b
| Mul b b
| Div b b
| And b b
| Or b b
| Not b
| Equal b b
| Less b b
| Empty
| Cons b b
| If b b b
| Function String a
| Appl b b
| LetRec String String a a
| Case b b String String a
deriving Functor
data FType = FInt
| FBool
| FVar Int
| FArrow FType FType
| FList FType
| FNotClosed deriving (Eq, Ord)
instance Show FType where
show FInt = "Int"
show FBool = "Bool"
show (FVar n) = "'a" ++ show n
show (FArrow x y) = show x ++ " -> " ++ show y
show (FList t) = "[" ++ show t ++ "]"
show _ = ""
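-- For example:
--
-- >>> show (FArrow FInt (FList (FVar 0)))
-- "Int -> ['a0]"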
alg :: Algebra FAST.Expr (LazyFix Expr)
alg (FAST.CInt n) = Fx' $ CInt n
alg (FAST.CBool b) = Fx' $ CBool b
alg (FAST.CVar s) = Fx' $ CVar s
alg (FAST.Add x y) = Fx' $ Add x y
alg (FAST.Sub x y) = Fx' $ Sub x y
alg (FAST.Mul x y) = Fx' $ Mul x y
alg (FAST.Div x y) = Fx' $ Div x y
alg (FAST.And x y) = Fx' $ And x y
alg (FAST.Or x y) = Fx' $ Or x y
alg (FAST.Not x) = Fx' $ Not x
alg (FAST.Equal x y) = Fx' $ Equal x y
alg (FAST.Less x y) = Fx' $ Less x y
alg FAST.Empty = Fx' $ Empty
alg (FAST.Cons x y) = Fx' $ Cons x y
alg (FAST.If p x y) = Fx' $ If p x y
alg (FAST.Function s p) = Fx' $ Function s p
alg (FAST.Appl f x) = Fx' $ Appl f x
alg (FAST.LetRec f x p e) = Fx' $ LetRec f x p e
alg (FAST.Case p x s t y) = Fx' $ Case p x s t y
typeTransform :: Fix FAST.Expr -> LazyFix Expr
typeTransform = cata alg
|
burz/Feval
|
FVL/TypeAST.hs
|
mit
| 1,737 | 0 | 8 | 550 | 882 | 452 | 430 | 64 | 1 |
{- |
Module : $Header$
Description : Definition of signature morphisms for
first-order logic with dependent types (DFOL)
Copyright : (c) Kristina Sojakova, DFKI Bremen 2009
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module DFOL.Morphism
( Morphism (..)
, idMorph
, compMorph
, isValidMorph
, canForm
, applyMorph
, mapSymbol
, inclusionMorph
, morphUnion
, inducedFromMorphism
, inducedFromToMorphism
, coGenSig
, toTermMap
) where
import DFOL.AS_DFOL
import DFOL.Sign
import DFOL.Symbol
import Common.Result
import Common.Doc
import Common.DocUtils
import Common.Id
import Common.ExtSign
import qualified Common.Result as Result
import qualified Data.Map as Map
import qualified Data.Set as Set
-- morphisms for DFOL - maps of symbol names
data Morphism = Morphism
{ source :: Sign
, target :: Sign
, symMap :: Map.Map NAME NAME
} deriving (Ord, Show)
-- constructs an identity morphism
idMorph :: Sign -> Morphism
idMorph sig = Morphism sig sig Map.empty
-- composes two morphisms
compMorph :: Morphism -> Morphism -> Result Morphism
compMorph m1 m2 =
if target m1 /= source m2
then Result.Result [incompatibleMorphsError m1 m2] Nothing
else return $ Morphism (source m1) (target m2) $
Set.fold (\ sym1 -> let sym2 = mapSymbol m2
$ mapSymbol m1 sym1
in Map.insert sym1 sym2)
Map.empty $
getSymbols $ source m1
-- determines whether a morphism is valid
isValidMorph :: Morphism -> Bool
isValidMorph m@(Morphism sig1 sig2 map1) =
let sym1 = getSymbols sig1
sym2 = getSymbols sig2
checkDom = Set.isSubsetOf (Map.keysSet map1) sym1
checkCod = Set.isSubsetOf (Set.map (mapSymbol m) sym1) sym2
checkTypes = map (checkTypePres m) $ Set.toList sym1
in and $ [checkDom, checkCod] ++ checkTypes
checkTypePres :: Morphism -> NAME -> Bool
checkTypePres m n =
let Just type1 = getSymbolType n $ source m
Just type2 = getSymbolType (mapSymbol m n) $ target m
in applyMorph m type1 == type2
{- converts the morphism into its canonical form where the symbol map contains
no key/value pairs of the form (k,k) -}
canForm :: Morphism -> Morphism
canForm (Morphism sig1 sig2 map1) =
let map2 = Map.fromList $ filter (uncurry (/=)) $ Map.toList map1
in Morphism sig1 sig2 map2
-- constructs the inclusion morphism between signatures
inclusionMorph :: Sign -> Sign -> Result.Result Morphism
inclusionMorph sig1 sig2 =
let m = Morphism sig1 sig2 Map.empty
in if isValidMorph m
then Result.Result [] $ Just m
else Result.Result [noSubsigError sig1 sig2] Nothing
{- generated and cogenerated signatures
Algorithm description:
FOR GENERATED SIGNATURES
Input : a signature "sig" and a set of symbols "syms"
Output : an inclusion morphism
1 : Check if all symbols in syms occur in sig; if not, output Nothing
2 : Initialize the set of symbols "incl" which necessarily must be included
in the generated signature to syms
Initialize the set "done" of analyzed symbols to empty
Initialize the set "todo" of symbols to be analyzed to syms
3 : Check if todo is empty
3.1 : If yes, go to 5
3.2 : If not, go to 4
4 : Pick a symbol "s" from todo
4.1 : Get the type "t" of s in sig
4.2 : Get the set "vars" of free variables in t, i.e. the symbols of sig
that t depends on
4.3 : For each "v" in vars :
4.3.1 : Add v to incl
4.3.2 : If v does not occur in done, add it to todo
    4.4 : Remove s from todo and add it to done
4.5 : Go to 3
5 : Let "sig1" be the subsignature of sig containing only the symbols in incl
and output the inclusion morphism m : sig1 -> sig
FOR COGENERATED SIGNATURES
Input : a signature "sig" and a set of symbols "syms"
Output : an inclusion morphism
1 : Check if all symbols in syms occur in sig; if not, output Nothing
2 : Initialize the set of symbols "excl" which necessarily must be excluded
from the cogenerated signature to syms
3 : For each symbol "s" in sig (keeping the order in which they are defined) :
3.1 : If s does not occur in excl :
          3.1.1 : Get the type "t" of s in sig
          3.1.2 : Get the set "vars" of free variables in t, i.e. the symbols
                  of sig that t depends on
          3.1.3 : If any of the symbols in vars occurs in excl, add s to excl
4 : Let "sig1" be the subsignature of sig containing all the symbols not
occurring in excl and output the inclusion morphism m : sig1 -> sig
-}
coGenSig :: Bool -> Set.Set Symbol -> Sign -> Result Morphism
coGenSig flag syms sig@(Sign ds) =
let names = Set.map name syms
ds1 = expandDecls ds
in if Set.isSubsetOf names (getSymbols sig)
then let incl = if flag
then cogSig names ds1 sig
else genSig names Set.empty names sig
ds2 = map (\ (n, t) -> ([n], t))
$ filter (\ (n, _) -> Set.member n incl) ds1
in inclusionMorph (Sign ds2) sig
else Result.Result [symsNotInSigError names sig] Nothing
genSig :: Set.Set NAME -> Set.Set NAME -> Set.Set NAME -> Sign -> Set.Set NAME
genSig incl done todo sig =
if Set.null todo
then incl
else let n = Set.findMin todo
Just t = getSymbolType n sig
ns = getFreeVars t
incl1 = Set.union incl ns
             ns1 = Set.filter (`Set.notMember` done) ns
done1 = Set.insert n done
todo1 = Set.union ns1 $ Set.delete n todo
in genSig incl1 done1 todo1 sig
cogSig :: Set.Set NAME -> [SDECL] -> Sign -> Set.Set NAME
cogSig excl [] sig = Set.difference (getSymbols sig) excl
cogSig excl ((n, t) : ds) sig =
if Set.member n excl
then cogSig excl ds sig
else let ns = Set.toList $ getFreeVars t
depen = any (`Set.member` excl) ns
in if depen
then let excl1 = Set.insert n excl
in cogSig excl1 ds sig
else cogSig excl ds sig
-- morphism union
morphUnion :: Morphism -> Morphism -> Result.Result Morphism
morphUnion m1@(Morphism sig1D sig1C map1) m2@(Morphism sig2D sig2C map2) =
let Result.Result diag1 sigDM = sigUnion sig1D sig2D
Result.Result diag2 sigCM = sigUnion sig1C sig2C
Result.Result diag3 map3M = combineMaps map1 map2
in case sigDM of
Nothing -> Result.Result diag1 Nothing
Just sigD ->
case sigCM of
Nothing -> Result.Result diag2 Nothing
Just sigC ->
case map3M of
Nothing -> Result.Result diag3 Nothing
Just map3 ->
let m = Morphism sigD sigC map3
in if isValidMorph m
then Result.Result [] $ Just m
else Result.Result
[invalidMorphError m1 m2] Nothing
combineMaps :: Map.Map NAME NAME -> Map.Map NAME NAME ->
Result.Result (Map.Map NAME NAME)
combineMaps map1 map2 = combineMapsH map1 $ Map.toList map2
combineMapsH :: Map.Map NAME NAME -> [(NAME, NAME)] ->
Result.Result (Map.Map NAME NAME)
combineMapsH map1 [] = Result.Result [] $ Just map1
combineMapsH map1 ((k, v) : ds) =
if Map.member k map1
then let Just v1 = Map.lookup k map1
in if v == v1
then combineMapsH map1 ds
else Result.Result [incompatibleMapError k v v1] Nothing
else let map2 = Map.insert k v map1
in combineMapsH map2 ds
-- applies a morphism to a symbol
mapSymbol :: Morphism -> NAME -> NAME
mapSymbol m sym = Map.findWithDefault sym sym $ symMap m
-- translates a term, type or formula along the given morphism
applyMorph :: Translatable a => Morphism -> a -> a
applyMorph m t =
let syms = getSymbols (target m)
map1 = toTermMap $ symMap m
in translate map1 syms t
toTermMap :: Map.Map NAME NAME -> Map.Map NAME TERM
toTermMap m = Map.fromList $ map (\ (k, a) -> (k, Identifier a))
$ Map.toList m
-- equality
instance Eq Morphism where
m1 == m2 = eqMorph (canForm m1) (canForm m2)
eqMorph :: Morphism -> Morphism -> Bool
eqMorph (Morphism s1 t1 map1) (Morphism s2 t2 map2) =
(s1, t1, map1) == (s2, t2, map2)
-- pretty printing
instance Pretty Morphism where
pretty = printMorph
printMorph :: Morphism -> Doc
printMorph m =
vcat $ if m == idMorph (source m)
then [text "Identity morphism on:", pretty $ source m]
else [text "Source signature:", pretty $ source m,
text "Target signature:", pretty $ target m,
text "Mapping:", printSymMap $ symMap m]
printSymMap :: Map.Map NAME NAME -> Doc
printSymMap m = vcat $ map (\ (k, a) -> pretty k <+> text "|->" <+> pretty a)
$ Map.toList m
-- induces a signature morphism from the source signature and a symbol map
inducedFromMorphism :: Map.Map Symbol Symbol -> Sign -> Result.Result Morphism
inducedFromMorphism map1 sig1 =
let map2 = toNameMap map1
Result.Result dgs sig2M = buildSig sig1 map2
in case sig2M of
Nothing -> Result.Result dgs Nothing
Just sig2 -> Result.Result [] $ Just $ Morphism sig1 sig2 map2
buildSig :: Sign -> Map.Map NAME NAME -> Result.Result Sign
buildSig (Sign ds) = buildSigH (expandDecls ds) emptySig
buildSigH :: [SDECL] -> Sign -> Map.Map NAME NAME -> Result.Result Sign
buildSigH [] sig _ = Result.Result [] $ Just sig
buildSigH ((n1, t1) : ds) sig map1 =
let n2 = Map.findWithDefault n1 n1 map1
map2 = toTermMap map1
syms = Set.map (\ n -> Map.findWithDefault n n map1)
$ getSymbols sig
t2 = translate map2 syms t1
in if isConstant n2 sig
then let Just t3 = getSymbolType n2 sig
in if t2 == t3
then buildSigH ds sig map1
else Result.Result [incompatibleViewError1 n2 t2 t3]
Nothing
else buildSigH ds (addSymbolDecl ([n2], t2) sig) map1
-- induces a signature morphism from the source and target sigs and a symbol map
inducedFromToMorphism :: Map.Map Symbol Symbol -> ExtSign Sign Symbol ->
ExtSign Sign Symbol -> Result.Result Morphism
inducedFromToMorphism map1 (ExtSign sig1 _) (ExtSign sig2 _) =
let map2 = toNameMap map1
m = Morphism sig1 sig2 map2
Sign ds = sig1
in buildMorph (expandDecls ds) m
buildMorph :: [SDECL] -> Morphism -> Result.Result Morphism
buildMorph [] m = Result.Result [] $ Just m
buildMorph ((n1, t1) : ds) m@(Morphism _ sig2 map1) = do
let t2 = applyMorph m t1
if Map.member n1 map1
then do
let n2 = mapSymbol m n1
let Just t3 = getSymbolType n2 sig2
if t2 == t3 then buildMorph ds m else
Result.Result [incompatibleViewError2 n2 t2 t3] Nothing
else do
let t3 = getSymbolType n1 sig2
if Just t2 == t3 then buildMorph ds m else do
let ss = getSymsOfType sig2 t2
case ss of
[s] -> buildMorph ds $
m {symMap = Map.insert n1 s $ symMap m}
[] -> Result.Result [noSymToMapError n1 t2] Nothing
_ -> Result.Result [manySymToMapError n1 t2 ss] Nothing
-- ERROR MESSAGES
incompatibleMorphsError :: Morphism -> Morphism -> Result.Diagnosis
incompatibleMorphsError m1 m2 =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "Codomain of the morphism\n" ++ show (pretty m1)
++ "\nis different from the domain of the morphism\n"
++ show (pretty m2)
++ "\nhence their composition cannot be constructed."
, Result.diagPos = nullRange
}
incompatibleViewError1 :: NAME -> TYPE -> TYPE -> Result.Diagnosis
incompatibleViewError1 n t1 t2 =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "Symbol\n" ++ show (pretty n)
++ "\nmust have both type\n" ++ show (pretty t1)
++ "\nand type\n" ++ show (pretty t2)
++ "\nin the target signature and hence "
++ "the view is ill-formed."
, Result.diagPos = nullRange
}
incompatibleViewError2 :: NAME -> TYPE -> TYPE -> Result.Diagnosis
incompatibleViewError2 n t1 t2 =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "Symbol\n" ++ show (pretty n)
++ "\nmust have type\n" ++ show (pretty t1)
++ "\nbut instead has type\n" ++ show (pretty t2)
++ "\nin the target signature and hence "
++ "the view is ill-formed."
, Result.diagPos = nullRange
}
noSymToMapError :: NAME -> TYPE -> Result.Diagnosis
noSymToMapError n t =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "Symbol\n" ++ show (pretty n)
++ "\ncannot be mapped to anything as the target "
++ "signature contains no symbols of type\n"
++ show (pretty t)
, Result.diagPos = nullRange
}
manySymToMapError :: NAME -> TYPE -> [NAME] -> Result.Diagnosis
manySymToMapError n t ss =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "Symbol\n" ++ show (pretty n)
++ "\ncannot be uniquely mapped as the target "
++ "signature contains multiple symbols of type\n"
++ show (pretty t) ++ "\n namely\n"
++ show (printNames ss)
, Result.diagPos = nullRange
}
noSubsigError :: Sign -> Sign -> Result.Diagnosis
noSubsigError sig1 sig2 =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "Signature\n" ++ show (pretty sig1)
++ "\nis not a subsignature of\n"
++ show (pretty sig2)
++ "\nand hence the inclusion morphism "
++ "cannot be constructed."
, Result.diagPos = nullRange
}
incompatibleMapError :: NAME -> NAME -> NAME -> Result.Diagnosis
incompatibleMapError n n1 n2 =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "Symbol\n" ++ show (pretty n)
++ "\nis mapped both to\n" ++ show (pretty n1)
++ "\nand\n" ++ show (pretty n2)
++ "\nin the target signature and hence "
++ "the morphism union cannot be constructed."
, Result.diagPos = nullRange
}
invalidMorphError :: Morphism -> Morphism -> Result.Diagnosis
invalidMorphError m1 m2 =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "The combination of morphisms\n" ++ show (pretty m1)
++ "\nand\n" ++ show (pretty m2)
++ "\nis not a valid morphism and hence "
++ "their union cannot be constructed."
, Result.diagPos = nullRange
}
symsNotInSigError :: Set.Set NAME -> Sign -> Result.Diagnosis
symsNotInSigError syms sig =
Result.Diag
{ Result.diagKind = Result.Error
, Result.diagString = "The symbols\n"
++ show (printNames $ Set.toList syms)
++ "\nare not in the signature\n"
++ show (pretty sig)
++ "\nand hence the (co)generated signature "
++ "cannot be constructed."
, Result.diagPos = nullRange
}
|
nevrenato/HetsAlloy
|
DFOL/Morphism.hs
|
gpl-2.0
| 16,137 | 0 | 21 | 5,196 | 4,062 | 2,048 | 2,014 | 298 | 6 |
{-# LANGUAGE MagicHash #-}
module Darcs.Patch.Witnesses.Unsafe
( unsafeCoerceP
, unsafeCoercePStart
, unsafeCoercePEnd
, unsafeCoerceP2
, unsafeCoerceP1
) where
import GHC.Base (unsafeCoerce#)
unsafeCoerceP :: a wX wY -> a wB wC
unsafeCoerceP = unsafeCoerce#
unsafeCoercePStart :: a wX1 wY -> a wX2 wY
unsafeCoercePStart = unsafeCoerce#
unsafeCoercePEnd :: a wX wY1 -> a wX wY2
unsafeCoercePEnd = unsafeCoerce#
unsafeCoerceP2 :: t wW wX wY wZ -> t wA wB wC wD
unsafeCoerceP2 = unsafeCoerce#
unsafeCoerceP1 :: a wX -> a wY
unsafeCoerceP1 = unsafeCoerce#
|
DavidAlphaFox/darcs
|
src/Darcs/Patch/Witnesses/Unsafe.hs
|
gpl-2.0
| 583 | 0 | 6 | 113 | 160 | 86 | 74 | 18 | 1 |
--
--
-- (C) 2011-14 Nicola Bonelli <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software Foundation,
-- Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
-- The full GNU General Public License is included in this distribution in
-- the file called "COPYING".
module Daemon where
import Control.Concurrent
import Control.Monad
import Data.List
import Data.List.Split
import System.Log.Logger
import System.Directory
import System.FilePath
import System.IO
import System.Process
import System.Posix.Process
import System.Exit
import Foreign.ForeignPtr
import Foreign.Ptr
import Network.PFq.Default
import Options
import PFQdaemon
import Network.PFq as Q
daemon :: Options -> IO () -> IO ()
daemon opts closefds = forever $ do
(src, dst) <- getConfigFiles opts
new <- newerFile src dst
when new $ rebuildRestart opts closefds
threadDelay 1000000
rebuildRestart :: Options -> IO () -> IO ()
rebuildRestart opts closefds = do
infoM "daemon" "Configuration updated. Rebuilding..."
(src, dst) <- getConfigFiles opts
copyFile src dst
runCompiler >>= \(ec,_,msg) -> if ec == ExitSuccess
then infoM "daemon" "Done. Restarting..." >> closefds >> executeFile "pfqd" False ["-c" , src, "-d"] Nothing
else mapM_ (errorM "daemon") (lines $ replace "PFQconf.hs" (config_file opts) msg)
getConfigFiles :: Options -> IO (FilePath, FilePath)
getConfigFiles opts = getAppUserDataDirectory "pfqd" >>=
\udata -> let src = config_file opts
dst = udata </> "PFQconf.hs" in return (src, dst)
newerFile :: FilePath -> FilePath -> IO Bool
newerFile a b = do
at <- getModificationTime a
be <- doesFileExist b
if not be
then return True
else do bt <- getModificationTime b
return ( at > bt )
replace :: Eq a => [a] -> [a] -> [a] -> [a]
replace old new = intercalate new . splitOn old
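-- For example, as used in 'rebuildRestart' above to point compiler messages
-- back at the user's config file:
-- replace "PFQconf.hs" "pfq.conf" "error in PFQconf.hs:3" == "error in pfq.conf:3"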
runCompiler :: IO (ExitCode, String, String)
runCompiler = readProcessWithExitCode "ghc" ["--make", "Main", "-o", "pfqd", "-lpfq"] ""
|
Mr-Click/PFQ
|
user/pfqd/src/Daemon.hs
|
gpl-2.0
| 2,649 | 0 | 15 | 538 | 613 | 327 | 286 | 48 | 2 |
{-
Copyright (C) 2012-2014 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
import Text.Pandoc
import Criterion.Main
import Criterion.Types (Config(..))
import Data.Maybe (mapMaybe)
import Debug.Trace (trace)
import Text.Pandoc.Error
import Control.Applicative
readerBench :: Pandoc
-> (String, ReaderOptions -> String -> IO (Either PandocError Pandoc))
-> Maybe Benchmark
readerBench doc (name, reader) =
case lookup name writers of
Just (PureStringWriter writer) ->
let inp = writer def{ writerWrapText = True} doc
in return $ bench (name ++ " reader") $ nfIO $
(fmap handleError <$> reader def{ readerSmart = True }) inp
_ -> trace ("\nCould not find writer for " ++ name ++ "\n") Nothing
writerBench :: Pandoc
-> (String, WriterOptions -> Pandoc -> String)
-> Benchmark
writerBench doc (name, writer) = bench (name ++ " writer") $ nf
(writer def{ writerWrapText = True }) doc
main :: IO ()
main = do
inp <- readFile "tests/testsuite.txt"
let opts = def{ readerSmart = True }
let doc = handleError $ readMarkdown opts inp
let readers' = [(n,r) | (n, StringReader r) <- readers]
let readerBs = mapMaybe (readerBench doc)
$ filter (\(n,_) -> n /="haddock") readers'
let writers' = [(n,w) | (n, PureStringWriter w) <- writers]
let writerBs = map (writerBench doc)
$ writers'
defaultMainWith defaultConfig{ timeLimit = 6.0 }
(writerBs ++ readerBs)
|
poxu/pandoc
|
benchmark/benchmark-pandoc.hs
|
gpl-2.0
| 2,173 | 0 | 16 | 486 | 526 | 275 | 251 | 35 | 2 |
module Main where
import System.Environment(getArgs)
import Control.Monad
freeSpaces :: Int -> Int -> [Int] -> [Int]
freeSpaces l p [] = [l-p]
freeSpaces l p (x:xs) = (x-p) : freeSpaces l x xs
placeBats :: Int -> Int -> Int -> Int -> Int
placeBats ld d rd s = if s >= ld + rd then ((s - ld - rd) `div` d) + 1 else 0
determineMaxBats :: Int -> Int -> [Int] -> Int
determineMaxBats l d xs =
case freeSpaces l 0 xs of
[y] -> placeBats 6 d 6 y
(y:ys) -> placeBats 6 d d y +
sum (map (placeBats d d d) (init ys)) +
placeBats d d 6 (last ys)
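-- Worked example: on a 22-unit wire with no bats placed yet, a minimum
-- spacing of 2 between bats and 6 from each end leaves room at
-- 6,8,10,12,14,16, so determineMaxBats 22 2 [] == 6.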
processLine :: String -> String
processLine line =
let (l:d:_:xs) = map read $ words line
in show $ determineMaxBats l d xs
main :: IO ()
main = liftM head getArgs >>= liftM lines . readFile >>= mapM_ (putStrLn . processLine)
|
cryptica/CodeEval
|
Challenges/146_BatsChallenge/main.hs
|
gpl-3.0
| 860 | 0 | 14 | 258 | 418 | 215 | 203 | 21 | 2 |
{-|
Module : Network.Gopher.Util
Stability : experimental
Portability : POSIX
Helper utilities used within the library and the server which also could be useful for other application code.
-}
{-# LANGUAGE OverloadedStrings #-}
module Network.Gopher.Util (
-- * Security
santinizePath
, santinizeIfNotUrl
-- * String Encoding
, asciiOrd
, asciiChr
, uEncode
, uDecode
-- * Misc Helpers
, stripNewline
) where
import Data.ByteString (ByteString ())
import qualified Data.ByteString as B
import Data.Char (ord, chr)
import Data.List (isPrefixOf)
import qualified Data.String.UTF8 as U
import Data.Word (Word8 ())
import System.FilePath.Posix (pathSeparator, normalise, joinPath, splitDirectories)
-- | 'chr' a 'Word8'
asciiChr :: Word8 -> Char
asciiChr = chr . fromIntegral
-- | 'ord' a 'Word8'
asciiOrd :: Char -> Word8
asciiOrd = fromIntegral . ord
-- | Encode a 'String' to a UTF-8 'ByteString'
uEncode :: String -> ByteString
uEncode = B.pack . U.encode
-- | Decode a UTF-8 'ByteString' to a 'String'
uDecode :: ByteString -> String
uDecode = fst . U.decode . B.unpack
-- | Strip @\\r@ and @\\n@ from 'ByteString's
stripNewline :: ByteString -> ByteString
stripNewline s
| B.null s = B.empty
| B.head s `elem`
(map (fromIntegral . ord) "\n\r") = stripNewline (B.tail s)
| otherwise = B.head s `B.cons` stripNewline (B.tail s)
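-- For example:
--
-- >>> stripNewline "gopher selector\r\n"
-- "gopher selector"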
-- | Normalise a path and prevent <https://en.wikipedia.org/wiki/Directory_traversal_attack directory traversal attacks>.
santinizePath :: FilePath -> FilePath
santinizePath path = joinPath . filter (\p -> p /= ".." && p /= ".") . splitDirectories . normalise $ path
santinizeIfNotUrl :: FilePath -> FilePath
santinizeIfNotUrl path = if "URL:" `isPrefixOf` path
then path
else santinizePath path
|
lukasepple/spacecookie
|
src/Network/Gopher/Util.hs
|
gpl-3.0
| 1,832 | 0 | 14 | 382 | 410 | 234 | 176 | 36 | 2 |
module HandlersClient (
doClient,
lookupHandlerClient
)
where
import GHC.IO.Handle
import System.IO
import Control.Monad
import Control.Concurrent.STM.TChan
import PupEventsPQueue
import qualified PupEventsClient as Client
import Graphics.Rendering.OpenGL.GL (($=))
import qualified Graphics.Rendering.OpenGL.GL as GL
import qualified PupEventsClient
import Events
-- |The doClient method is how we start the client side of the
-- Pup-Events framework. It's called by the main application.
doClient ip priorities =
do (q1, q2, dc) <- Client.client ip priorities lookupPriority lookupUnHandler parsers
return (q1, q2, dc)
---------------
-- GameLeave --
---------------
--gameLeaveHandlerClient :: Event -> IO Event
--gameLeaveHandlerClient e@(GameLeave player game) = return e
-----------------------
---- PlayerCollision --
-----------------------
--playerCollisionHandlerClient :: Event -> IO Event
--playerCollisionHandlerClient e@(PlayerCollision game players) = return e
---------------
---- GameEnd --
---------------
--gameEndHandlerClient :: Event -> IO Event
--gameEndHandlerClient e@(GameEnd game) = return e
-------------------
---- GameAdvance --
-------------------
--gameAdvanceHandlerClient :: Event -> IO Event
--gameAdvanceHandlerClient e@(GameAdvance game) = return e
-----------------
---- GameStart --
-----------------
--gameStartHandlerClient :: Event -> IO Event
--gameStartHandlerClient e@(GameStart game) = return e
----------------
---- GameJoin --
----------------
--gameJoinHandlerClient :: Event -> IO Event
--gameJoinHandlerClient e@(GameJoin player game) = return e
---------------
---- GameNew --
---------------
--gameNewHandlerClient :: Event -> IO Event
--gameNewHandlerClient e@(GameNew players) = return e
-----------------
---- MouseMove --
-----------------
--mouseMoveHandlerClient :: Event -> IO Event
--mouseMoveHandlerClient e@(MouseMove p1 p2) = return e
-----------
-- Login --
-----------
loginHandlerClient :: Event -> IO Event
loginHandlerClient e@(Login (Username user)) = return e
------------
-- Logout --
------------
logoutHandlerClient :: Event -> IO Event
logoutHandlerClient e@(Logout _) =
do putStrLn "Logged out"
return e
-----------------------
-- AlreadyRegistered --
-----------------------
alreadyRegisteredHandlerClient :: Event -> IO Event
alreadyRegisteredHandlerClient e@(Error AlreadyRegistered) =
do putStrLn "Already registered with this IP!"
return e
----------------
-- UserExists --
----------------
userExistsHandlerClient :: Event -> IO Event
userExistsHandlerClient e@(Error UserExists) =
do putStrLn $ "The username already exists!"
return e
-- |Returns the specified Event's handler function. This has a weird type signature because it's returning a function.
lookupHandlerClient :: Event -> (Event -> IO Event)
--lookupHandlerClient (MouseMove _ _ ) = mouseMoveHandlerClient
lookupHandlerClient (Login _ ) = loginHandlerClient
lookupHandlerClient (Logout _) = logoutHandlerClient
--lookupHandlerClient (GameNew _ ) = gameNewHandlerClient
--lookupHandlerClient (GameJoin _ _ ) = gameJoinHandlerClient
--lookupHandlerClient (GameLeave _ _ ) = gameLeaveHandlerClient
--lookupHandlerClient (GameStart _ ) = gameStartHandlerClient
--lookupHandlerClient (GameAdvance _ ) = gameAdvanceHandlerClient
--lookupHandlerClient (GameEnd _ ) = gameEndHandlerClient
--lookupHandlerClient (PlayerCollision _ _ ) = playerCollisionHandlerClient
lookupHandlerClient (Error AlreadyRegistered) = alreadyRegisteredHandlerClient
lookupHandlerClient (Error UserExists) = userExistsHandlerClient
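-- Hypothetical dispatch helper (not part of the original module): look up
-- the handler for an event and immediately run it on that same event,
-- which is presumably how the framework consumes lookupHandlerClient.
dispatchClient :: Event -> IO Event
dispatchClient ev = lookupHandlerClient ev ev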
|
RocketPuppy/PupCollide
|
Client/HandlersClient.hs
|
gpl-3.0
| 3,619 | 0 | 10 | 473 | 429 | 259 | 170 | 35 | 1 |
double :: Num a => a -> a
double x = x + x
|
forflo/snippetbin
|
haskell/double.hs
|
gpl-3.0
| 17 | 0 | 5 | 6 | 13 | 6 | 7 | 1 | 1 |
{-# LANGUAGE ImplicitParams #-}
-- | This module contains Version of Ampersand
module Ampersand.Basics.Version
( ampersandVersionStr
, ampersandVersionWithoutBuildTimeStr
, fatal
) where
import Ampersand.Basics.BuildInfo_Generated
import Ampersand.Basics.Exit
import Ampersand.Basics.Prelude
import GHC.Stack
maxLen :: Int
maxLen = 1500000 -- Cap the length of the error message so that the process still
                 -- terminates after the error: very long strings appear to make the
                 -- sentinel hang, although the exact threshold is unknown.
-- | Create an error message in a structured way, containing the version of Ampersand.
-- It throws an error that reports the call site (module name and line number) via the
-- call stack, which makes debugging considerably easier.
fatal :: (HasCallStack) => String -> a
fatal msg
= exitWith . Fatal . lines $
("! "++ampersandVersionWithoutBuildTimeStr++"\n"++
lazyCutoff maxLen msg++"\n"++
prettyCallStack callStack
)
where lazyCutoff _ [] = ""
lazyCutoff 0 _ = "\n<Ampersand's fatal-mechanism has removed the rest of this error message.>"
lazyCutoff n (c:cs) = c: lazyCutoff (n-1) cs
{-# NOINLINE fatal #-}
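-- Hypothetical example (not part of the Ampersand code base) of how fatal
-- is meant to be called when an "impossible" case is reached:
headOrDie :: [a] -> a
headOrDie (x:_) = x
headOrDie []    = fatal "headOrDie: empty list; the caller should have guaranteed at least one element."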
-- | String, containing the Ampersand version, including the build timestamp.
ampersandVersionStr :: String
ampersandVersionStr = ampersandVersionWithoutBuildTimeStr ++", build time: "++buildTimeStr
-- | String containing the Ampersand version. The part up to the first space is used as the name of the release (AppVeyor).
ampersandVersionWithoutBuildTimeStr :: String
ampersandVersionWithoutBuildTimeStr = "Ampersand-v"++cabalVersionStr++" ["++gitInfoStr++"]"
{-
#1.#2.#3[$gitInfo] : #1 major version; #2 student release version; #3 production fix version (normally 0 );
$gitInfo: "branch:SHA", followed by a '*' if the working copy was dirty: e.g. "master:0eea5e3*"
-}
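-- Illustrative rendering of the scheme above (the version number, branch, SHA and
-- build time below are made up, not taken from an actual build):
--   ampersandVersionWithoutBuildTimeStr == "Ampersand-v4.1.0 [master:0eea5e3*]"
--   ampersandVersionStr                 == "Ampersand-v4.1.0 [master:0eea5e3*], build time: ..."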
|
AmpersandTarski/ampersand
|
src/Ampersand/Basics/Version.hs
|
gpl-3.0
| 1,893 | 0 | 11 | 391 | 230 | 131 | 99 | -1 | -1 |
process s =
upCase s >>= \upStr ->
toWords upStr
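-- A minimal, self-contained reading of the snippet above. upCase and toWords
-- are assumed stand-ins for the chapter's Kleisli arrows; the book may define
-- them in a different monad (e.g. Writer), so Maybe is used here only for
-- concreteness.
import Data.Char (toUpper)
upCase :: String -> Maybe String
upCase s = Just (map toUpper s)
toWords :: String -> Maybe [String]
toWords s = Just (words s)
-- With these definitions, process "hello world" == Just ["HELLO","WORLD"].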
|
hmemcpy/milewski-ctfp-pdf
|
src/content/3.4/code/haskell/snippet20.hs
|
gpl-3.0
| 61 | 0 | 7 | 22 | 24 | 11 | 13 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Composer.Projects.Locations.Environments.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Update an environment.
--
-- /See:/ <https://cloud.google.com/composer/ Cloud Composer API Reference> for @composer.projects.locations.environments.patch@.
module Network.Google.Resource.Composer.Projects.Locations.Environments.Patch
(
-- * REST Resource
ProjectsLocationsEnvironmentsPatchResource
-- * Creating a Request
, projectsLocationsEnvironmentsPatch
, ProjectsLocationsEnvironmentsPatch
-- * Request Lenses
, plepXgafv
, plepUploadProtocol
, plepUpdateMask
, plepAccessToken
, plepUploadType
, plepPayload
, plepName
, plepCallback
) where
import Network.Google.Composer.Types
import Network.Google.Prelude
-- | A resource alias for @composer.projects.locations.environments.patch@ method which the
-- 'ProjectsLocationsEnvironmentsPatch' request conforms to.
type ProjectsLocationsEnvironmentsPatchResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "updateMask" GFieldMask :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Environment :>
Patch '[JSON] Operation
-- | Update an environment.
--
-- /See:/ 'projectsLocationsEnvironmentsPatch' smart constructor.
data ProjectsLocationsEnvironmentsPatch =
ProjectsLocationsEnvironmentsPatch'
{ _plepXgafv :: !(Maybe Xgafv)
, _plepUploadProtocol :: !(Maybe Text)
, _plepUpdateMask :: !(Maybe GFieldMask)
, _plepAccessToken :: !(Maybe Text)
, _plepUploadType :: !(Maybe Text)
, _plepPayload :: !Environment
, _plepName :: !Text
, _plepCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsEnvironmentsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plepXgafv'
--
-- * 'plepUploadProtocol'
--
-- * 'plepUpdateMask'
--
-- * 'plepAccessToken'
--
-- * 'plepUploadType'
--
-- * 'plepPayload'
--
-- * 'plepName'
--
-- * 'plepCallback'
projectsLocationsEnvironmentsPatch
:: Environment -- ^ 'plepPayload'
-> Text -- ^ 'plepName'
-> ProjectsLocationsEnvironmentsPatch
projectsLocationsEnvironmentsPatch pPlepPayload_ pPlepName_ =
ProjectsLocationsEnvironmentsPatch'
{ _plepXgafv = Nothing
, _plepUploadProtocol = Nothing
, _plepUpdateMask = Nothing
, _plepAccessToken = Nothing
, _plepUploadType = Nothing
, _plepPayload = pPlepPayload_
, _plepName = pPlepName_
, _plepCallback = Nothing
}
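-- Hypothetical usage sketch (not part of the generated module): build a patch
-- request for a made-up environment resource name. The Environment value is
-- taken as an argument because its construction is out of scope here; a
-- field-restricted patch would additionally set the mask via plepUpdateMask.
examplePatchRequest :: Environment -> ProjectsLocationsEnvironmentsPatch
examplePatchRequest env =
  projectsLocationsEnvironmentsPatch
    env
    "projects/my-project/locations/us-central1/environments/my-environment"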
-- | V1 error format.
plepXgafv :: Lens' ProjectsLocationsEnvironmentsPatch (Maybe Xgafv)
plepXgafv
= lens _plepXgafv (\ s a -> s{_plepXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plepUploadProtocol :: Lens' ProjectsLocationsEnvironmentsPatch (Maybe Text)
plepUploadProtocol
= lens _plepUploadProtocol
(\ s a -> s{_plepUploadProtocol = a})
-- | Required. A comma-separated list of paths, relative to \`Environment\`,
-- of fields to update. For example, to set the version of scikit-learn to
-- install in the environment to 0.19.0 and to remove an existing
-- installation of numpy, the \`updateMask\` parameter would include the
-- following two \`paths\` values:
-- \"config.softwareConfig.pypiPackages.scikit-learn\" and
-- \"config.softwareConfig.pypiPackages.numpy\". The included patch
-- environment would specify the scikit-learn version as follows: {
-- \"config\":{ \"softwareConfig\":{ \"pypiPackages\":{
-- \"scikit-learn\":\"==0.19.0\" } } } } Note that in the above example,
-- any existing PyPI packages other than scikit-learn and numpy will be
-- unaffected. Only one update type may be included in a single request\'s
-- \`updateMask\`. For example, one cannot update both the PyPI packages
-- and labels in the same request. However, it is possible to update
-- multiple members of a map field simultaneously in the same request. For
-- example, to set the labels \"label1\" and \"label2\" while clearing
-- \"label3\" (assuming it already exists), one can provide the paths
-- \"labels.label1\", \"labels.label2\", and \"labels.label3\" and populate
-- the patch environment as follows: { \"labels\":{
-- \"label1\":\"new-label1-value\" \"label2\":\"new-label2-value\" } } Note
-- that in the above example, any existing labels that are not included in
-- the \`updateMask\` will be unaffected. It is also possible to replace an
-- entire map field by providing the map field\'s path in the
-- \`updateMask\`. The new value of the field will be that which is
-- provided in the patch environment. For example, to delete all
-- pre-existing user-specified PyPI packages and install botocore at
-- version 1.7.14, the \`updateMask\` would contain the path
-- \"config.softwareConfig.pypiPackages\", and the patch environment would
-- be the following: { \"config\":{ \"softwareConfig\":{ \"pypiPackages\":{
-- \"botocore\":\"==1.7.14\" } } } } **Note:** Only the following fields
-- can be updated: * \`config.softwareConfig.pypiPackages\` * Replace all
-- custom custom PyPI packages. If a replacement package map is not
-- included in \`environment\`, all custom PyPI packages are cleared. It is
-- an error to provide both this mask and a mask specifying an individual
-- package. * \`config.softwareConfig.pypiPackages.\`packagename * Update
-- the custom PyPI package *packagename*, preserving other packages. To
-- delete the package, include it in \`updateMask\`, and omit the mapping
-- for it in \`environment.config.softwareConfig.pypiPackages\`. It is an
-- error to provide both a mask of this form and the
-- \`config.softwareConfig.pypiPackages\` mask. * \`labels\` * Replace all
-- environment labels. If a replacement labels map is not included in
-- \`environment\`, all labels are cleared. It is an error to provide both
-- this mask and a mask specifying one or more individual labels. *
-- \`labels.\`labelName * Set the label named *labelName*, while preserving
-- other labels. To delete the label, include it in \`updateMask\` and omit
-- its mapping in \`environment.labels\`. It is an error to provide both a
-- mask of this form and the \`labels\` mask. * \`config.nodeCount\` *
-- Horizontally scale the number of nodes in the environment. An integer
-- greater than or equal to 3 must be provided in the \`config.nodeCount\`
-- field. * \`config.webServerNetworkAccessControl\` * Replace the
-- environment\'s current \`WebServerNetworkAccessControl\`. *
-- \`config.databaseConfig\` * Replace the environment\'s current
-- \`DatabaseConfig\`. * \`config.webServerConfig\` * Replace the
-- environment\'s current \`WebServerConfig\`. *
-- \`config.softwareConfig.airflowConfigOverrides\` * Replace all Apache
-- Airflow config overrides. If a replacement config overrides map is not
-- included in \`environment\`, all config overrides are cleared. It is an
-- error to provide both this mask and a mask specifying one or more
-- individual config overrides. *
-- \`config.softwareConfig.airflowConfigOverrides.\`section-name * Override
-- the Apache Airflow config property *name* in the section named
-- *section*, preserving other properties. To delete the property override,
-- include it in \`updateMask\` and omit its mapping in
-- \`environment.config.softwareConfig.airflowConfigOverrides\`. It is an
-- error to provide both a mask of this form and the
-- \`config.softwareConfig.airflowConfigOverrides\` mask. *
-- \`config.softwareConfig.envVariables\` * Replace all environment
-- variables. If a replacement environment variable map is not included in
-- \`environment\`, all custom environment variables are cleared. It is an
-- error to provide both this mask and a mask specifying one or more
-- individual environment variables.
plepUpdateMask :: Lens' ProjectsLocationsEnvironmentsPatch (Maybe GFieldMask)
plepUpdateMask
= lens _plepUpdateMask
(\ s a -> s{_plepUpdateMask = a})
-- | OAuth access token.
plepAccessToken :: Lens' ProjectsLocationsEnvironmentsPatch (Maybe Text)
plepAccessToken
= lens _plepAccessToken
(\ s a -> s{_plepAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plepUploadType :: Lens' ProjectsLocationsEnvironmentsPatch (Maybe Text)
plepUploadType
= lens _plepUploadType
(\ s a -> s{_plepUploadType = a})
-- | Multipart request metadata.
plepPayload :: Lens' ProjectsLocationsEnvironmentsPatch Environment
plepPayload
= lens _plepPayload (\ s a -> s{_plepPayload = a})
-- | The relative resource name of the environment to update, in the form:
-- \"projects\/{projectId}\/locations\/{locationId}\/environments\/{environmentId}\"
plepName :: Lens' ProjectsLocationsEnvironmentsPatch Text
plepName = lens _plepName (\ s a -> s{_plepName = a})
-- | JSONP
plepCallback :: Lens' ProjectsLocationsEnvironmentsPatch (Maybe Text)
plepCallback
= lens _plepCallback (\ s a -> s{_plepCallback = a})
instance GoogleRequest
ProjectsLocationsEnvironmentsPatch
where
type Rs ProjectsLocationsEnvironmentsPatch =
Operation
type Scopes ProjectsLocationsEnvironmentsPatch =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsEnvironmentsPatch'{..}
= go _plepName _plepXgafv _plepUploadProtocol
_plepUpdateMask
_plepAccessToken
_plepUploadType
_plepCallback
(Just AltJSON)
_plepPayload
composerService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsEnvironmentsPatchResource)
mempty
|
brendanhay/gogol
|
gogol-composer/gen/Network/Google/Resource/Composer/Projects/Locations/Environments/Patch.hs
|
mpl-2.0
| 10,757 | 0 | 17 | 2,001 | 927 | 569 | 358 | 127 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Gmail.Users.Settings.UpdatePop
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates POP settings.
--
-- /See:/ <https://developers.google.com/gmail/api/ Gmail API Reference> for @gmail.users.settings.updatePop@.
module Network.Google.Resource.Gmail.Users.Settings.UpdatePop
(
-- * REST Resource
UsersSettingsUpdatePopResource
-- * Creating a Request
, usersSettingsUpdatePop
, UsersSettingsUpdatePop
-- * Request Lenses
, usupXgafv
, usupUploadProtocol
, usupAccessToken
, usupUploadType
, usupPayload
, usupUserId
, usupCallback
) where
import Network.Google.Gmail.Types
import Network.Google.Prelude
-- | A resource alias for @gmail.users.settings.updatePop@ method which the
-- 'UsersSettingsUpdatePop' request conforms to.
type UsersSettingsUpdatePopResource =
"gmail" :>
"v1" :>
"users" :>
Capture "userId" Text :>
"settings" :>
"pop" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] PopSettings :>
Put '[JSON] PopSettings
-- | Updates POP settings.
--
-- /See:/ 'usersSettingsUpdatePop' smart constructor.
data UsersSettingsUpdatePop =
UsersSettingsUpdatePop'
{ _usupXgafv :: !(Maybe Xgafv)
, _usupUploadProtocol :: !(Maybe Text)
, _usupAccessToken :: !(Maybe Text)
, _usupUploadType :: !(Maybe Text)
, _usupPayload :: !PopSettings
, _usupUserId :: !Text
, _usupCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UsersSettingsUpdatePop' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'usupXgafv'
--
-- * 'usupUploadProtocol'
--
-- * 'usupAccessToken'
--
-- * 'usupUploadType'
--
-- * 'usupPayload'
--
-- * 'usupUserId'
--
-- * 'usupCallback'
usersSettingsUpdatePop
:: PopSettings -- ^ 'usupPayload'
-> UsersSettingsUpdatePop
usersSettingsUpdatePop pUsupPayload_ =
UsersSettingsUpdatePop'
{ _usupXgafv = Nothing
, _usupUploadProtocol = Nothing
, _usupAccessToken = Nothing
, _usupUploadType = Nothing
, _usupPayload = pUsupPayload_
, _usupUserId = "me"
, _usupCallback = Nothing
}
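-- Hypothetical usage sketch (not part of the generated module): wrap the smart
-- constructor for the common case of updating the authenticated user's POP
-- settings. The PopSettings value is passed in because its construction is out
-- of scope here; usupUserId defaults to "me" and can be overridden via its lens.
exampleUpdatePop :: PopSettings -> UsersSettingsUpdatePop
exampleUpdatePop settings = usersSettingsUpdatePop settings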
-- | V1 error format.
usupXgafv :: Lens' UsersSettingsUpdatePop (Maybe Xgafv)
usupXgafv
= lens _usupXgafv (\ s a -> s{_usupXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
usupUploadProtocol :: Lens' UsersSettingsUpdatePop (Maybe Text)
usupUploadProtocol
= lens _usupUploadProtocol
(\ s a -> s{_usupUploadProtocol = a})
-- | OAuth access token.
usupAccessToken :: Lens' UsersSettingsUpdatePop (Maybe Text)
usupAccessToken
= lens _usupAccessToken
(\ s a -> s{_usupAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
usupUploadType :: Lens' UsersSettingsUpdatePop (Maybe Text)
usupUploadType
= lens _usupUploadType
(\ s a -> s{_usupUploadType = a})
-- | Multipart request metadata.
usupPayload :: Lens' UsersSettingsUpdatePop PopSettings
usupPayload
= lens _usupPayload (\ s a -> s{_usupPayload = a})
-- | User\'s email address. The special value \"me\" can be used to indicate
-- the authenticated user.
usupUserId :: Lens' UsersSettingsUpdatePop Text
usupUserId
= lens _usupUserId (\ s a -> s{_usupUserId = a})
-- | JSONP
usupCallback :: Lens' UsersSettingsUpdatePop (Maybe Text)
usupCallback
= lens _usupCallback (\ s a -> s{_usupCallback = a})
instance GoogleRequest UsersSettingsUpdatePop where
type Rs UsersSettingsUpdatePop = PopSettings
type Scopes UsersSettingsUpdatePop =
'["https://www.googleapis.com/auth/gmail.settings.basic"]
requestClient UsersSettingsUpdatePop'{..}
= go _usupUserId _usupXgafv _usupUploadProtocol
_usupAccessToken
_usupUploadType
_usupCallback
(Just AltJSON)
_usupPayload
gmailService
where go
= buildClient
(Proxy :: Proxy UsersSettingsUpdatePopResource)
mempty
|
brendanhay/gogol
|
gogol-gmail/gen/Network/Google/Resource/Gmail/Users/Settings/UpdatePop.hs
|
mpl-2.0
| 5,182 | 0 | 20 | 1,264 | 785 | 457 | 328 | 117 | 1 |