code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file.
{-# LANGUAGE CPP, OverloadedStrings #-}
module Main where
import Test.Framework (defaultMain)
import Test.Framework.Providers.HUnit (hUnitTestToTests)
import AllTests
-- | Convert the HUnit test tree to test-framework tests and run them all.
main :: IO ()
main = defaultMain (hUnitTestToTests allTests)
|
jiayuanmark/Haxl
|
tests/TestMain.hs
|
bsd-3-clause
| 403 | 0 | 6 | 59 | 57 | 36 | 21 | 7 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>ViewState</title>
<maps>
<homeID>viewstate</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/viewstate/src/main/javahelp/help_pt_BR/helpset_pt_BR.hs
|
apache-2.0
| 960 | 77 | 66 | 155 | 404 | 205 | 199 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="zh-CN">
<title>Front-End Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/frontendscanner/src/main/javahelp/org/zaproxy/zap/extension/frontendscanner/resources/help_zh_CN/helpset_zh_CN.hs
|
apache-2.0
| 978 | 78 | 67 | 159 | 417 | 211 | 206 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module T15815A where
-- | Build a declaration quote for a value @foo@ whose type equates a
-- type variable with the spliced type @tyQ@.  Regression test helper:
-- exercises equality constraints and splices inside a TH declaration
-- quote (GHC ticket #15815).
mkFoo tyQ = [d|
  foo :: a ~ $(tyQ) => a
  foo = undefined
  |]
|
sdiehl/ghc
|
testsuite/tests/th/T15815A.hs
|
bsd-3-clause
| 123 | 0 | 5 | 33 | 17 | 12 | 5 | -1 | -1 |
{-# LANGUAGE GADTs, RankNTypes #-}
module Main where
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
-- abstract syntax -------------------------------------------------------------
-- | Singleton representation of object-language types (booleans and
-- function arrows); the index @t@ ties each syntax node to its Haskell
-- meaning.
data Ty t where
  Bool :: Ty Bool
  Arr :: Ty a -> Ty b -> Ty (a -> b)

-- | Well-typed expressions, indexed by environment @g@ (a nested pair)
-- and result type @t@.
data Exp g t where
  Var :: Var g t -> Exp g t
  Lam :: Ty a -> Exp (g,a) b -> Exp g (a->b)
  App :: Exp g (s -> t) -> Exp g s -> Exp g t
  If :: Exp g Bool -> Exp g t -> Exp g t -> Exp g t
  ETrue :: Exp g Bool
  EFalse :: Exp g Bool

-- | Typed de Bruijn indices: 'ZVar' is the most recently bound variable.
data Var g t where
  ZVar :: Var (h,t) t
  SVar :: Var h t -> Var (h,s) t

-- smart constructors ----------------------------------------------------------

-- | Build a lambda from a Haskell function over expressions (HOAS style);
-- the bound variable is 'ZVar'.
lamE :: Ty s -> (Exp (g,s) s -> Exp (g,s) t) -> Exp g (s -> t)
lamE s f = Lam s (f (Var ZVar))

-- | Smart 'If': @if t then True else False@ collapses to @t@, and an If
-- whose branches are syntactically equal collapses to one branch.
ifE :: Exp g Bool -> Exp g t -> Exp g t -> Exp g t
ifE t ETrue EFalse = t
ifE t e e' = if eqE e e' then e else If t e e'
-- boring equality tests -------------------------------------------------------

-- | Syntactic equality of closed (boxed) expressions.
eqB :: BoxExp t -> BoxExp s -> Bool
eqB (Box e) (Box e_) = eqE e e_

-- | Syntactic equality of expressions; heterogeneous in both environment
-- and result type, so terms of different types simply compare unequal.
eqE :: Exp g t -> Exp h s -> Bool
eqE (Var x) (Var y) = eqV x y
eqE (Lam s e) (Lam s_ e_) = eqT s s_ && eqE e e_
eqE (App e1 e2) (App e1_ e2_) = eqE e1 e1_ && eqE e2 e2_
eqE (If e1 e2 e3) (If e1_ e2_ e3_) = eqE e1 e1_ && (eqE e2 e2_ && eqE e3 e3_)
eqE (ETrue) (ETrue) = True
eqE (EFalse) (EFalse) = True
eqE _ _ = False

-- | Syntactic equality of type representations.
eqT :: Ty t -> Ty s -> Bool
eqT (Arr s t) (Arr s_ t_) = eqT s s_ && eqT t t_
eqT Bool Bool = True
eqT _ _ = False

-- | Equality of de Bruijn indices (compares depth only).
eqV :: Var g t -> Var h s -> Bool
eqV (SVar x) (SVar y) = eqV x y
eqV ZVar ZVar = True
eqV _ _ = False
-- evaluation ------------------------------------------------------------------

-- | Look a variable up in a nested-pair environment.
var :: Var g t -> g -> t
var ZVar (_,t) = t
var (SVar x) (h,s) = var x h

-- | Tagless evaluator: the GADT indices guarantee every case is
-- well-typed, so no runtime type checks are needed.
eval :: Exp g t -> g -> t
eval (Var x) g = var x g
eval (Lam _ e) g = \a -> eval e (g,a)
eval (App e e') g = eval e g (eval e' g)
eval (ETrue) g = True
eval (EFalse) g = False
eval (If c t e) g = if eval c g then eval t g else eval e g

-- type inference --------------------------------------------------------------

-- | Runtime representation of a typing environment (mirrors nested pairs).
data TyEnv g where
  Nil :: TyEnv g
  Cons :: Ty t -> TyEnv h -> TyEnv (h,t)

-- | Reconstruct the 'Ty' of a well-typed term.
infer :: TyEnv g -> Exp g t -> Ty t
infer g (Var x) = inferVar g x
infer g (Lam t e) = Arr t (infer (Cons t g) e)
-- The Arr-only case relies on the GADT index: the function position of an
-- App always has an arrow type, though GHC cannot see the case as total.
infer g (App e e') = case infer g e of Arr _ t -> t
infer g (ETrue) = Bool
infer g (EFalse) = Bool
infer g (If _ e _) = infer g e

-- | Variable lookup in a type environment.
-- NOTE(review): the 'Nil' case is unhandled; callers must supply an
-- environment deep enough for every variable, or this crashes.
inferVar :: TyEnv g -> Var g t -> Ty t
inferVar (Cons t h) (SVar x) = inferVar h x
inferVar (Cons t h) (ZVar) = t
-- tree monad ------------------------------------------------------------------

-- | A leaf-valued binary tree, used as an enumeration monad.
-- doesn't yet force trees to be fully balanced:
-- Val :: a -> Tree a Z
-- Choice :: Tree a n -> Tree a n -> Tree a (S n)
data Tree a = Val a | Choice (Tree a) (Tree a)

instance Functor Tree where
  fmap = liftM

instance Applicative Tree where
  -- Defined directly (previously @pure = return@ while the Monad instance
  -- defined @return@ — a confusing cross-instance cycle that also breaks
  -- under the "Monad of no return" simplification).
  pure = Val
  (<*>) = ap

instance Monad Tree where
  return = pure
  -- Bind substitutes a subtree for every leaf.
  (Val a) >>= f = f a
  (Choice l r) >>= f = Choice (l >>= f) (r >>= f)

-- | 'fmap' for any monad (historical helper; equivalent to 'liftM').
tmap :: Monad m => (a->b) -> m a -> m b
tmap f x = do { a <- x; return (f a) }

-- | Leaves of a tree, left to right; accumulator keeps it linear.
-- (Signature added; it was previously inferred.)
flatten :: Tree a -> [a]
flatten t = flatten_ t []
  where
    flatten_ (Val a) k = a:k
    flatten_ (Choice l r) k = flatten_ l (flatten_ r k)
-- quote & friends -------------------------------------------------------------
-- for values --------------------------

-- | Enumerate every value of a type as a choice tree.
enumV :: Ty t -> Tree t
-- | Every boolean observation ("question") one can make of a value.
questionsV :: Ty t -> [t -> Bool]
enumV Bool = Choice (Val True) (Val False)
enumV (Arr s t) = mkEnum (questionsV s) (enumV t)
  where
    -- Build each function by choosing, per question about the argument,
    -- between two recursively enumerated sub-functions.
    mkEnum [] t = tmap const t
    mkEnum (q:qs) es = do
      f1 <- mkEnum qs es
      f2 <- mkEnum qs es
      return (\d -> if q d then f1 d else f2 d)
questionsV Bool = return (\x -> x)
questionsV (Arr s t) = do
  d <- flatten (enumV s)
  q <- questionsV t
  return (\f -> q (f d))

-- for expressions ---------------------

-- | Like 'enumV'/'questionsV', but producing syntax instead of values.
enumE :: Ty t -> Tree (Exp g t)
questionsE :: Ty t -> [Exp g t -> Exp g Bool]
enumE Bool = Choice (Val ETrue) (Val EFalse)
enumE (Arr s t) = tmap (lamE s) (mkEnumE (questionsE s) (enumE t))
  where
    mkEnumE [] t = tmap const t
    mkEnumE (q:qs) es = do
      f1 <- mkEnumE qs es
      f2 <- mkEnumE qs es
      return (\d -> ifE (q d) (f1 d) (f2 d))
questionsE Bool = return (\x -> x)
questionsE (Arr s t) = do
  d <- flatten (enumE s)
  q <- questionsE t
  return (\f -> q (App f d))

-- should be
-- find (List (Exp g Bool) n) -> Tree (Exp g a) n -> Exp g a

-- | Select a leaf guided by a list of conditions — one per tree level.
-- Partial: list length must equal tree depth (see the comment above for
-- the indexed type this should have).
find :: [Exp g Bool] -> Tree (Exp g a) -> Exp g a
find [] (Val a) = a
find (b:bs) (Choice l r) = ifE b (find bs l) (find bs r)
find _ _ = error "bad arguments to find"

-- | Reify a Haskell value back into syntax at the given type.
quote :: Ty t -> t -> Exp g t
quote Bool t = case t of True -> ETrue; False -> EFalse
quote (Arr s t) f = lamE s (\e -> find (do q <- questionsE s; return (q e))
                                       (tmap (quote t . f) (enumV s)))
-- normalization (by evaluation) -----------------------------------------------

-- | A closed expression: well-typed in every environment.
data BoxExp t = Box (forall g. Exp g t)

-- | Normalization by evaluation: evaluate to a Haskell value, then quote
-- the value back into (normal-form) syntax.
normalize :: Ty t -> BoxExp t -> BoxExp t
normalize s (Box e) = Box (quote s (eval e ()))

-- examples --------------------------------------------------------------------

b2b = Arr Bool Bool
b22b = Arr b2b b2b
zero = Var ZVar
one = Var (SVar ZVar)
-- Church-numeral-style iterators at type (Bool -> Bool) -> Bool -> Bool.
once = Box (Lam b2b (Lam Bool (App one zero)))
twice = Box (Lam b2b (Lam Bool (App one (App one zero))))
thrice = Box (Lam b2b (Lam Bool (App one (App one (App one zero)))))
-- Over booleans, applying a function three times equals applying it once
-- (f^3 = f), while twice differs — so the expected output is [True,False].
test = [ eqB (nf b22b thrice) (nf b22b once)
       , eqB (nf b22b twice) (nf b22b once)]
  where nf = normalize
main = print test
|
ezyang/ghc
|
testsuite/tests/gadt/nbe.hs
|
bsd-3-clause
| 5,877 | 0 | 15 | 1,811 | 2,815 | 1,407 | 1,408 | 128 | 3 |
{-# LANGUAGE MagicHash, UnboxedTuples #-}
module Main where
import GHC.Prim (Int#, Double#)
main :: IO ()
-- Bind (but never use) the result of a call returning an unboxed tuple;
-- the binding itself is what this typecheck/run test exercises.
main = let f = int2Integer# 0# in putStrLn ""
-- NOINLINE keeps the call from being optimised away at the use site.
{-# NOINLINE int2Integer# #-}
int2Integer# :: Int# -> (# Int#, Double# #)
int2Integer# x = (# x, 1.0## #)
|
wxwxwwxxx/ghc
|
testsuite/tests/typecheck/should_run/tcrun048.hs
|
bsd-3-clause
| 263 | 0 | 9 | 50 | 78 | 43 | 35 | 8 | 1 |
module Oden.CLI.Run where
import Oden.Backend
import Oden.Backend.Go
import Oden.Scanner
import Control.Monad.Reader
import System.Directory
import System.FilePath
import System.Process
import System.IO.Temp
import Oden.CLI
import Oden.CLI.Build
-- | Compile the Oden source file at @path@ as a main package, generate Go
-- code into a fresh temporary directory, and execute it with @go run@.
run :: FilePath -> CLI ()
run path = do
  pkg <- compileFile (OdenSourceFile path ["main"])
  tmpDir <- liftIO getTemporaryDirectory
  tmp <- liftIO (createTempDirectory tmpDir "oden-run.go")
  files <- liftEither (codegen (GoBackend tmp) pkg)
  mapM_ writeCompiledFile files
  -- Exactly one compiled main package is required to run.
  case filter isMainPackage files of
    [] -> liftIO $ exitWithMessage "Not a main package!"
    [CompiledFile name _] -> liftIO $ callCommand ("go run " ++ name)
    _ -> liftIO $ exitWithMessage "Cannot run with multiple main packages!"
  where
    -- A compiled file is the main package iff its base name is "main".
    isMainPackage :: CompiledFile -> Bool
    isMainPackage (CompiledFile name _) = takeBaseName name == "main"
|
AlbinTheander/oden
|
cli/Oden/CLI/Run.hs
|
mit
| 983 | 0 | 13 | 246 | 266 | 135 | 131 | 24 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Data.Control ( Command (..)
, Control (..)
, control
) where
--import Control.Monad.Trans (liftIO)
import Data.Aeson
import Data.Functor
import qualified Network.MPD as MPD
-- | Player-control commands accepted over the wire.  Stop is currently
-- disabled (kept commented out here and in the instances below).
data Command = Play
             | Pause
             -- | Stop
             | Prev
             | Next
             deriving (Show, Read)

-- Commands serialise to upper-case string tags.
instance ToJSON Command where
  toJSON Play = "PLAY"
  toJSON Pause = "PAUSE"
  toJSON Prev = "PREV"
  toJSON Next = "NEXT"
  -- toJSON Stop = "STOP"
-- | Parse a command from its upper-case string tag.
--
-- The original instance was partial: an unknown tag, or a non-string JSON
-- value, crashed with a pattern-match failure instead of failing the
-- parse.  (The commented-out @fail@ line in the original shows this was
-- the intent.)  Both cases now 'fail' inside the Parser monad.
instance FromJSON Command where
  parseJSON (String s) = case s of
    "PLAY" -> return Play
    "PAUSE" -> return Pause
    "PREV" -> return Prev
    "NEXT" -> return Next
    _ -> fail $ "Could not parse command: " ++ show s
  parseJSON _ = fail "Expected a JSON string for Command"
-- | A control message wrapping one command, e.g. @{"cmd": "PLAY"}@.
data Control = Control { cmd :: Command } deriving (Show)

instance FromJSON Control where
  parseJSON (Object v) = Control <$> v .: "cmd"
  -- Previously missing: a non-object value crashed with a pattern-match
  -- failure instead of producing a parse error.
  parseJSON _ = fail "Expected a JSON object for Control"

-- | Interpret a control message as an MPD action.
control :: Control -> MPD.MPD ()
control (Control Play) = MPD.pause False -- MPD.stSongPos <$> MPD.status >>= MPD.play
control (Control Pause) = MPD.pause True
control (Control Next) = MPD.next
control (Control Prev) = MPD.previous
|
kalhauge/vagnplayer
|
src/Data/Control.hs
|
mit
| 1,139 | 0 | 9 | 330 | 308 | 168 | 140 | 31 | 1 |
-- | A point on the integer grid.
type Point = (Int, Int)

-- | Extrapolate the line through two points: step from the second point
-- by the same displacement that led from the first to the second.
nextPoint :: Point -> Point -> Point
nextPoint (x, y) (x', y') =
  let dx = x' - x
      dy = y' - y
  in (x' + dx, y' + dy)
-- Read point pairs from stdin (skipping the leading count line) and print
-- the next collinear point for each pair.
checkPoints = do
  cs <- map (\l -> (map (\x-> read x :: Int) . words) l) . drop 1 <$> lines <$> getContents
  -- NOTE(review): the list pattern assumes exactly four numbers per line;
  -- malformed input crashes with a pattern-match failure (and 'read' is
  -- partial too) — acceptable for contest input, not for general use.
  let inPoints = map (\[a,b,c,d]-> ((a,b), (c,d))) cs
      points = map ( uncurry nextPoint) inPoints
      formatted = map (\(x,y)-> (show x)++" "++(show y)) points
  mapM_ putStrLn formatted

-- For each number on stdin (skipping the leading count line), print it
-- plus one.
sockmatch = do
  cs <- map (\x-> read x:: Int) . drop 1 <$> lines <$> getContents
  mapM_ (print . (+ 1)) cs

main = print "placeholder"
|
ChrisCoffey/haskell_sandbox
|
hackerRank/maths.hs
|
mit
| 600 | 0 | 20 | 162 | 338 | 182 | 156 | 15 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-
Script for wiping out something entirely
- files.txt is prepare by calling window.getShipImgPath on poi-plugin-navy-album
exhausting all valid parameters.
-}
module NoMore
( main
) where
import Turtle
import Prelude hiding (FilePath)
import qualified Data.ByteString as BS
-- import Filesystem.Path.CurrentOS
import Control.Monad
-- | Poi's KanColle cache directory under the current user's home.
getPoiCachePath :: MonadIO m => m FilePath
getPoiCachePath =
  (\p -> p </> ".config" </> "poi" </> "MyCache" </> "KanColle") <$> home

main :: IO ()
main = do
  emptyPngContent <- BS.readFile "empty.png"
  resourcePaths <- lines <$> readFile "files.txt"
  poiPath <- getPoiCachePath
  -- NOTE(review): the (_:rscRaw) pattern drops each line's first character
  -- (presumably a leading '/' — confirm against files.txt) and crashes on
  -- empty lines.
  forM_ resourcePaths $ \(_:rscRaw) -> do
    let curPath = poiPath </> fromString rscRaw
    mktree (directory curPath)
    -- Replace every cached resource with the empty placeholder image.
    BS.writeFile (encodeString curPath) emptyPngContent
|
Javran/misc
|
naka-no-more/NoMore.hs
|
mit
| 846 | 0 | 15 | 149 | 199 | 103 | 96 | 19 | 1 |
{-# LANGUAGE QuasiQuotes, ScopedTypeVariables #-}
module Main where
import Paths_postgrest (version)
import PostgREST.App
import PostgREST.Middleware
import PostgREST.Error(errResponse)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Data.String.Conversions (cs)
import Network.Wai (strictRequestBody)
import Network.Wai.Middleware.Cors (cors)
import Network.Wai.Handler.Warp hiding (Connection)
import Network.Wai.Middleware.Gzip (gzip, def)
import Network.Wai.Middleware.Static (staticPolicy, only)
import Network.Wai.Middleware.RequestLogger (logStdout)
import Data.List (intercalate)
import Data.Version (versionBranch)
import Data.Functor.Identity
import Data.Text(Text)
import qualified Hasql as H
import qualified Hasql.Postgres as P
import Options.Applicative hiding (columns)
import PostgREST.Config (AppConfig(..), argParser, corsPolicy)
-- | Ask the server for its numeric version and check it is at least
-- 9.2.0 (90200), the minimum PostgREST supports.
-- NOTE(review): 'read' is partial — a non-numeric response crashes
-- instead of failing gracefully; readMaybe would be safer.
isServerVersionSupported = do
  Identity (row :: Text) <- H.tx Nothing $ H.singleEx $ [H.stmt|SHOW server_version_num|]
  return $ read (cs row) >= 90200
-- | Parse CLI options, warn about insecure settings, connect a Postgres
-- session pool, verify the server version, then serve the PostgREST app.
main :: IO ()
main = do
  let opts = info (helper <*> argParser) $
        fullDesc
        <> progDesc (
          "PostgREST "
          <> prettyVersion
          <> " / create a REST API to an existing Postgres database"
        )
      parserPrefs = prefs showHelpOnError
  conf <- customExecParser parserPrefs opts
  let port = configPort conf
  -- Warn (but continue) when running without TLS or with the default JWT
  -- secret.
  unless (configSecure conf) $
    putStrLn "WARNING, running in insecure mode, auth will be in plaintext"
  unless ("secret" /= configJwtSecret conf) $
    putStrLn "WARNING, running in insecure mode, JWT secret is the default value"
  Prelude.putStrLn $ "Listening on port " ++
    (show $ configPort conf :: String)
  -- Server header: explicit override wins, else "<name>/<version>".
  let userServerString = cs $ configServerString conf
      userServerName = cs $ configServerName conf
      serverString =
        if userServerString /= "" then
          userServerString
        else
          cs $ userServerName <> "/" <> prettyVersion
  let pgSettings = P.ParamSettings (cs $ configDbHost conf)
        (fromIntegral $ configDbPort conf)
        (cs $ configDbUser conf)
        (cs $ configDbPass conf)
        (cs $ configDbName conf)
      appSettings = setPort port
        . setServerName serverString
        $ defaultSettings
      -- Middleware stack: logging, optional HTTPS redirect, gzip, CORS,
      -- and the favicon static route.
      middle = logStdout
        . (if configSecure conf then redirectInsecure else id)
        . gzip def . cors corsPolicy
        . staticPolicy (only [("favicon.ico", "static/favicon.ico")])
  poolSettings <- maybe (fail "Improper session settings") return $
    H.poolSettings (fromIntegral $ configPool conf) 30
  pool :: H.Pool P.Postgres
    <- H.acquirePool pgSettings poolSettings
  -- Refuse to start against an unsupported PostgreSQL version.
  resOrError <- H.session pool isServerVersionSupported
  either (fail . show) (\supported -> unless supported $ fail "Cannot run in this PostgreSQL version, PostgREST needs at least 9.2.0") resOrError
  -- Each request runs in its own transaction; errors become HTTP error
  -- responses.
  runSettings appSettings $ middle $ \req respond -> do
    body <- strictRequestBody req
    resOrError <- liftIO $ H.session pool $ H.tx Nothing $
      authenticated conf (app conf body) req
    either (respond . errResponse) respond resOrError
  where
    prettyVersion = intercalate "." $ map show $ versionBranch version
|
framp/postgrest
|
src/PostgREST/Main.hs
|
mit
| 3,339 | 0 | 16 | 804 | 879 | 460 | 419 | 73 | 3 |
module Transformers where
import Data.Maybe
import Data.List
import Core
-- | Keep only the path maps whose destination contains @ss@ as a
-- contiguous substring.
--
-- The original implementation searched 'subsequences' of the destination,
-- which (a) matches non-contiguous character subsequences rather than
-- substrings, and (b) is exponential in the destination's length.
-- 'isInfixOf' expresses the intended containment test directly.
pathContains :: String -> PathMapsT
pathContains ss pathMaps =
  filter ((ss `isInfixOf`) . pathMapDest) pathMaps
-- | Run every transformer on the same input and concatenate the results.
fork :: [PathMapsT] -> PathMapsT
fork ts pathMaps = concatMap ($ pathMaps) ts

-- | 'fork' over transformers derived from a list of seeds.
forkMap :: [a] -> (a -> PathMapsT) -> PathMapsT
forkMap xs t = fork (map t xs)

-- | Rewrite every path map's destination to @dest@.
copyTo :: String -> PathMapsT
copyTo dest = map (\pm -> pm { pathMapDest = dest })
|
rexim/mapie
|
src/Transformers.hs
|
mit
| 498 | 0 | 11 | 95 | 189 | 103 | 86 | 13 | 1 |
module Hunch.Constants where
-- | Full name of the package maintainer.
maintainerName :: String
maintainerName = "Logan Braga"

-- | Maintainer e-mail address, wrapped in angle brackets.
maintainerMail :: String
maintainerMail = concat ["<", address, "@", host, ">"]
  where
    address = "braga.logan"
    host = "gmail.com"

-- | Name and address combined, for version/about output.
maintainerInfo :: String
maintainerInfo = unwords [maintainerName, maintainerMail]

-- | Project display name.
projectName :: String
projectName = "Hunch"

-- | One-line project description.
projectDesc :: String
projectDesc = "Terse syntax for file system manipulation"

-- | Copyright year.
projectYear :: Integer
projectYear = 2015

-- | Source repository location.
projectRepo :: String
projectRepo = "github.com/loganbraga/hunch"

-- | SPDX-style license identifier.
projectLicense :: String
projectLicense = "MIT"
|
loganbraga/hunch
|
app/Hunch/Constants.hs
|
mit
| 616 | 0 | 8 | 107 | 127 | 74 | 53 | 20 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-} -- FIXME remove
-- | Some fields spiced up with jQuery UI.
module Yesod.Form.Jquery
( YesodJquery (..)
, jqueryDayField
, jqueryAutocompleteField
, googleHostedJqueryUiCss
, JqueryDaySettings (..)
, Default (..)
) where
import Yesod.Handler
import Yesod.Core (Route)
import Yesod.Form
import Yesod.Widget
import Data.Time (Day)
import Data.Default
import Text.Hamlet (shamlet)
import Text.Julius (julius)
import Data.Text (Text, pack, unpack)
import Data.Monoid (mconcat)
import Yesod.Core (RenderMessage)
-- | URL of the Google-hosted jQuery UI 1.8 CSS file for the given theme.
googleHostedJqueryUiCss :: Text -> Text
googleHostedJqueryUiCss theme = mconcat [prefix, theme, suffix]
  where
    prefix = pack "http://ajax.googleapis.com/ajax/libs/jqueryui/1.8/themes/"
    suffix = pack "/jquery-ui.css"
class YesodJquery a where
-- | The jQuery Javascript file. Note that in upgrades to this library, the
-- version of jQuery referenced, or where it is downloaded from, may be
-- changed without warning. If you are relying on a specific version of
-- jQuery, you should give an explicit URL instead of relying on the
-- default value.
--
-- Currently, the default value is jQuery 1.7 from Google\'s CDN.
urlJqueryJs :: a -> Either (Route a) Text
urlJqueryJs _ = Right "http://ajax.googleapis.com/ajax/libs/jquery/1.7/jquery.min.js"
-- | The jQuery UI 1.8 Javascript file.
urlJqueryUiJs :: a -> Either (Route a) Text
urlJqueryUiJs _ = Right "http://ajax.googleapis.com/ajax/libs/jqueryui/1.8/jquery-ui.min.js"
-- | The jQuery UI 1.8 CSS file; defaults to cupertino theme.
urlJqueryUiCss :: a -> Either (Route a) Text
urlJqueryUiCss _ = Right $ googleHostedJqueryUiCss "cupertino"
-- | jQuery UI time picker add-on.
urlJqueryUiDateTimePicker :: a -> Either (Route a) Text
urlJqueryUiDateTimePicker _ = Right "http://github.com/gregwebs/jquery.ui.datetimepicker/raw/master/jquery.ui.datetimepicker.js"
jqueryDayField :: (RenderMessage master FormMessage, YesodJquery master) => JqueryDaySettings -> Field sub master Day
jqueryDayField jds = Field
{ fieldParse = parseHelper $ maybe
(Left MsgInvalidDay)
Right
. readMay
. unpack
, fieldView = \theId name attrs val isReq -> do
toWidget [shamlet|
$newline never
<input id="#{theId}" name="#{name}" *{attrs} type="date" :isReq:required="" value="#{showVal val}">
|]
addScript' urlJqueryJs
addScript' urlJqueryUiJs
addStylesheet' urlJqueryUiCss
toWidget [julius|
$(function(){
var i = document.getElementById("#{theId}");
if (i.type != "date") {
$(i).datepicker({
dateFormat:'yy-mm-dd',
changeMonth:#{jsBool $ jdsChangeMonth jds},
changeYear:#{jsBool $ jdsChangeYear jds},
numberOfMonths:#{mos $ jdsNumberOfMonths jds},
yearRange:"#{jdsYearRange jds}"
});
}
});
|]
}
where
showVal = either id (pack . show)
jsBool True = "true" :: Text
jsBool False = "false" :: Text
mos (Left i) = show i
mos (Right (x, y)) = concat
[ "["
, show x
, ","
, show y
, "]"
]
jqueryAutocompleteField :: (RenderMessage master FormMessage, YesodJquery master)
=> Route master -> Field sub master Text
jqueryAutocompleteField src = Field
{ fieldParse = parseHelper $ Right
, fieldView = \theId name attrs val isReq -> do
toWidget [shamlet|
$newline never
<input id="#{theId}" name="#{name}" *{attrs} type="text" :isReq:required="" value="#{either id id val}" .autocomplete>
|]
addScript' urlJqueryJs
addScript' urlJqueryUiJs
addStylesheet' urlJqueryUiCss
toWidget [julius|
$(function(){$("##{theId}").autocomplete({source:"@{src}",minLength:2})});
|]
}
addScript' :: (master -> Either (Route master) Text) -> GWidget sub master ()
addScript' f = do
y <- lift getYesod
addScriptEither $ f y
addStylesheet' :: (y -> Either (Route y) Text) -> GWidget sub y ()
addStylesheet' f = do
y <- lift getYesod
addStylesheetEither $ f y
-- | Total variant of 'read': the first successful parse, or Nothing.
-- Trailing unparsed input is accepted, matching 'reads' behaviour.
readMay :: Read a => String -> Maybe a
readMay s
  | (x, _) : _ <- reads s = Just x
  | otherwise = Nothing
-- | Settings passed through to the jQuery UI datepicker.
data JqueryDaySettings = JqueryDaySettings
  { jdsChangeMonth :: Bool -- ^ Rendered as the @changeMonth@ option.
  , jdsChangeYear :: Bool -- ^ Rendered as the @changeYear@ option.
  , jdsYearRange :: String -- ^ Rendered as @yearRange@, e.g. \"c-10:c+10\".
  , jdsNumberOfMonths :: Either Int (Int, Int)
    -- ^ Rendered as @numberOfMonths@: a count, or a two-element array —
    -- presumably rows\/columns; confirm against the jQuery UI docs.
  }

-- Defaults: plain picker, ±10 years around the current year, one month.
instance Default JqueryDaySettings where
  def = JqueryDaySettings
    { jdsChangeMonth = False
    , jdsChangeYear = False
    , jdsYearRange = "c-10:c+10"
    , jdsNumberOfMonths = Left 1
    }
|
piyush-kurur/yesod
|
yesod-form/Yesod/Form/Jquery.hs
|
mit
| 4,891 | 0 | 12 | 1,180 | 938 | 505 | 433 | 93 | 3 |
{-# LANGUAGE PackageImports, BangPatterns, FlexibleContexts, AllowAmbiguousTypes #-}
{-# OPTIONS -Wall -fno-warn-missing-signatures -fno-warn-incomplete-patterns #-}
module ImageProcessing.ChannelProcessing
( operateRGB
, blurRGB
, blurGrey
, computeChannel
, blurChannel
) where
import System.IO.Unsafe
import Data.Array.Repa
import Prelude as P
import ImageRep.Utility (fromChannels, toChannels)
import Data.Vector.Unboxed.Base()
import ImageProcessing.DoubleProcess (applyGauss)
-- | Apply a per-channel transformation to all three channels of an image.
operateRGB :: (Source r1 e1, Source r2 e2)
  => (Array D DIM2 e1 -> Array r2 DIM2 e2)
  -> Array r1 DIM3 e1 -> Array D DIM3 e2
operateRGB f = fromChannels . listToTuple . (P.map f) . tupleToList . toChannels

-- | Gaussian blur of a three-channel image, channel by channel.
blurRGB :: (Source r Double) => Array r DIM3 Double -> Array D DIM3 Double
blurRGB = operateRGB blurGrey

-- | Gaussian blur of a single (greyscale) channel.
blurGrey :: Array D DIM2 Double -> Array U DIM2 Double
blurGrey = blurChannel . computeChannel

-- NOTE(review): unsafePerformIO assumes applyGauss is observably pure
-- apart from evaluation — confirm before relying on sharing/ordering.
blurChannel :: Array U DIM2 Double -> Array U DIM2 Double
blurChannel = unsafePerformIO . applyGauss

-- | Force a delayed array into an unboxed manifest array (in parallel).
computeChannel :: Array D DIM2 Double -> Array U DIM2 Double
computeChannel = unsafePerformIO . computeP
-- | Flatten a homogeneous triple into a three-element list.
tupleToList :: (a,a,a) -> [a]
tupleToList (a,b,c) = [a,b,c]

-- | Inverse of 'tupleToList'.  Behaviour for three-element lists is
-- unchanged; lists of any other length previously crashed with a bare
-- pattern-match failure and now raise a descriptive error instead.
listToTuple :: [a] -> (a,a,a)
listToTuple [x,y,z] = (x,y,z)
listToTuple xs = error $
  "ChannelProcessing.listToTuple: expected exactly 3 elements, got "
  ++ show (length xs)
|
eklinkhammer/haskell-vision
|
src/ImageProcessing/ChannelProcessing.hs
|
mit
| 1,285 | 0 | 10 | 240 | 414 | 229 | 185 | 30 | 1 |
{-# LANGUAGE
ConstraintKinds, TemplateHaskell #-}
module Notes (
NID, Note (..), NStore,
runDB, DBState (..), DB, DB', DBT,
nid, subs, title, created, lastModified,
db, notes, root,
freshDB,
genNid,
newNote, atNid, addSub, shuffleSubs, deleteEverywhere )
where
import ClassyPrelude hiding (getLine, putStrLn, on)
import Text.Printf
import Data.UUID
import Control.Monad.Classes
import Control.Monad.State (StateT, evalStateT)
import System.Random (randomIO)
import System.Random.Shuffle (shuffleM)
import Data.Time.LocalTime
import ClassesLens
import Helpers
-- | Unique note identifier.
type NID = UUID
-- | A single note.
data Note = Note {
-- | Note identifier.
_nid :: NID,
-- | A list of note's children.
_subs :: [NID],
-- | Note's title.
_title :: Text,
-- | Note's time of creation.
_created :: ZonedTime,
-- | Note's time of modification.
_lastModified :: ZonedTime }
deriving (Read, Show)
makeLenses ''Note
-- | A structure which binds 'Note's to their 'NID's.
type NStore = Map NID Note
-- | Permanently stored information about notes (in other words, what goes
-- into the database file).
data DBState = DBState {
_notes :: NStore,
_root :: NID }
deriving (Read, Show)
makeLenses ''DBState
-- | Monads with this constraint provide access to the note database ('DB').
type DB' m = (Functor m, MonadState DBState m)
-- | A type for actions with the access to the note database. See 'DB''.
type DB a = DB' m => m a
-- | A 'DB' monad transformer.
type DBT = StateT DBState
-- | Creates a fresh database and runs a 'DB' action.
runDB :: IO' m => DBT m a → m a
runDB act = evalStateT (freshDB >> act) err
where
err = error "Notes.runDB: tried to access undefined state."
-- | A 'Lens' for accessing 'DBState'.
db :: Lens' DBState DBState
db = id
-- | Generates a fresh 'DBState' with an untitled root note and no children.
freshDB :: (IO' m, DB' m) => m ()
freshDB = do
db .= DBState {
_notes = mempty,
_root = error "Notes.freshDB: ?!" }
rt ← newNote [] "/"
root .= rt
-- | Generates a random NID.
genNid :: IO_ NID
genNid = liftIO' randomIO
-- | Adds a note to the database (without yet linking it anywhere).
newNote ::
(IO' m, DB' m) =>
[NID] → -- ^ Note's children.
Text → -- ^ Note's title.
m NID -- ^ Created note's 'NID'.
newNote s t = do
nid ← genNid
timeStamp ← liftIO' getZonedTime
let n = Note {
_nid = nid,
_subs = s,
_title = t,
_created = timeStamp,
_lastModified = timeStamp }
notes.at nid .= Just n
return nid
-- | Makes a note a child of another note (without checking if it was already
-- a child!).
addSub ::
NID → -- ^ Child note.
NID → -- ^ Parent note.
DB ()
addSub note parent = atNid parent.subs <>= [note]
-- | A lens for accessing a note, given its 'NID'.
--
-- Throws an exception if note is not found.
atNid :: NID → Lens' DBState Note
atNid n = lens get set
  where
    err = error $ printf "atNid: %s not found" (show n)
    -- Getter: crash via 'err' when the NID is absent from the store.
    get ms = fromMaybe err $ ms ^. notes.at n
    -- Setter: only replaces an existing note; it never inserts, and
    -- crashes on a missing NID just like the getter.
    set ms note | ms ^. notes.to (member n) = ms & notes.ix n .~ note
                | otherwise = err
-- | Randomly shuffles children of a note.
shuffleSubs :: (DB' m, IO' m) => NID → m ()
shuffleSubs note = do
subs' ← liftIO' . shuffleM /$/ use $ atNid note.subs
atNid note.subs .= subs'
-- | Traverses the entire tree, removing note with NIDs satisfying the
-- predicate from every other note's list of children.
deleteEverywhere :: (NID → Bool) → DB ()
deleteEverywhere p = go =<< use root
where
go n = do
ss ← use (notes.ix n.subs)
let ss' = filter (not . p) ss
mapM_ go ss'
(notes.ix n.subs) .= ss'
|
aelve/Jane
|
Notes.hs
|
mit
| 3,963 | 0 | 14 | 1,105 | 992 | 542 | 450 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Handler.Login
(loginHandler) where
import Snap.Snaplet.Auth (loginUser, currentUser)
import Text.Blaze.Html5
import Snap.Blaze (blaze)
import Application
-- | Handle a login form post using the \"username\", \"password\" and
-- \"remember\" fields, rendering either the failure reason or the
-- logged-in user.
loginHandler :: AuthHandler ()
loginHandler = loginUser "username" "password" (Just "remember") onFailure onSuccess
  where
    onFailure authFail = blaze $ string $ "Error: " ++ show authFail
    onSuccess = do
      u <- currentUser
      case u of
        Just _ -> blaze $ docTypeHtml $ string $ show u
        -- NOTE(review): presumably unreachable right after a successful
        -- login — confirm against snaplet-auth semantics.
        Nothing -> blaze "Can't happen"
|
epsilonhalbe/rendezvous
|
src/Handler/Login.hs
|
mit
| 538 | 0 | 14 | 103 | 154 | 81 | 73 | 15 | 2 |
module TicTactToeLocalPvP where
import Data.Function (on)
import Control.Monad.Trans.Class
import Control.Monad.Trans.State
import TicTacToe
import TicTacToeIO
-- Run a local player-vs-player game, X to move first on an empty board,
-- then announce the result and show the final board.
main = do (winner, (_, finalBoard)) <- runStateT runUntilWin (X, clearBoard)
          putStrLn $ announceResult winner
          putStrLn "The final board state is:"
          putStrLn $ pprintBoard finalBoard
-- | Play turns until the game ends.  Returns the winner, or Nothing for a
-- draw; the final board (including the game-ending move) is left in the
-- state.
runUntilWin :: GameStateIO (Maybe Player)
runUntilWin = do
  p <- nextPlayer
  b <- theBoard
  fmap pprintBoard theBoard >>= lift . putStr
  lift $ putStrLn $ show p ++ ", make your move! (format: 'An' where A is the column and n is the row in which you wish to place an " ++ show p ++ "."
  move <- getMove $ occupiedFields b -- this does not terminate until a valid move is input
  let newBoard = setField move p b
  -- BUG FIX: record the move before deciding the outcome.  Previously a
  -- winning or drawing move was never written back to the state, so main
  -- printed a final board missing the last move.
  put (otherPlayer p, newBoard)
  case checkWin newBoard of
    Just x -> return $ Just x
    Nothing
      -- BUG FIX: the draw test previously inspected the pre-move board
      -- 'b', so the move that filled the board was not detected as a draw
      -- and play continued on a full board.
      | isFull newBoard -> return Nothing
      | otherwise -> runUntilWin
|
Solonarv/TicTacToe
|
TicTacToeLocalPvP.hs
|
mit
| 1,134 | 0 | 14 | 389 | 266 | 132 | 134 | 22 | 3 |
-- | Factorial.  An explicit signature pins the type (avoiding numeric
-- defaulting), and negative arguments — which previously recursed
-- forever — now raise an explicit error.
fact :: Integer -> Integer
fact n
  | n < 0 = error "fact: negative argument"
  | n == 0 = 1
  | otherwise = n * fact (n - 1)
-- Print 10! (3628800).
main = do
  print $ fact 10
|
mino2357/Hello_Haskell
|
src/haskell014.hs
|
mit
| 67 | 0 | 8 | 24 | 46 | 22 | 24 | 4 | 1 |
module FileIO where
-- | Load the data set at @path/file@ as a list of transactions: one
-- transaction per line, each a list of whitespace-separated strings.
importData :: String -> String -> IO [[String]]
importData path file =
  map words . lines <$> readFile (path ++ "/" ++ file)
-- | Write a result string to @path/res_file@.
exportResult :: String -> String -> String -> IO ()
exportResult path file = writeFile (path ++ "/res_" ++ file)
|
gaetjen/FunFreakyPatMan
|
src/FileIO.hs
|
mit
| 434 | 0 | 11 | 88 | 130 | 65 | 65 | 7 | 1 |
{-| Implementation of command-line functions.
This module holds the common command-line related functions for the
binaries, separated into this module since "Ganeti.Utils" is
used in many other places and this is more IO oriented.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.CLI
( Options(..)
, OptType
, defaultOptions
, Ganeti.HTools.CLI.parseOpts
, parseOptsInner
, parseYesNo
, parseISpecString
, shTemplate
, maybeSaveCommands
, maybePrintNodes
, maybePrintInsts
, maybeShowWarnings
, printKeys
, printFinal
, setNodeStatus
-- * The options
, oDataFile
, oDiskMoves
, oDiskTemplate
, oSpindleUse
, oDynuFile
, oMonD
, oMonDDataFile
, oEvacMode
, oRestrictedMigrate
, oExInst
, oExTags
, oExecJobs
, oForce
, oFullEvacuation
, oGroup
, oIAllocSrc
, oIgnoreDyn
, oIgnoreNonRedundant
, oIgnoreSoftErrors
, oIndependentGroups
, oAcceptExisting
, oInstMoves
, oJobDelay
, genOLuxiSocket
, oLuxiSocket
, oMachineReadable
, oMaxCpu
, oMaxSolLength
, oMinDisk
, oMinGain
, oMinGainLim
, oMinResources
, oMinScore
, oNoHeaders
, oNoSimulation
, oNodeSim
, oNodeTags
, oOfflineMaintenance
, oOfflineNode
, oOneStepOnly
, oOutputDir
, oPrintCommands
, oPrintInsts
, oPrintMoves
, oPrintNodes
, oQuiet
, oRapiMaster
, oSaveCluster
, oSelInst
, oShowHelp
, oShowVer
, oShowComp
, oSkipNonRedundant
, oStdSpec
, oTargetResources
, oTieredSpec
, oVerbose
, oPriority
, genericOpts
) where
import Control.Monad
import Data.Char (toUpper)
import Data.Maybe (fromMaybe)
import System.Console.GetOpt
import System.IO
import Text.Printf (printf)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.Path as Path
import Ganeti.HTools.Types
import Ganeti.BasicTypes
import Ganeti.Common as Common
import Ganeti.Types
import Ganeti.Utils
-- * Data types
-- | Command line options structure.
-- | Command line options structure.
--
-- One field per supported command line flag; the @o*@ option
-- definitions below each set exactly one of these fields.  Defaults
-- live in 'defaultOptions'.
data Options = Options
  { optDataFile :: Maybe FilePath -- ^ Path to the cluster data file
  , optDiskMoves :: Bool -- ^ Allow disk moves
  , optInstMoves :: Bool -- ^ Allow instance moves
  , optDiskTemplate :: Maybe DiskTemplate -- ^ Override for the disk template
  , optSpindleUse :: Maybe Int -- ^ Override for the spindle usage
  , optDynuFile :: Maybe FilePath -- ^ Optional file with dynamic use data
  , optIgnoreDynu :: Bool -- ^ Do not use dynamic use data
  , optIgnoreSoftErrors :: Bool -- ^ Ignore soft errors in balancing moves
  , optIndependentGroups :: Bool -- ^ consider groups independently
  , optAcceptExisting :: Bool -- ^ accept existing N+1 violations
  , optMonD :: Bool -- ^ Query MonDs
  , optMonDFile :: Maybe FilePath -- ^ Optional file with data provided
                                  -- by MonDs
  , optEvacMode :: Bool -- ^ Enable evacuation mode
  , optRestrictedMigrate :: Bool -- ^ Disallow replace-primary moves
  , optExInst :: [String] -- ^ Instances to be excluded
  , optExTags :: Maybe [String] -- ^ Tags to use for exclusion
  , optExecJobs :: Bool -- ^ Execute the commands via Luxi
  , optForce :: Bool -- ^ Force the execution
  , optFullEvacuation :: Bool -- ^ Fully evacuate nodes to be rebooted
  , optGroup :: Maybe GroupID -- ^ The UUID of the group to process
  , optIAllocSrc :: Maybe FilePath -- ^ The iallocation spec
  , optIgnoreNonRedundant :: Bool -- ^ Ignore non-redundant instances
  , optSelInst :: [String] -- ^ Instances to be excluded
  , optLuxi :: Maybe FilePath -- ^ Collect data from Luxi
  , optJobDelay :: Double -- ^ Delay before executing first job
  , optMachineReadable :: Bool -- ^ Output machine-readable format
  , optMaster :: String -- ^ Collect data from RAPI
  , optMaxLength :: Int -- ^ Stop after this many steps
  , optMcpu :: Maybe Double -- ^ Override max cpu ratio for nodes
  , optMdsk :: Double -- ^ Max disk usage ratio for nodes
  , optMinGain :: Score -- ^ Min gain we aim for in a step
  , optMinGainLim :: Score -- ^ Limit below which we apply mingain
  , optMinResources :: Double -- ^ Minimal resources for hsqueeze
  , optMinScore :: Score -- ^ The minimum score we aim for
  , optNoHeaders :: Bool -- ^ Do not show a header line
  , optNoSimulation :: Bool -- ^ Skip the rebalancing dry-run
  , optNodeSim :: [String] -- ^ Cluster simulation mode
  , optNodeTags :: Maybe [String] -- ^ List of node tags to restrict to
  , optOffline :: [String] -- ^ Names of offline nodes
  , optOfflineMaintenance :: Bool -- ^ Pretend all instances are offline
  , optOneStepOnly :: Bool -- ^ Only do the first step
  , optOutPath :: FilePath -- ^ Path to the output directory
  , optPrintMoves :: Bool -- ^ Whether to show the instance moves
  , optSaveCluster :: Maybe FilePath -- ^ Save cluster state to this file
  , optShowCmds :: Maybe FilePath -- ^ Whether to show the command list
  , optShowHelp :: Bool -- ^ Just show the help
  , optShowComp :: Bool -- ^ Just show the completion info
  , optShowInsts :: Bool -- ^ Whether to show the instance map
  , optShowNodes :: Maybe [String] -- ^ Whether to show node status
  , optShowVer :: Bool -- ^ Just show the program version
  , optSkipNonRedundant :: Bool -- ^ Skip nodes with non-redundant instances
  , optStdSpec :: Maybe RSpec -- ^ Requested standard specs
  , optTargetResources :: Double -- ^ Target resources for squeezing
  , optTestCount :: Maybe Int -- ^ Optional test count override
  , optTieredSpec :: Maybe RSpec -- ^ Requested specs for tiered mode
  , optReplay :: Maybe String -- ^ Unittests: RNG state
  , optVerbose :: Int -- ^ Verbosity level
  , optPriority :: Maybe OpSubmitPriority -- ^ OpCode submit priority
  } deriving Show
-- | Default values for the command line options.
-- | Default values for the command line options.
--
-- Notable non-trivial defaults: max length @-1@ means \"no limit\",
-- min-disk comes from 'defReservedDiskRatio', the job delay is 10
-- seconds and the base verbosity level is 1.
defaultOptions :: Options
defaultOptions = Options
  { optDataFile = Nothing
  , optDiskMoves = True
  , optInstMoves = True
  , optIndependentGroups = False
  , optAcceptExisting = False
  , optDiskTemplate = Nothing
  , optSpindleUse = Nothing
  , optIgnoreDynu = False
  , optIgnoreSoftErrors = False
  , optDynuFile = Nothing
  , optMonD = False
  , optMonDFile = Nothing
  , optEvacMode = False
  , optRestrictedMigrate = False
  , optExInst = []
  , optExTags = Nothing
  , optExecJobs = False
  , optForce = False
  , optFullEvacuation = False
  , optGroup = Nothing
  , optIAllocSrc = Nothing
  , optIgnoreNonRedundant = False
  , optSelInst = []
  , optLuxi = Nothing
  , optJobDelay = 10
  , optMachineReadable = False
  , optMaster = ""
  , optMaxLength = -1
  , optMcpu = Nothing
  , optMdsk = defReservedDiskRatio
  , optMinGain = 1e-2
  , optMinGainLim = 1e-1
  , optMinResources = 2.0
  , optMinScore = 1e-9
  , optNoHeaders = False
  , optNoSimulation = False
  , optNodeSim = []
  , optNodeTags = Nothing
  , optSkipNonRedundant = False
  , optOffline = []
  , optOfflineMaintenance = False
  , optOneStepOnly = False
  , optOutPath = "."
  , optPrintMoves = False
  , optSaveCluster = Nothing
  , optShowCmds = Nothing
  , optShowHelp = False
  , optShowComp = False
  , optShowInsts = False
  , optShowNodes = Nothing
  , optShowVer = False
  , optStdSpec = Nothing
  , optTargetResources = 2.0
  , optTestCount = Nothing
  , optTieredSpec = Nothing
  , optReplay = Nothing
  , optVerbose = 1
  , optPriority = Nothing
  }
-- | Abbreviation for the option type.
-- | Abbreviation for the option type.
type OptType = GenericOptType Options

-- | Hook our 'Options' record into the generic option machinery so the
-- common @--help@ / @--version@ / completion handling works.
instance StandardOptions Options where
  helpRequested = optShowHelp
  verRequested = optShowVer
  compRequested = optShowComp
  requestHelp o = o { optShowHelp = True }
  requestVer o = o { optShowVer = True }
  requestComp o = o { optShowComp = True }
-- * Helper functions
-- | Parse a string into an instance spec ('RSpec').
--
-- The input must have the form @disk,ram,cpu@ or
-- @disk,ram,cpu,spindles@; disk and memory accept unit suffixes (via
-- 'parseUnit'), cpu and spindle counts are plain numbers.  @descr@ is
-- only used to label error messages.
parseISpecString :: String -> String -> Result RSpec
parseISpecString descr inp = do
  let sp = sepSplit ',' inp
      -- The spindles field is optional, so mention it in the error
      -- message; the previous text (\"expected disk,ram,cpu\") was
      -- misleading since four fields are accepted as well.
      err = Bad ("Invalid " ++ descr ++ " specification: '" ++ inp ++
                 "', expected disk,ram,cpu[,spindles]")
  when (length sp < 3 || length sp > 4) err
  prs <- mapM (\(fn, val) -> fn val) $
         zip [ annotateResult (descr ++ " specs disk") . parseUnit
             , annotateResult (descr ++ " specs memory") . parseUnit
             , tryRead (descr ++ " specs cpus")
             , tryRead (descr ++ " specs spindles")
             ] sp
  case prs of
    {- Spindles are optional, so that they are not needed when exclusive storage
       is disabled. When exclusive storage is disabled, spindles are ignored,
       so the actual value doesn't matter. We use 1 as a default so that in
       case someone forgets and exclusive storage is enabled, we don't run into
       weird situations. -}
    [dsk, ram, cpu] -> return $ RSpec cpu ram dsk 1
    [dsk, ram, cpu, spn] -> return $ RSpec cpu ram dsk spn
    _ -> err
-- | Disk template choices.
-- | Disk template choices, for bash completion: all constructors of
-- 'DiskTemplate' in their raw (string) form.
optComplDiskTemplate :: OptCompletion
optComplDiskTemplate = OptComplChoices $
                       map diskTemplateToRaw [minBound..maxBound]
-- * Command line options
-- | @-t@/@--text-data@: read the cluster data from a text FILE.
oDataFile :: OptType
oDataFile =
  (Option "t" ["text-data"]
   (ReqArg (\ f o -> Ok o { optDataFile = Just f }) "FILE")
   "the cluster data FILE",
   OptComplFile)

-- | @--no-disk-moves@: restrict balancing to failover/migrate only.
oDiskMoves :: OptType
oDiskMoves =
  (Option "" ["no-disk-moves"]
   (NoArg (\ opts -> Ok opts { optDiskMoves = False}))
   "disallow disk moves from the list of allowed instance changes,\
   \ thus allowing only the 'cheap' failover/migrate operations",
   OptComplNone)

-- | @--mond[=yes|no]@: whether to query all MonDs; with no argument
-- defaults to yes.
oMonD :: OptType
oMonD =
  (Option "" ["mond"]
   (OptArg (\ f opts -> do
              flag <- parseYesNo True f
              return $ opts { optMonD = flag }) "CHOICE")
   "pass either 'yes' or 'no' to query all monDs",
   optComplYesNo)

-- | @--mond-data@: read MonD-provided data from FILE instead of
-- querying the daemons.
oMonDDataFile :: OptType
oMonDDataFile =
  (Option "" ["mond-data"]
   (ReqArg (\ f opts -> Ok opts { optMonDFile = Just f }) "FILE")
   "Import data provided by MonDs from the given FILE",
   OptComplFile)

-- | @--disk-template@: override the disk template.
oDiskTemplate :: OptType
oDiskTemplate =
  (Option "" ["disk-template"]
   (reqWithConversion diskTemplateFromRaw
    (\dt opts -> Ok opts { optDiskTemplate = Just dt })
    "TEMPLATE") "select the desired disk template",
   optComplDiskTemplate)

-- | @--spindle-use@: override the per-instance spindle usage
-- (non-negative).
oSpindleUse :: OptType
oSpindleUse =
  (Option "" ["spindle-use"]
   (reqWithConversion (tryRead "parsing spindle-use")
    (\su opts -> do
       when (su < 0) $
         fail "Invalid value of the spindle-use (expected >= 0)"
       return $ opts { optSpindleUse = Just su })
    "SPINDLES") "select how many virtual spindle instances use\
                \ [default read from cluster]",
   OptComplFloat)

-- | @--select-instances@: comma-separated list of the only instances
-- eligible for moves.
oSelInst :: OptType
oSelInst =
  (Option "" ["select-instances"]
   (ReqArg (\ f opts -> Ok opts { optSelInst = sepSplit ',' f }) "INSTS")
   "only select given instances for any moves",
   OptComplManyInstances)

-- | @--no-instance-moves@: disallow primary-node changes, allowing
-- only secondary (DRBD) changes.
oInstMoves :: OptType
oInstMoves =
  (Option "" ["no-instance-moves"]
   (NoArg (\ opts -> Ok opts { optInstMoves = False}))
   "disallow instance (primary node) moves from the list of allowed,\
   \ instance changes, thus allowing only slower, but sometimes\
   \ safer, drbd secondary changes",
   OptComplNone)

-- | @-U@/@--dynu-file@: read dynamic utilisation data from FILE.
oDynuFile :: OptType
oDynuFile =
  (Option "U" ["dynu-file"]
   (ReqArg (\ f opts -> Ok opts { optDynuFile = Just f }) "FILE")
   "Import dynamic utilisation data from the given FILE",
   OptComplFile)
-- | @--ignore-dynu@: disregard any dynamic utilisation information.
oIgnoreDyn :: OptType
oIgnoreDyn =
  (Option "" ["ignore-dynu"]
   (NoArg (\ opts -> Ok opts {optIgnoreDynu = True}))
   "Ignore any dynamic utilisation information",
   OptComplNone)

-- | @--ignore-soft-errors@: allow moves that violate soft
-- restrictions.
oIgnoreSoftErrors :: OptType
oIgnoreSoftErrors =
  (Option "" ["ignore-soft-errors"]
   (NoArg (\ opts -> Ok opts {optIgnoreSoftErrors = True}))
   "Ignore any soft restrictions in balancing",
   OptComplNone)

-- | @--independent-groups@: process node groups independently.
oIndependentGroups :: OptType
oIndependentGroups =
  (Option "" ["independent-groups"]
   (NoArg (\ opts -> Ok opts {optIndependentGroups = True}))
   "Consider groups independently",
   OptComplNone)

-- | @--accept-existing-errors@: tolerate pre-existing N+1 violations.
oAcceptExisting :: OptType
oAcceptExisting =
  (Option "" ["accept-existing-errors"]
   (NoArg (\ opts -> Ok opts {optAcceptExisting = True}))
   "Accept existing N+1 violations; just don't add new ones",
   OptComplNone)

-- | @-E@/@--evac-mode@: only move instances off offline/drained nodes.
oEvacMode :: OptType
oEvacMode =
  (Option "E" ["evac-mode"]
   (NoArg (\opts -> Ok opts { optEvacMode = True }))
   "enable evacuation mode, where the algorithm only moves\
   \ instances away from offline and drained nodes",
   OptComplNone)

-- | @--restricted-migration@: forbid replace-primary (frf) moves.
oRestrictedMigrate :: OptType
oRestrictedMigrate =
  (Option "" ["restricted-migration"]
   (NoArg (\opts -> Ok opts { optRestrictedMigrate = True }))
   "disallow replace-primary moves (aka frf-moves); in evacuation mode, this\
   \ will ensure that the only migrations are off the drained nodes",
   OptComplNone)

-- | @--exclude-instances@: comma-separated list of instances never to
-- move.
oExInst :: OptType
oExInst =
  (Option "" ["exclude-instances"]
   (ReqArg (\ f opts -> Ok opts { optExInst = sepSplit ',' f }) "INSTS")
   "exclude given instances from any moves",
   OptComplManyInstances)

-- | @--exclusion-tags@: tag prefixes used for instance exclusion.
oExTags :: OptType
oExTags =
  (Option "" ["exclusion-tags"]
   (ReqArg (\ f opts -> Ok opts { optExTags = Just $ sepSplit ',' f })
    "TAG,...") "Enable instance exclusion based on given tag prefix",
   OptComplString)

-- | @-X@/@--exec@: actually submit the resulting jobs via Luxi.
oExecJobs :: OptType
oExecJobs =
  (Option "X" ["exec"]
   (NoArg (\ opts -> Ok opts { optExecJobs = True}))
   "execute the suggested moves via Luxi (only available when using\
   \ it for data gathering)",
   OptComplNone)

-- | @-f@/@--force@: proceed despite warnings.
oForce :: OptType
oForce =
  (Option "f" ["force"]
   (NoArg (\ opts -> Ok opts {optForce = True}))
   "force the execution of this program, even if warnings would\
   \ otherwise prevent it",
   OptComplNone)

-- | @--full-evacuation@: evacuate (not just migrate off) nodes that
-- will be rebooted.
oFullEvacuation :: OptType
oFullEvacuation =
  (Option "" ["full-evacuation"]
   (NoArg (\ opts -> Ok opts { optFullEvacuation = True}))
   "fully evacuate the nodes to be rebooted",
   OptComplNone)

-- | @-G@/@--group@: restrict processing to one node group.
oGroup :: OptType
oGroup =
  (Option "G" ["group"]
   (ReqArg (\ f o -> Ok o { optGroup = Just f }) "ID")
   "the target node group (name or UUID)",
   OptComplOneGroup)

-- | @-I@/@--ialloc-src@: use an iallocator request FILE as data
-- source.
oIAllocSrc :: OptType
oIAllocSrc =
  (Option "I" ["ialloc-src"]
   (ReqArg (\ f opts -> Ok opts { optIAllocSrc = Just f }) "FILE")
   "Specify an iallocator spec as the cluster data source",
   OptComplFile)

-- | @--ignore-non-redundant@: pretend non-redundant instances do not
-- exist.
oIgnoreNonRedundant :: OptType
oIgnoreNonRedundant =
  (Option "" ["ignore-non-redundant"]
   (NoArg (\ opts -> Ok opts { optIgnoreNonRedundant = True }))
   "Pretend that there are no non-redundant instances in the cluster",
   OptComplNone)

-- | @--job-delay@: seconds to wait before submitting the first job.
oJobDelay :: OptType
oJobDelay =
  (Option "" ["job-delay"]
   (reqWithConversion (tryRead "job delay")
    (\d opts -> Ok opts { optJobDelay = d }) "SECONDS")
   "insert this much delay before the execution of repair jobs\
   \ to allow the tool to continue processing instances",
   OptComplFloat)

-- | Build the @-L@/@--luxi@ option around a given default socket path;
-- the optional argument overrides the path.
genOLuxiSocket :: String -> OptType
genOLuxiSocket defSocket =
  (Option "L" ["luxi"]
   (OptArg ((\ f opts -> Ok opts { optLuxi = Just f }) .
            fromMaybe defSocket) "SOCKET")
   ("collect data via Luxi, optionally using the given SOCKET path [" ++
    defSocket ++ "]"),
   OptComplFile)

-- | @-L@/@--luxi@ with the default query socket path resolved at
-- runtime (hence 'IO').
oLuxiSocket :: IO OptType
oLuxiSocket = liftM genOLuxiSocket Path.defaultQuerySocket
-- | @--machine-readable[=yes|no]@: toggle machine-readable output;
-- with no argument defaults to yes.
oMachineReadable :: OptType
oMachineReadable =
  (Option "" ["machine-readable"]
   (OptArg (\ f opts -> do
              flag <- parseYesNo True f
              return $ opts { optMachineReadable = flag }) "CHOICE")
   "enable machine readable output (pass either 'yes' or 'no' to\
   \ explicitly control the flag, or without an argument defaults to\
   \ yes)",
   optComplYesNo)

-- | @--max-cpu@: override the vcpu-to-pcpu ratio (must be positive).
oMaxCpu :: OptType
oMaxCpu =
  (Option "" ["max-cpu"]
   (reqWithConversion (tryRead "parsing max-cpu")
    (\mcpu opts -> do
       when (mcpu <= 0) $
         fail "Invalid value of the max-cpu ratio, expected >0"
       return $ opts { optMcpu = Just mcpu }) "RATIO")
   "maximum virtual-to-physical cpu ratio for nodes (from 0\
   \ upwards) [default read from cluster]",
   OptComplFloat)

-- | @-l@/@--max-length@: cap the number of balancing/allocation
-- rounds.
oMaxSolLength :: OptType
oMaxSolLength =
  (Option "l" ["max-length"]
   (reqWithConversion (tryRead "max solution length")
    (\i opts -> Ok opts { optMaxLength = i }) "N")
   "cap the solution at this many balancing or allocation\
   \ rounds (useful for very unbalanced clusters or empty\
   \ clusters)",
   OptComplInteger)

-- | @--min-disk@: minimum free disk ratio required on nodes.
oMinDisk :: OptType
oMinDisk =
  (Option "" ["min-disk"]
   (reqWithConversion (tryRead "min free disk space")
    (\n opts -> Ok opts { optMdsk = n }) "RATIO")
   "minimum free disk space for nodes (between 0 and 1) [0]",
   OptComplFloat)

-- | @-g@/@--min-gain@: minimum per-step gain before giving up.
oMinGain :: OptType
oMinGain =
  (Option "g" ["min-gain"]
   (reqWithConversion (tryRead "min gain")
    (\g opts -> Ok opts { optMinGain = g }) "DELTA")
   "minimum gain to aim for in a balancing step before giving up",
   OptComplFloat)

-- | @--min-gain-limit@: score threshold above which min-gain is not
-- enforced.
oMinGainLim :: OptType
oMinGainLim =
  (Option "" ["min-gain-limit"]
   (reqWithConversion (tryRead "min gain limit")
    (\g opts -> Ok opts { optMinGainLim = g }) "SCORE")
   "minimum cluster score for which we start checking the min-gain",
   OptComplFloat)

-- | @--minimal-resources@: resources (as a multiple of the standard
-- allocation) that must remain available on each node.
oMinResources :: OptType
oMinResources =
  (Option "" ["minimal-resources"]
   (reqWithConversion (tryRead "minimal resources")
    (\d opts -> Ok opts { optMinResources = d}) "FACTOR")
   "minimal resources to be present on each in multiples of\
   \ the standard allocation for not onlining standby nodes",
   OptComplFloat)
-- | @-e@/@--min-score@: the cluster score below which balancing stops.
--
-- Fixes the typo \"mininum\" in the user-visible help string.
oMinScore :: OptType
oMinScore =
  (Option "e" ["min-score"]
   (reqWithConversion (tryRead "min score")
    (\e opts -> Ok opts { optMinScore = e }) "EPSILON")
   "minimum score to aim for",
   OptComplFloat)
-- | @--no-headers@: omit the header line in listings.
oNoHeaders :: OptType
oNoHeaders =
  (Option "" ["no-headers"]
   (NoArg (\ opts -> Ok opts { optNoHeaders = True }))
   "do not show a header line",
   OptComplNone)

-- | @--no-simulation@: skip the rebalancing dry-run.
oNoSimulation :: OptType
oNoSimulation =
  (Option "" ["no-simulation"]
   (NoArg (\opts -> Ok opts {optNoSimulation = True}))
   "do not perform rebalancing simulation",
   OptComplNone)

-- | @--simulate@: add a simulated-cluster SPEC; repeatable, specs
-- accumulate (most recent first).
oNodeSim :: OptType
oNodeSim =
  (Option "" ["simulate"]
   (ReqArg (\ f o -> Ok o { optNodeSim = f:optNodeSim o }) "SPEC")
   "simulate an empty cluster, given as\
   \ 'alloc_policy,num_nodes,disk,ram,cpu'",
   OptComplString)

-- | @--node-tags@: restrict processing to nodes carrying these tags.
oNodeTags :: OptType
oNodeTags =
  (Option "" ["node-tags"]
   (ReqArg (\ f opts -> Ok opts { optNodeTags = Just $ sepSplit ',' f })
    "TAG,...") "Restrict to nodes with the given tags",
   OptComplString)

-- | @--offline-maintenance@: plan as if all instances were offline.
oOfflineMaintenance :: OptType
oOfflineMaintenance =
  (Option "" ["offline-maintenance"]
   (NoArg (\ opts -> Ok opts {optOfflineMaintenance = True}))
   "Schedule offline maintenance, i.e., pretend that all instance are\
   \ offline.",
   OptComplNone)

-- | @-O@/@--offline@: mark NODE as offline; repeatable, names
-- accumulate.
oOfflineNode :: OptType
oOfflineNode =
  (Option "O" ["offline"]
   (ReqArg (\ n o -> Ok o { optOffline = n:optOffline o }) "NODE")
   "set node as offline",
   OptComplOneNode)

-- | @--one-step-only@: stop after the first step.
oOneStepOnly :: OptType
oOneStepOnly =
  (Option "" ["one-step-only"]
   (NoArg (\ opts -> Ok opts {optOneStepOnly = True}))
   "Only do the first step",
   OptComplNone)

-- | @-d@/@--output-dir@: directory for output files.
oOutputDir :: OptType
oOutputDir =
  (Option "d" ["output-dir"]
   (ReqArg (\ d opts -> Ok opts { optOutPath = d }) "PATH")
   "directory in which to write output files",
   OptComplDir)

-- | @-C@/@--print-commands@: show the gnt-* command list; an optional
-- FILE argument writes them there instead (default \"-\" = stdout).
oPrintCommands :: OptType
oPrintCommands =
  (Option "C" ["print-commands"]
   (OptArg ((\ f opts -> Ok opts { optShowCmds = Just f }) .
            fromMaybe "-")
    "FILE")
   "print the ganeti command list for reaching the solution,\
   \ if an argument is passed then write the commands to a\
   \ file named as such",
   OptComplNone)

-- | @--print-instances@: show the final instance map.
oPrintInsts :: OptType
oPrintInsts =
  (Option "" ["print-instances"]
   (NoArg (\ opts -> Ok opts { optShowInsts = True }))
   "print the final instance map",
   OptComplNone)

-- | @--print-moves@: show the instance moves.
oPrintMoves :: OptType
oPrintMoves =
  (Option "" ["print-moves"]
   (NoArg (\ opts -> Ok opts { optPrintMoves = True }))
   "print the moves of the instances",
   OptComplNone)

-- | @-p@/@--print-nodes@: show the final node list; the optional
-- argument selects fields, and a leading @+@ means \"append to the
-- default fields\" (kept as a separate \"+\" marker in the list).
oPrintNodes :: OptType
oPrintNodes =
  (Option "p" ["print-nodes"]
   (OptArg ((\ f opts ->
               let (prefix, realf) = case f of
                     '+':rest -> (["+"], rest)
                     _ -> ([], f)
                   splitted = prefix ++ sepSplit ',' realf
               in Ok opts { optShowNodes = Just splitted }) .
            fromMaybe []) "FIELDS")
   "print the final node list",
   OptComplNone)
-- | @-q@/@--quiet@: decrease verbosity; repeatable.
oQuiet :: OptType
oQuiet =
  (Option "q" ["quiet"]
   (NoArg (\ opts -> Ok opts { optVerbose = optVerbose opts - 1 }))
   "decrease the verbosity level",
   OptComplNone)

-- | @-m@/@--master@: collect data via RAPI at ADDRESS.
oRapiMaster :: OptType
oRapiMaster =
  (Option "m" ["master"]
   (ReqArg (\ m opts -> Ok opts { optMaster = m }) "ADDRESS")
   "collect data via RAPI at the given ADDRESS",
   OptComplHost)

-- | @-S@/@--save@: save the final cluster state to FILE.
oSaveCluster :: OptType
oSaveCluster =
  (Option "S" ["save"]
   (ReqArg (\ f opts -> Ok opts { optSaveCluster = Just f }) "FILE")
   "Save cluster state at the end of the processing to FILE",
   OptComplNone)

-- | @--skip-non-redundant@: ignore nodes hosting non-redundant
-- instances.
oSkipNonRedundant :: OptType
oSkipNonRedundant =
  (Option "" ["skip-non-redundant"]
   (NoArg (\ opts -> Ok opts { optSkipNonRedundant = True }))
   "Skip nodes that host a non-redundant instance",
   OptComplNone)

-- | @--standard-alloc@: the standard instance spec, parsed with
-- 'parseISpecString'.
oStdSpec :: OptType
oStdSpec =
  (Option "" ["standard-alloc"]
   (ReqArg (\ inp opts -> do
              tspec <- parseISpecString "standard" inp
              return $ opts { optStdSpec = Just tspec } )
    "STDSPEC")
   "enable standard specs allocation, given as 'disk,ram,cpu'",
   OptComplString)

-- | @--target-resources@: resources to leave free when squeezing, as
-- a multiple of the standard allocation.
oTargetResources :: OptType
oTargetResources =
  (Option "" ["target-resources"]
   (reqWithConversion (tryRead "target resources")
    (\d opts -> Ok opts { optTargetResources = d}) "FACTOR")
   "target resources to be left on each node after squeezing in\
   \ multiples of the standard allocation",
   OptComplFloat)

-- | @--tiered-alloc@: the tiered-allocation spec, parsed with
-- 'parseISpecString'.
oTieredSpec :: OptType
oTieredSpec =
  (Option "" ["tiered-alloc"]
   (ReqArg (\ inp opts -> do
              tspec <- parseISpecString "tiered" inp
              return $ opts { optTieredSpec = Just tspec } )
    "TSPEC")
   "enable tiered specs allocation, given as 'disk,ram,cpu'",
   OptComplString)

-- | @-v@/@--verbose@: increase verbosity; repeatable.
oVerbose :: OptType
oVerbose =
  (Option "v" ["verbose"]
   (NoArg (\ opts -> Ok opts { optVerbose = optVerbose opts + 1 }))
   "increase the verbosity level",
   OptComplNone)

-- | @--priority@: submit priority for generated jobs, parsed with
-- 'parseSubmitPriority'.
oPriority :: OptType
oPriority =
  (Option "" ["priority"]
   (ReqArg (\ inp opts -> do
              prio <- parseSubmitPriority inp
              Ok opts { optPriority = Just prio }) "PRIO")
   "set the priority of submitted jobs",
   OptComplChoices (map fmtSubmitPriority [minBound..maxBound]))
-- | Generic options.
-- | Generic options shared by every htools program: version, help and
-- shell-completion support.
genericOpts :: [GenericOptType Options]
genericOpts = [ oShowVer
              , oShowHelp
              , oShowComp
              ]
-- * Functions
-- | Wrapper over 'Common.parseOpts' with our custom options.
-- | Wrapper over 'Common.parseOpts' with our custom options, seeded
-- with 'defaultOptions'.
parseOpts :: [String]   -- ^ The command line arguments
          -> String     -- ^ The program name
          -> [OptType]  -- ^ The supported command line options
          -> [ArgCompletion] -- ^ The supported command line arguments
          -> IO (Options, [String]) -- ^ The resulting options and leftover
                                    -- arguments
parseOpts = Common.parseOpts defaultOptions
-- | A shell script template for autogenerated scripts.
-- | A shell script template for autogenerated scripts: enables
-- @set -e@ and defines a @check@ function that exits cleanly once
-- @/tmp/stop-htools@ exists, so long command sequences can be
-- interrupted between steps.
shTemplate :: String
shTemplate =
  printf "#!/bin/sh\n\n\
         \# Auto-generated script for executing cluster rebalancing\n\n\
         \# To stop, touch the file /tmp/stop-htools\n\n\
         \set -e\n\n\
         \check() {\n\
         \ if [ -f /tmp/stop-htools ]; then\n\
         \ echo 'Stop requested, exiting'\n\
         \ exit 0\n\
         \ fi\n\
         \}\n\n"
-- | Optionally show or save a list of commands
-- | Optionally show or save a list of commands.
--
-- Dispatches on 'optShowCmds': @Nothing@ is a no-op; @Just \"-\"@
-- prints the commands to stdout (indented, with bare \"check\" calls
-- filtered out), preceded by the given message; any other path gets
-- the shell template ('shTemplate') plus the commands written to it.
maybeSaveCommands :: String -- ^ Informal description
                  -> Options
                  -> String -- ^ commands
                  -> IO ()
maybeSaveCommands msg opts cmds =
  case optShowCmds opts of
    Nothing -> return ()
    Just "-" -> do
      putStrLn ""
      putStrLn msg
      putStr . unlines . map (" " ++) . filter (/= " check") . lines $ cmds
    Just out_path -> do
      writeFile out_path (shTemplate ++ cmds)
      printf "The commands have been written to file '%s'\n" out_path
-- | Optionally print the node list.
-- | Optionally print the node list to stderr.
--
-- With no field list, nothing is printed; otherwise the rendering
-- function is applied to the fields and the result written to stderr,
-- preceded by a blank line and a status header.
maybePrintNodes :: Maybe [String]        -- ^ The field list
                -> String                -- ^ Informational message
                -> ([String] -> String)  -- ^ Function to generate the listing
                -> IO ()
maybePrintNodes mfields msg render =
  case mfields of
    Nothing -> return ()
    Just fields ->
      mapM_ (hPutStrLn stderr) ["", msg ++ " status:", render fields]
-- | Optionally print the instance list.
-- | Optionally print the instance list to stderr.
--
-- A no-op when printing is disabled; otherwise writes a blank line,
-- an \"<msg> instance map:\" header and the instance data.
maybePrintInsts :: Bool   -- ^ Whether to print the instance list
                -> String -- ^ Type of the instance map (e.g. initial)
                -> String -- ^ The instance data
                -> IO ()
maybePrintInsts do_print msg instdata
  | not do_print = return ()
  | otherwise = do
      hPutStrLn stderr ""
      hPutStrLn stderr (msg ++ " instance map:")
      hPutStr stderr instdata
-- | Function to display warning messages from parsing the cluster
-- state.
-- | Function to display warning messages from parsing the cluster
-- state; prints nothing when the list is empty, otherwise a header
-- plus one bulleted line per message, all on stderr.
maybeShowWarnings :: [String] -- ^ The warning messages
                  -> IO ()
maybeShowWarnings fix_msgs =
  unless (null fix_msgs) $ do
    hPutStrLn stderr "Warning: cluster has inconsistent data:"
    hPutStrLn stderr . unlines . map (printf " - %s") $ fix_msgs
-- | Format a list of key, value as a shell fragment.
-- | Format a list of key, value pairs as a shell fragment: each pair
-- is emitted as @PREFIX_KEY=value@ with the key upper-cased and the
-- value quoted via 'ensureQuoted'.
printKeys :: String -- ^ Prefix to printed variables
          -> [(String, String)] -- ^ List of (key, value) pairs to be printed
          -> IO ()
printKeys prefix =
  mapM_ (\(k, v) ->
           printf "%s_%s=%s\n" prefix (map toUpper k) (ensureQuoted v))
-- | Prints the final @OK@ marker in machine readable output.
-- | Prints the final @OK@ marker in machine readable output; does
-- nothing when machine-readable output was not requested.
printFinal :: String -- ^ Prefix to printed variable
           -> Bool   -- ^ Whether output should be machine readable;
                     -- note: if not, there is nothing to print
           -> IO ()
printFinal prefix machineReadable =
  -- this should be the final entry of the machine-readable output
  when machineReadable $ printKeys prefix [("OK", "1")]
-- | Potentially set the node as offline based on passed offline list.
-- | Potentially set the node as offline based on passed offline list:
-- nodes whose index appears in the list are marked offline, all
-- others are returned unchanged.
setNodeOffline :: [Ndx] -> Node.Node -> Node.Node
setNodeOffline offline_indices n =
  if Node.idx n `elem` offline_indices
    then Node.setOffline n True
    else n
-- | Set node properties based on command line options.
-- | Set node properties based on command line options.
--
-- Resolves the names given via @--offline@ against the node list and
-- aborts (via 'exitErr') if any of them does not match a known node;
-- then marks the matching nodes offline and applies the max-cpu and
-- min-disk overrides to every node.
setNodeStatus :: Options -> Node.List -> IO Node.List
setNodeStatus opts fixed_nl = do
  let offline_passed = optOffline opts
      all_nodes = Container.elems fixed_nl
      offline_lkp = map (lookupName (map Node.name all_nodes)) offline_passed
      offline_wrong = filter (not . goodLookupResult) offline_lkp
      offline_names = map lrContent offline_lkp
      offline_indices = map Node.idx $
                        filter (\n -> Node.name n `elem` offline_names)
                               all_nodes
      m_cpu = optMcpu opts
      m_dsk = optMdsk opts
  -- Fail early on unresolvable node names rather than silently
  -- ignoring them.
  unless (null offline_wrong) .
    exitErr $ printf "wrong node name(s) set as offline: %s\n"
              (commaJoin (map lrContent offline_wrong))
  -- Only touch the max-cpu setting when an override was given.
  let setMCpuFn = case m_cpu of
                    Nothing -> id
                    Just new_mcpu -> flip Node.setMcpu new_mcpu
  let nm = Container.map (setNodeOffline offline_indices .
                          flip Node.setMdsk m_dsk .
                          setMCpuFn) fixed_nl
  return nm
|
ribag/ganeti-experiments
|
src/Ganeti/HTools/CLI.hs
|
gpl-2.0
| 28,625 | 276 | 16 | 7,301 | 6,024 | 3,415 | 2,609 | 691 | 3 |
{-# LANGUAGE TypeFamilies #-}
{- Arrow-based types and classes for synchronous circuits.
- Copyright : (C)opyright 2004-2005, 2009-2011 peteg42 at gmail dot com
- License : GPL (see COPYING for details)
-}
module ADHOC.Circuits
( module ADHOC.Basis
, ArrowComb(..)
, ArrowMux(..)
, muxAC
, muxA'
-- * Command combinators and Unicode syntax for them.
, notAC, andAC, iffAC, impAC, orAC, xorAC
, (∧), (¬)
, (↑), (∨), (⟺), (⟹), (⟸)
, ArrowDelay(..), delayAC
, ArrowInit(..)
, ArrowCombLoop(..)
-- * Extra generic circuits.
, idB
, fby
, latchA
) where
-------------------------------------------------------------------
-- Dependencies.
-------------------------------------------------------------------
import Prelude hiding ( id, (.) )
import ADHOC.Basis
-------------------------------------------------------------------
-- Synchronous circuit simulation arrows.
-------------------------------------------------------------------
-- FIXME all instances of 'ArrowComb' are transformers. We FIXME FIXME
-- run into problems with the ST monad if we expect B to map (~> :: *
-- -> * -> *) to the Boolean type for arrow. Try the transformer
-- type. Less general.
-- type family B ((~>) :: ((* -> * -> *) -> * -> *)) :: *
-- FIXME this implies that we cannot lift a ArrowComb instance up -
-- imagine the E arrow.
-- | Standard combinational logic. Minimal definition is 'falseA',
-- 'trueA', 'andA' and 'notA'. For the user's convenience, the arrow
-- type determines how booleans are represented.
-- FIXME making these command combinators enables constant propogation ??
class Arrow (~>) => ArrowComb (~>) where
  -- | How booleans are represented by this arrow type.
  type B (~>) :: * -- FIXME not ST-friendly
  -- | The constant false signal.
  falseA :: env ~> B (~>)
  -- | The constant true signal.
  trueA :: env ~> B (~>)
  -- | Conjunction.
  andA :: (B (~>), B (~>)) ~> B (~>)
  -- | Negation.
  notA :: B (~>) ~> B (~>)
  -- | Negated conjunction; default via 'andA' and 'notA'.
  nandA :: (B (~>), B (~>)) ~> B (~>)
  nandA = andA >>> notA
  -- | Disjunction; default via De Morgan's law.
  orA :: (B (~>), B (~>)) ~> B (~>)
  orA = (notA *** notA) >>> andA >>> notA
  -- | Exclusive or; default as @(a or b) and not (a and b)@.
  xorA :: (B (~>), B (~>)) ~> B (~>)
  xorA = (orA &&& (andA >>> notA)) >>> andA
  -- | Biconditional (equivalence); default as negated 'xorA'.
  iffA :: (B (~>), B (~>)) ~> B (~>)
  iffA = xorA >>> notA
  -- | Implication; default as @not a or b@.
  impA :: (B (~>), B (~>)) ~> B (~>)
  impA = notA *** id >>> orA
  -- | Attach a note to a sub-circuit.
  note :: String -> (b ~> c) -> (b ~> c)
  note _ = id
-- | Binary 'ArrowComb' command combinators.
-- | Binary 'ArrowComb' command combinators: each lifts the
-- corresponding gate (via 'liftAC2') to act on two arrows sharing the
-- same environment.
andAC, nandAC, orAC, xorAC, iffAC, impAC :: ArrowComb (~>) => (env ~> B (~>)) -> (env ~> B (~>)) -> (env ~> B (~>))
andAC = liftAC2 andA
nandAC = liftAC2 nandA
iffAC = liftAC2 iffA
impAC = liftAC2 impA
orAC = liftAC2 orA
xorAC = liftAC2 xorA
{-# INLINE andAC #-}
{-# INLINE nandAC #-}
{-# INLINE orAC #-}
{-# INLINE xorAC #-}
{-# INLINE iffAC #-}
{-# INLINE impAC #-}
-- | Unary command-combinator form of 'notA'.
notAC :: ArrowComb (~>) => (env ~> B (~>)) -> (env ~> B (~>))
notAC = liftAC notA
{-# INLINE notAC #-}
-- FIXME This is what we want, but command combinators break in GHC with infixr
-- infixr 8 ⟹, `impAC`, `xorAC`
-- infixl 8 ⟸
-- infixr 7 ∧, `andAC`
-- infixr 6 ∨, `orAC`
-- infixr 5 ⟺, `iffAC`
-- Fixity declarations for the logical combinators.  As noted above,
-- GHC's handling of command combinators forces several of these to be
-- left-associative where right-associativity is intended.
infixr 8 ⟹, `impAC`, `xorAC`
infixl 8 ⟸
infixl 7 ∧, `andAC` -- should be infixr
infixl 6 ∨, `orAC` -- should be infixr
infixl 5 ⟺, `iffAC` -- should be infixr
-- | Unicode aliases for the logical command combinators defined
-- above; reverse implication is defined by flipping '⟹'.
(¬) :: ArrowComb (~>) => (env ~> B (~>)) -> (env ~> B (~>))
(∧), (↑), (∨), (⟺), (⟹), (⟸) :: ArrowComb (~>) => (env ~> B (~>)) -> (env ~> B (~>)) -> (env ~> B (~>))
(∧) = andAC
(¬) = notAC
(↑) = nandAC -- Sheffer stroke
(∨) = orAC
(⟺) = iffAC
(⟹) = impAC
f ⟸ g = g ⟹ f
-- | A multiplexer, if-then-else in "Boolean" but switching an arbitary type.
-- Convention: if the bit is true, then we take the first otherwise the second.
class ArrowComb (~>) => ArrowMux (~>) v where
  -- | If-then-else on @v@: when the bit is true, the first component
  -- of the pair is selected, otherwise the second.
  muxA :: (B (~>), (v, v)) ~> v

-- | Trivial variant of 'muxA' that combines better with lifting.
muxA' :: ArrowMux (~>) v => (B (~>), v, v) ~> v
muxA' = arr (\(c, v0, v1) -> (c, (v0, v1))) >>> muxA

-- | Command-combinator variant of 'muxA' (the original comment
-- referred to \'muxAC\' itself, which was circular).
muxAC :: ArrowMux (~>) v => (env ~> B (~>)) -> (env ~> v) -> (env ~> v) -> (env ~> v)
muxAC barr v0arr v1arr = proc env ->
  do b <- barr -< env
     v0 <- v0arr -< env
     v1 <- v1arr -< env
     muxA -< (b, (v0, v1))
-- | An initialised-delay operator, ala Lustre's @(->)@ (followed-by)
-- operator. Well-initialisation is verified by constructivity.
class Arrow (~>) => ArrowDelay (~>) v where
  -- | The initialised delay: the pair carries the initial value and
  -- the value to be delayed by one timestep.
  delayA :: (v, v) ~> v

-- | Command-combinator variant of 'delayA'.
delayAC :: ArrowDelay (~>) v
        => (env ~> v) -- ^ Initial
        -> (env ~> v) -- ^ Recurring (delayed by one timestep)
        -> (env ~> v)
delayAC = liftAC2 delayA
-- | Statically-cyclic dynamically-acyclic combinational loops.
--
-- Intuitively we could support combinational cycles at all types, but
-- in practice we only use them at the boolean type.
class Arrow (~>) => ArrowCombLoop (~>) r where
  -- | Tie a combinational feedback loop of type @r@; the loop must be
  -- dynamically acyclic even though it is statically cyclic.
  combLoop :: ((b, r) ~> (c, r)) -> (b ~> c)

-- | A \'boot\' bit, indicating the circuit is in an initial state.
class Arrow (~>) => ArrowInit (~>) where
  -- | A bit telling whether the circuit is in an initial state.
  isInitialState :: env ~> B (~>)
----------------------------------------
-- FIXME Useful other things
----------------------------------------
-- | Acts as the identity on the 'B' type.
-- | Acts as the identity on the 'B' type; useful to pin down the
-- boolean type in ambiguous contexts.
idB :: Arrow (~>) => B (~>) ~> B (~>)
idB = id

-- | Lustre's followed-by operator: @f \`fby\` g@ selects @f@'s output
-- while 'isInitialState' holds and @g@'s output otherwise, via a mux.
fby :: (ArrowDelay (~>) (B (~>)), ArrowInit (~>), ArrowMux (~>) v)
    => (e ~> v) -> (e ~> v) -> (e ~> v)
f `fby` g = proc e ->
  (| muxAC (isInitialState -< ())
           (f -< e)
           (g -< e) |)

-- | Latch a value: the output is fed back through 'delayAC'
-- (initialised with the current input) using arrow @rec@ notation.
latchA :: (ArrowDelay (~>) v, ArrowLoop (~>))
       => (v ~> v)
latchA = proc v ->
  do rec v' <- (| delayAC (returnA -< v) (returnA -< v') |)
     returnA -< v'
|
peteg/ADHOC
|
ADHOC/Circuits.hs
|
gpl-2.0
| 5,621 | 24 | 23 | 1,240 | 1,654 | 998 | 656 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Reffit.Handlers.HandlePasswordReset where
import Control.Applicative
import Control.Error
import Control.Monad.State
import Data.Map.Syntax
import Data.Time
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Heist.Interpreted (textSplice)
import Snap
import Snap.Snaplet.Auth
import Snap.Snaplet.Heist
import Application
import Util.Mailgun
import Util.Snap
-- | Entry point for password-reset requests: GET renders the request
-- form template, POST sends the reset e-mail ('sendResetMail').
handleRequestReset :: Handler App App ()
handleRequestReset = method GET (render "reset_request")
                     <|> method POST sendResetMail
-- | Build a 'ResetRequest' for the posted e-mail address and send it
-- through Mailgun, signed with the application's signing key.
sendResetMail :: Handler App App ()
sendResetMail = do
  email <- requireParam "email"
  tNow <- liftIO getCurrentTime
  let tExpire = (3600 * 24) `addUTCTime` tNow -- Expire in 1 day
      resetReq = ResetRequest tExpire (T.decodeUtf8 email)
  m <- gets _mgr
  sk <- gets _sign
  mgk <- gets _mgKey
  -- NOTE(review): the result of the send is discarded — delivery is
  -- best-effort and failures are not reported to the client.
  liftIO $ sendMailgunResetRequest resetReq m sk mgk
  return ()
-- | Handle the reset link: GET renders the new-password form (passing
-- the signed token through to the template), POST performs the actual
-- password change ('executeReset').
handleExecuteReset :: Handler App App ()
handleExecuteReset = do
  sk <- gets _sign
  tk :: T.Text <- T.decodeUtf8 <$> requireParam "token"
  method GET (newPWForm tk) <|>
    method POST (executeReset sk tk)
  where
    newPWForm tk =
      renderWithSplices "_new_password" ("token" ## textSplice tk)
-- | Validate the reset form and store the new password.
--
-- Runs inside 'runReffitErrorT', so each failed check maps to an HTTP
-- error: 412 when the two password fields differ or the e-mail does
-- not match the token, 403 when the token has expired, 500 for
-- decode/lookup/save failures.
executeReset :: SigningKey -> T.Text -> Handler App App ()
executeReset sk tk = with auth $ runReffitErrorT $ do
  tNow <- liftIO getCurrentTime
  am :: AuthManager App <- lift get
  em <- fmap T.decodeUtf8 $ checkedParam "email"
  pw <- fmap T.decodeUtf8 $ checkedParam "password"
  pw' <- fmap T.decodeUtf8 $ checkedParam "password2"
  checkedAssert (RErr 412 "Passwords must match") (pw == pw')
  -- Recover the signed request; a bad signature or malformed blob is
  -- reported as a generic decode failure.
  (ResetRequest rTime tEmail) <- fmapLT
    (const $ RErr 500 "Failed to decode reset token") $
    hoistEither $ fromSafeBlob sk (T.encodeUtf8 tk)
  checkedAssert (RErr 412 "Email must match token") (em == tEmail)
  checkedAssert (RErr 403 "Reset token expired") (tNow < rTime)
  u <- noteT (RErr 500 "User lookup failure") $
       MaybeT $ liftIO $ lookupByEmail am (em)
  fmapLT (const $ RErr 500 "Failed to save user") $
    ExceptT $ saveUser =<< liftIO (setPassword u $ T.encodeUtf8 pw)
  writeText "Password reset successful"
|
imalsogreg/reffit
|
src/Reffit/Handlers/HandlePasswordReset.hs
|
gpl-3.0
| 2,468 | 0 | 16 | 662 | 712 | 348 | 364 | 57 | 1 |
{-# LANGUAGE QuasiQuotes, ForeignFunctionInterface #-}
module Graphics.PlotWithGnu where
import Data.List
import Control.Monad
import System.IO
import System.Process
import System.Directory
import System.FilePath
import System.Directory
import System.Posix.Temp
import Data.String.QQ
import qualified Data.ByteString.Char8 as B
import Foreign.C.Types
import Foreign.C.String
import System.IO.Unsafe
type DataTable = [[Double]]
-- | Pick the elements of @xs@ at the given 0-based positions, in the
-- order the positions are listed.  Out-of-range positions fail the
-- same way '(!!)' does.
select :: [Int] -> [a] -> [a]
select positions xs = map (xs !!) positions
-- | Apply every function in the list to the same argument, collecting
-- the results in order.
fselect :: [[a] -> a] -> [a] -> [a]
fselect fs xs = [f xs | f <- fs]
-- | Lazy transpose that stops as soon as the current first row runs
-- out.  Unlike 'Data.List.transpose' (which skips exhausted rows),
-- this truncates the result there — intended to keep memory use down
-- on long rectangular tables.
transpose' :: [[a]] -> [[a]]
transpose' [] = []
transpose' ([] : _) = [] -- try to make transpose memory efficient
transpose' ((x:xs) : xss) = (x : [h | (h:_) <- xss]) : transpose' (xs : [ t | (_:t) <- xss])
-- | Write a table to @filename@ in the aligned text format produced
-- by 'showDataTable'.
saveDataTable :: FilePath -> DataTable -> IO ()
saveDataTable filename = writeFile filename . showDataTable
-- | Read a table back from the text format written by
-- 'saveDataTable'.
loadDataTable :: FilePath -> IO DataTable
loadDataTable filename = fmap readDataTable (readFile filename)
-- | Strict loader: reads the whole file as a ByteString and parses
-- every field with C's @atof@, avoiding 'read'.  Blank lines and
-- lines starting with \'#\' are skipped, like in 'readTable'.
loadDataTable' :: FilePath -> IO DataTable
loadDataTable' filename = B.readFile filename >>=
  return . (map . map) unsafeReadDouble . map B.words .
  filter (not . isComment) . B.lines
  where
    isComment l = B.null l || B.head l == '#'
-- | Parse a Double with C's @atof@.  Wrapping in 'unsafePerformIO' is
-- justified because @atof@ only reads the NUL-terminated buffer that
-- 'B.useAsCString' supplies and has no other observable effect.
-- NOTE(review): @atof@ reports no errors — malformed fields silently
-- parse as 0.0.
unsafeReadDouble :: B.ByteString -> Double
unsafeReadDouble str = unsafePerformIO $ B.useAsCString str c_atof
foreign import ccall unsafe "stdlib.h atof" c_atof :: CString -> IO Double
-- | Pretty-print a table: every column is right-aligned to the width
-- of its longest entry, columns are joined with single spaces, one
-- row per line.
showTable :: [[String]] -> String
showTable rows = unlines (map unwords alignedRows)
  where
    alignedRows = transpose (map alignColumn (transpose rows))
    alignColumn col = map (pad (maximum (map length col))) col
    pad w s = replicate (w - length s) ' ' ++ s
-- | Render a table without column alignment: fields joined by single
-- spaces, one row per line.
showTable' :: [[String]] -> String
showTable' rows = unlines [unwords row | row <- rows]
-- | Parse whitespace-separated table text into rows of fields.
-- Blank lines and lines starting with \'#\' are treated as comments
-- and skipped.
--
-- Fix: the comment test previously called 'head' on every line, so a
-- blank line crashed with @Prelude.head: empty list@.  Blank lines
-- are now skipped, matching the strict loader ('loadDataTable''),
-- whose test is @B.null l || B.head l == '#'@.
readTable :: String -> [[String]]
readTable = map words . filter (not . isComment) . lines where
  isComment l = null l || head l == '#'
-- | Render a numeric table with aligned columns via 'showTable'.
showDataTable :: DataTable -> String
showDataTable table = showTable (map (map show) table)
-- | Parse a numeric table: tokenize with 'readTable', then 'read'
-- every field as a Double.
readDataTable :: String -> DataTable
readDataTable text = map (map read) (readTable text)
createDataFile :: Int -> DataTable -> IO FilePath
createDataFile _ [] = return ""
createDataFile id table = do
saveDataTable dataFile table
return dataFile
where
dataFile = "data-file-" ++ show id
type PlotFile = String
mkPlotFile :: FilePath -> [String] -> String -> [(String, String)] -> PlotFile
mkPlotFile fn settings plot plotlines = unlines $
[ mpTermHeader
, mpSetOutput fn ]
++ settings ++
[ plot ++ " \\"
, intercalate ", \\\n" $ map go plotlines ]
where
go ("", attr) = " " ++ attr
go (dataFile, attr) = " " ++ show dataFile ++ " " ++ attr
mpTermHeader :: PlotFile
mpTermHeader = "set term mp color latex prologues 3 amstex"
mpSetOutput :: FilePath -> PlotFile
mpSetOutput fn = "set output '" ++ fn ++ ".mp'"
-- | Run gnuplot on the given plot script (written to "plotfile"),
-- then convert the resulting MetaPost output to EPS through a
-- generated shell script ("convert.sh").  Both subprocesses append
-- their stderr to a file named "log" in the current directory.
-- NOTE(review): the log handles leak if 'runProcess' throws; wrapping
-- openFile/hClose in 'bracket' would make this exception-safe.
runGnuplotMp :: PlotFile -> IO ()
runGnuplotMp input = do
  writeFile "plotfile" input
  hlogGnuplot <- openFile "log" AppendMode
  void $ runProcess "gnuplot" ["plotfile"] Nothing Nothing Nothing (Just hlogGnuplot) Nothing
    >>= waitForProcess
  hClose hlogGnuplot
  writeFile "convert.sh" commandMpToEps
  hlogConvert <- openFile "log" AppendMode
  void $ runProcess "bash" ["convert.sh"] Nothing Nothing Nothing (Just hlogConvert) Nothing
    >>= waitForProcess
  hClose hlogConvert
commandMpToEps :: String
commandMpToEps = [s|
for i in *.mp ; do
fn=${i%.mp}
echo $fn
TEX=latex mpost -jobname mpost-job $fn.mp
mv mpost-job.0 $fn.eps
done
|]
gnuplot :: FilePath -> [String] -> String -> [(DataTable, String)] -> IO FilePath
gnuplot fn settings plot datalines = do
tempdirP <- getTemporaryDirectory
tempdir <- mkdtemp $ tempdirP </> "plot-with-gnu-"
setCurrentDirectory tempdir
dataFiles <- zipWithM createDataFile [1..] dataTables
let plotfile = mkPlotFile fn settings plot $ zip dataFiles $ map snd datalines
runGnuplotMp plotfile
-- mapM_ removeFile dataFiles
return $ tempdir </> replaceExtension fn ".eps"
where
dataTables = map fst datalines
-- | Open an EPS file in evince, detached (trailing \'&\').
-- NOTE(review): the path goes into a shell command unquoted — paths
-- with spaces or shell metacharacters break (or worse).  Acceptable
-- only for internally generated temp paths.
viewEps :: FilePath -> IO ()
viewEps path = void . system $ "evince " ++ path ++ " &"
-- | Move @old@ to @new@ with @mv@.  Single-quoted, so spaces are
-- fine, but a quote character in either path still breaks the shell
-- command.
saveFile :: FilePath -> FilePath -> IO ()
saveFile old new = void . system $ "mv '" ++ old ++ "' '" ++ new ++ "'"
-- | Render the plot to a temporary directory and open the resulting
-- EPS file in a viewer, restoring the working directory that
-- 'gnuplot' changed.
plotview :: [String] -> String -> [(DataTable, String)] -> IO ()
plotview settings plot datalines = do
  startDir <- getCurrentDirectory
  epsFile <- gnuplot "PlotWithGnu" settings plot datalines
  setCurrentDirectory startDir
  viewEps epsFile
-- | Render the plot and move the resulting EPS file to @fn@,
-- restoring the working directory that 'gnuplot' changed.
plotsave :: FilePath -> [String] -> String -> [(DataTable, String)] -> IO ()
plotsave fn settings plot datalines = do
  startDir <- getCurrentDirectory
  producedEps <- gnuplot "PlotWithGnu" settings plot datalines
  setCurrentDirectory startDir
  saveFile producedEps fn
|
waterret/PlotWithGnu-haskell
|
src/Graphics/PlotWithGnu.hs
|
gpl-3.0
| 4,836 | 0 | 13 | 1,004 | 1,646 | 845 | 801 | 113 | 2 |
module History
(History, emptyHist, histBack, histFwd, histGo, findFirstBack
) where
import Fields
import Types
import Data.Maybe
import qualified Data.Set as Set
-- | A two-sided, browser-style navigation history: @back@ holds
-- previously visited entries (most recent first) and @fwd@ holds
-- entries that were backed out of (next one first).
data History a =
  History
  { fwd :: [a] -- ^ entries reachable by going forward
  , back :: [a] -- ^ entries reachable by going back
  } deriving (Eq, Ord, Show)
-- | A history with nothing to go back or forward to.
emptyHist :: History a
emptyHist = History [] []
-- | Step backwards: pop the most recent @back@ entry, pushing the
-- current location @from@ onto the forward stack.  'Nothing' when
-- there is nowhere to go back to.
histBack :: a -> History a -> Maybe (a, History a)
histBack from h = case back h of
  [] -> Nothing
  (b : bs) -> Just (b, $(upd 'fwd) (from :) h { back = bs })
-- | Step forwards: pop the next @fwd@ entry, pushing the current
-- location @from@ onto the back stack.  Mirror image of 'histBack'.
histFwd :: a -> History a -> Maybe (a, History a)
histFwd from h = case fwd h of
  [] -> Nothing
  (b : bs) -> Just (b, $(upd 'back) (from :) h { fwd = bs })
-- | Navigate to somewhere new: the current location @from@ goes onto
-- the back stack and the forward stack is discarded.
histGo :: a -> History a -> History a
histGo from h = h { back = from : back h, fwd = [] }
-- | The most recently visited back-entry that belongs to the given
-- set, if any.
findFirstBack :: (Ord a) => Set a -> History a -> Maybe a
findFirstBack wanted = listToMaybe . filter (`Set.member` wanted) . back
|
ktvoelker/argon
|
src/History.hs
|
gpl-3.0
| 882 | 0 | 13 | 222 | 429 | 233 | 196 | -1 | -1 |
-- |
-- Module : Main
-- Copyright : (c) Justus Sagemüller 2017
-- License : GPL v3
--
-- Maintainer : (@) jsag $ hvl.no
-- Stability : experimental
-- Portability : portable
--
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module LaTeXComparer where
import Math.LaTeX.Internal.MathExpr
import CAS.Dumb
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Language.Haskell.Meta.Parse
import Text.LaTeX (LaTeX, Text)
type SLaTeX = Text
data TestTree σ = TestGroup String [TestTree σ]
| TestCase (LaTeXMath σ) String SLaTeX
-- | Quasi-quoter that turns a Haskell expression snippet into a
-- 'TestCase' carrying both the parsed expression and its verbatim
-- source text.  Only the expression context is implemented; using the
-- quoter in a pattern, type, or declaration position hits
-- 'undefined', and an unparsable snippet calls 'error' at splice
-- time.
mkLaTeXSnip :: QuasiQuoter
mkLaTeXSnip = QuasiQuoter procExpr undefined undefined undefined
 where procExpr e = return $ case parseExp e of
        Right exp -> ConE 'TestCase `AppE` exp `AppE` LitE (StringL e)
        Left perr -> error perr
|
leftaroundabout/Symbolic-math-HaTeX
|
test/PdfSnippets/LaTeXComparer.hs
|
gpl-3.0
| 855 | 0 | 14 | 181 | 185 | 109 | 76 | 17 | 2 |
-- Add one to odd inputs; even inputs pass through unchanged.
addOneIfOdd n = if odd n then n + 1 else n
-- The smaller of the two arguments, plus five.
addFive x y = min x y + 5
|
dkensinger/haskell
|
haskellbook/lambdas.hs
|
gpl-3.0
| 122 | 0 | 11 | 38 | 80 | 43 | 37 | 4 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Maybe (fromJust)
import Prompt (giveEVs)
import Cards
-- | Benchmark driver: computes EVs for one fixed, hard-coded deal.
-- 'fromJust' is acceptable here because the card strings are literal
-- fixtures — a typo fails fast at startup rather than mid-benchmark.
-- NOTE(review): b/m/t presumably mean bottom/middle/top rows of a
-- 'Player' hand — confirm against the Player constructor.
main = giveEVs p1 p2 card
  where b1 = (fromJust . parseCards) "6h,7d,9s,Ts"
        m1 = (fromJust . parseCards) "Jc,Kd,Jd"
        t1 = (fromJust . parseCards) "3c"
        b2 = (fromJust . parseCards) "2s,5s,As"
        m2 = (fromJust . parseCards) "4d,3h,5h"
        t2 = (fromJust . parseCards) "8h,Qd"
        card = (fromJust . parseCard) "Ad"
        p1 = Player b1 m1 t1
        p2 = Player b2 m2 t2
|
dtrifuno/holz
|
src/Benchmark.hs
|
gpl-3.0
| 532 | 0 | 9 | 146 | 169 | 92 | 77 | 15 | 1 |
-- | A module for converting between integral types.
module MathPrelude.Classes.Integral
( Integral(..)
, fromIntegral98
, toIntegral98
, fromIntegral
-- * Misc
, even
, odd
)
where
-----------------------------------
--- Imports
-----------------------------------
import MathPrelude.Prelude.CorePrelude
import MathPrelude.Prelude.NamedNumbers
import qualified Prelude as P
import MathPrelude.Classes.Ring
-----------------------------------
--- Classes
-----------------------------------
-- | This class describes types that can be transformed into the canonical integer type Integer. All rings already have a map from Integer, and these should be inverses (at least partially).
class (Ring a, Enum a) ⇒ Integral a where
-- | Convert to Integer
toInteger ∷ a → Integer
-----------------------------------
--- Methods
-----------------------------------
-- | Convert from a Haskell98 (Prelude) Integral type to our Integral.
fromIntegral98 :: (P.Integral a, Integral b) => a -> b
fromIntegral98 = fromInteger . P.fromIntegral
-- | Convert from our Integral type to a Haskell98 (Prelude) Integral
-- type.
toIntegral98 :: (Integral a, P.Integral b) => a -> b
toIntegral98 = P.fromIntegral . toInteger
-- | Convert any integral to an element of any ring, going through
-- 'Integer'.  Rewrite rules below collapse this to 'id' when source
-- and target types coincide.
fromIntegral :: (Integral a, Ring b) => a -> b
fromIntegral = fromInteger . toInteger
-- | Test whether an integral is even (via the Prelude's 'P.even').
even :: Integral a => a -> Bool
even = P.even . toInteger
-- | Test whether an integral is odd (via the Prelude's 'P.odd').
odd :: Integral a => a -> Bool
odd = P.odd . toInteger
-----------------------------------
--- Instances
-----------------------------------
-- instance P.Integral a ⇒ Integral a where
-- toInteger = P.toInteger
-- Instances for the standard fixed-size and arbitrary-precision
-- integer types.  Only 'Integer' is the identity; the rest defer to
-- the Prelude's 'P.toInteger'.
instance Integral Integer where
  toInteger = id
instance Integral Int where
  toInteger = P.toInteger
instance Integral Int32 where
  toInteger = P.toInteger
instance Integral Int64 where
  toInteger = P.toInteger
instance Integral Word where
  toInteger = P.toInteger
instance Integral Word32 where
  toInteger = P.toInteger
instance Integral Word64 where
  toInteger = P.toInteger
-----------------------------------
-- Rerwite Rules -- for great justice!
-----------------------------------
-- try not to inline these functions until the last phase (phases count down), so the rewrite rule can come into effect
{-# INLINE [0] fromIntegral98 #-}
{-# RULES "fromIntegral98/a→a" fromIntegral98 = id #-}
{-# INLINE [0] toIntegral98 #-}
{-# RULES "toIntegral98/a→a" toIntegral98 = id #-}
{-# INLINE [0] fromIntegral #-}
{-# RULES "fromIntegral/a→a" fromIntegral = id #-}
-- {-# RULES "fromIntegral/integerToInt" fromIntegral = GHC.integerToInt #-}
-- {-# RULES "fromIntegral/smallInteger" fromIntegral = GHC.smallInteger #-}
-- {-# RULES "fromIntegral/integerToInt64" fromIntegral = GHC.integerToInt64 #-}
-- {-# RULES "fromIntegral/int64ToInteger" fromIntegral = GHC.int64ToInteger #-}
|
RossOgilvie/MathPrelude
|
MathPrelude/Classes/Integral.hs
|
gpl-3.0
| 2,999 | 0 | 7 | 524 | 389 | 233 | 156 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.UnmonitorInstances
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Disables monitoring for a running instance. For more information about
-- monitoring instances, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-cloudwatch.html Monitoring Your Instances and Volumes> in the /AmazonElastic Compute Cloud User Guide/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-UnmonitorInstances.html>
module Network.AWS.EC2.UnmonitorInstances
(
-- * Request
UnmonitorInstances
-- ** Request constructor
, unmonitorInstances
-- ** Request lenses
, uiDryRun
, uiInstanceIds
-- * Response
, UnmonitorInstancesResponse
-- ** Response constructor
, unmonitorInstancesResponse
-- ** Response lenses
, uirInstanceMonitorings
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data UnmonitorInstances = UnmonitorInstances
{ _uiDryRun :: Maybe Bool
, _uiInstanceIds :: List "InstanceId" Text
} deriving (Eq, Ord, Read, Show)
-- | 'UnmonitorInstances' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'uiDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'uiInstanceIds' @::@ ['Text']
--
unmonitorInstances :: UnmonitorInstances
unmonitorInstances = UnmonitorInstances
{ _uiDryRun = Nothing
, _uiInstanceIds = mempty
}
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have the
-- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'.
uiDryRun :: Lens' UnmonitorInstances (Maybe Bool)
uiDryRun = lens _uiDryRun (\s a -> s { _uiDryRun = a })
-- | One or more instance IDs.
uiInstanceIds :: Lens' UnmonitorInstances [Text]
uiInstanceIds = lens _uiInstanceIds (\s a -> s { _uiInstanceIds = a }) . _List
newtype UnmonitorInstancesResponse = UnmonitorInstancesResponse
{ _uirInstanceMonitorings :: List "item" InstanceMonitoring
} deriving (Eq, Read, Show, Monoid, Semigroup)
-- | 'UnmonitorInstancesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'uirInstanceMonitorings' @::@ ['InstanceMonitoring']
--
unmonitorInstancesResponse :: UnmonitorInstancesResponse
unmonitorInstancesResponse = UnmonitorInstancesResponse
{ _uirInstanceMonitorings = mempty
}
-- | Monitoring information for one or more instances.
uirInstanceMonitorings :: Lens' UnmonitorInstancesResponse [InstanceMonitoring]
uirInstanceMonitorings =
lens _uirInstanceMonitorings (\s a -> s { _uirInstanceMonitorings = a })
. _List
instance ToPath UnmonitorInstances where
toPath = const "/"
instance ToQuery UnmonitorInstances where
toQuery UnmonitorInstances{..} = mconcat
[ "DryRun" =? _uiDryRun
, "InstanceId" `toQueryList` _uiInstanceIds
]
instance ToHeaders UnmonitorInstances
instance AWSRequest UnmonitorInstances where
type Sv UnmonitorInstances = EC2
type Rs UnmonitorInstances = UnmonitorInstancesResponse
request = post "UnmonitorInstances"
response = xmlResponse
instance FromXML UnmonitorInstancesResponse where
parseXML x = UnmonitorInstancesResponse
<$> x .@? "instancesSet" .!@ mempty
|
romanb/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/UnmonitorInstances.hs
|
mpl-2.0
| 4,346 | 0 | 10 | 859 | 503 | 306 | 197 | 60 | 1 |
{-# LANGUAGE TypeOperators, TypeFamilies, FlexibleContexts
, TypeSynonymInstances, MultiParamTypeClasses, Rank2Types
, FlexibleInstances, ScopedTypeVariables
, ConstraintKinds #-}
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
----------------------------------------------------------------------
-- |
-- Module : Shady.Image
-- Copyright : (c) Conal Elliott 2009
-- License : GPLv3
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Images (infinite & continuous)
----------------------------------------------------------------------
-- This variation uses Complex
module Shady.Image
(
Point, pointToR2, r2ToPoint
, FilterG, Filter, samplerIm, scale2, uscale2, translate2, rotate2
, bilerp, bilerpC
, ImageG, Image
-- * General regions
, PRegion, Region
, universeR, emptyR, eqF, neqF, intersectR, unionR, xorR, diffR, complementR
, udisk, disk, annulus, checker
, crop
-- * Space-varying transformations
, transformG, translate2Im, scale2Im, uscale2Im, rotate2Im
, swirl -- , uswirl
, utile, tile
) where
import Control.Applicative (Applicative(..),liftA2)
import Shady.Complex
import Data.VectorSpace
import Data.Boolean
import Shady.Misc
import Shady.Language.Exp
import Shady.Color
import Shady.ITransform
type Point = ComplexE R
pointToR2 :: Point -> R2E
pointToR2 (x :+ y) = vec2 x y
r2ToPoint :: R2E -> Point
r2ToPoint xy = getX xy :+ getY xy
-- | Generalized image -- continuous & infinite
type ImageG s a = Complex s -> a
-- | Continuous, infinite image
type Image a = ImageG FloatE a
-- == Point -> a
-- | Generalized filter, polymorphic over domain
type FilterG p a = Unop (p -> a)
-- | Image filter
type Filter a = FilterG Point a
-- Unop (Image a)
-- | Wrap up a sampler as an image
samplerIm :: Sampler2 :=> Image Color
samplerIm s = r4ToColor . texture s . pointToR2
-- -- | 2D invertible transform
-- type ITransform2 = ITransform Point
translate2X :: AdditiveGroup a => a -> ITransform a
scale2X :: Fractional s => Complex s -> ITransform (Complex s)
uscale2X :: Fractional s => s -> ITransform (Complex s)
rotate2X :: (AdditiveGroup s, Eq s, Floating s) => s -> ITransform (Complex s)
translate2X = andInverse (^+^) negateV
scale2X = andInverse (onRI2 (*)) (onRI recip)
rotate2X = andInverse rotate2C negate
uscale2X = scale2X . \ a -> a :+ a
rotate2C :: (AdditiveGroup s, Eq s, Floating s) => s -> Unop (Complex s)
rotate2C theta = (cis theta *)
-- experiment
-- translate2, scale2 :: (AdditiveGroup s, Eq s, Floating s, ITrans (Complex s) a) => Complex s -> Unop a
-- uscale2,rotate2 :: (AdditiveGroup s, Eq s, Floating s, ITrans (Complex s) a) => s -> Unop a
-- type TransScalar s = (AdditiveGroup s, Eq s, Floating s)
type TransScalar s = (s ~ E R1)
translate2, scale2 :: (TransScalar s, ITrans (Complex s) a) => Complex s -> Unop a
uscale2,rotate2 :: (TransScalar s, ITrans (Complex s) a) => s -> Unop a
translate2 = (*:) . translate2X
scale2 = (*:) . scale2X
rotate2 = (*:) . rotate2X
uscale2 = (*:) . uscale2X
-- translate2 :: ITransform Point
-- (*:) :: ITransform w -> Unop a
-- (*:) . translate2 :: ITransform Point
-- :: ITransform w -> Unop a
-- | Bilinear interpolation
-- Interpolate along the bottom (ll->lr) and top (ul->ur) edges at
-- parameter dx, then blend those two results at dy.
bilerp :: VectorSpace w =>
          w -> w -> w -> w -> (Scalar w, Scalar w) -> w
bilerp ll lr ul ur (dx,dy) = lerp bottom top dy
  where
    bottom = lerp ll lr dx
    top    = lerp ul ur dx
-- | Bilinear interpolation image
bilerpC :: (VectorSpace w, Scalar w ~ s) =>
w -> w -> w -> w -> ImageG s w
bilerpC ll lr ul ur (dx :+ dy) = bilerp ll lr ul ur (dx,dy)
{--------------------------------------------------------------------
Generalized regions
--------------------------------------------------------------------}
-- TODO: Move most of these definitions elsewhere, since they're not
-- specific to 2D.
-- | Region over general space
type PRegion p = p -> BoolE
-- | 2D spatial region
type Region = Image BoolE
universeR, emptyR :: Applicative f => f BoolE
universeR = pure true
emptyR = pure false
eqF, neqF :: (IsNat n, IsScalar a, Eq a, Applicative f) =>
f (VecE n a) -> f (VecE n a) -> f BoolE
eqF = liftA2 (==^)
neqF = liftA2 (/=^)
-- intersectR, unionR, xorR, diffR
-- :: LiftA2 BoolE BoolE BoolE b b b => b -> b -> b
-- complementR :: LiftA1 BoolE BoolE b b => b -> b
intersectR, unionR, xorR, diffR :: Applicative f => Binop (f BoolE)
complementR :: Applicative f => Unop (f BoolE)
intersectR = liftA2 (&&*)
unionR = liftA2 (||*)
complementR = fmap notE
xorR = neqF
diffR r r' = r `intersectR` complementR r'
-- | Generalized unit disk/ball
udisk :: (InnerSpace p, Scalar p ~ FloatE) => PRegion p
udisk p = magnitudeSq p <=* 1
-- | Generalized disk/ball, given radius
disk :: (InnerSpace p, Scalar p ~ FloatE) => FloatE -> PRegion p
disk s = udisk . (^/ s)
-- | Generalized annulus, given outer & inner radii
annulus :: (InnerSpace p, Scalar p ~ FloatE) => FloatE -> FloatE -> PRegion p
annulus o i = disk o `diffR` disk i
-- | Checker-board
checker :: Region
checker (x :+ y) = getX c ==* getY c
where c = frac (x <+> y) >* 0.5
-- checker (x :+ y) = big x ==* big y
-- where
-- big = (>* 0.5) . frac
{--------------------------------------------------------------------
Some generalized transforms
--------------------------------------------------------------------}
-- | General domain-varying transformation.
transformG' :: (c -> Unop p) -> (p -> c) -> Unop (p -> a)
transformG' f imc ima p = ima (f (imc p) p)
-- transformG' :: (c -> Unop Point) -> Image c -> Filter a
-- | General domain-varying transformation.
transformG :: (c -> ITransform p) -> (p -> c) -> Unop (p -> a)
transformG f = transformG' (itBackward . f)
-- transformG :: (c -> ITransform2) -> Image c -> Filter a
-- translate2Im :: Image Point -> Filter a
-- scale2Im :: Image Point -> Filter a
-- uscale2Im :: Image FloatE -> Filter a
-- rotate2Im :: Image FloatE -> Filter a
-- | Space-varying 'translate2'
translate2Im :: AdditiveGroup p => Unop p -> Unop (p -> a)
translate2Im = transformG translate2X
-- | Space-varying 'scale2'
scale2Im :: Fractional s => Unop (Complex s) -> Unop (ImageG s a)
scale2Im = transformG scale2X
-- | Space-varying 'uscale2'
uscale2Im :: Fractional s => ImageG s s -> Unop (ImageG s a)
uscale2Im = transformG uscale2X
-- | Space-varying 'rotate2'
rotate2Im :: (AdditiveGroup s, Eq s, Floating s) => ImageG s s -> Unop (ImageG s a)
rotate2Im = transformG rotate2X
{--------------------------------------------------------------------
Other transformations
--------------------------------------------------------------------}
-- -- | Unit swirl
-- uswirl :: Filter a
-- uswirl = rotate2Im magnitude
-- -- | Swirl transformation
-- swirl :: FloatE -> Filter a
-- swirl s = hyperUscale2 s uswirl
-- *Almost* equivalent, but differs for negative s.
-- | Swirl transformation
swirl :: (AdditiveGroup s, Eq s, Floating s) => s -> Unop (ImageG s a)
swirl s = rotate2Im ((2*pi*s*) . magnitude)
utile' :: Frac p => Unop (p -> a)
utile' = (. frac)
-- Hm! This utile' definition repeats [0,1), not [-.5,.5). Eep. How can
-- I shift without loss of generality? For instance, the current
-- definition can handle nD.
-- | Unit, rectangular tiling.
-- utile :: (AdditiveGroup s, Eq s, Frac p, ITrans (Complex s) p, ITrans (Complex s) a, Floating s) =>
-- Unop (p -> a)
-- utile :: Unop (Image a)
-- utile = translate2 (negate (0.5 :+ 0.5)) utile'
-- -- utile :: Unop (Image a)
-- utile :: forall p a s. (Fractional s, Frac p, ITrans (Complex s) a) =>
-- Unop (p -> a)
-- utile = translate2 (negate (0.5 :+ 0.5 :: Complex s)) utile'
utile :: (Frac p, AdditiveGroup p, Floating p, Eq p) =>
Unop (Complex p -> a)
utile f = utile' (f . subtract (0.5 :+ 0.5))
-- TODO: Generalize uniform scaling to arbitrary vector spaces, scaling
-- via scalar field.
-- Rectangle tiling with given size.
-- tile :: ITrans Point a => Point -> Filter a
-- tile :: (AdditiveGroup s, Eq s, Floating s, Frac s, ITrans (Complex s) a) =>
-- Complex s -> Unop (ImageG s a)
-- tile :: ( AdditiveGroup s, Floating s, Eq s
-- , ITrans (Complex s) Point, ITrans (Complex s) a) =>
-- Complex s -> Unop (Image a)
tile :: ( TransScalar s
, AdditiveGroup p, Floating p, Eq p
, ITrans (Complex s) (Complex p), ITrans (Complex s) a, Frac p
) =>
Complex s -> Unop (Complex p -> a)
tile s = scale2 s utile
-- tile = flip scale2 utile
-- instance ITrans (Complex s) (Complex s) where (*:) = itForward
{--------------------------------------------------------------------
Orphans
--------------------------------------------------------------------}
-- Standard do-nothing transformation
instance ITrans Point Color where (*:) = const id
|
conal/shady-graphics
|
src/Shady/Image.hs
|
agpl-3.0
| 8,908 | 0 | 12 | 1,892 | 2,016 | 1,117 | 899 | 111 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module HmfParserSpec where
import Common
import HMF
------------------------------------------------------------------------------
import Control.Monad.Free
import Control.Monad.State
import Test.Hspec
spec :: Spec
spec = describe "HmfParserSpec" xxx
xxx = do
it "a1" $
pp "flushPage [11,22,33];"
`shouldBe`
Right ("", ([[1,2,3], [4,5,6]], 1, 0, [[11,22,33]], []))
it "a2" $
pp "pageMisses;"
`shouldBe`
Right ("", ([[4,5,6]], 0, 1, [], [[1,2,3]]))
it "a3" $
pp " flushPage [11,22,33] ; pageMisses ; flushPage [111,222,333];\n"
`shouldBe`
Right ("", ([[4,5,6]], 2, 1, [[111,222,333], [11,22,33]], [[1,2,3]]))
it "a4" $
pp "pageMisses;\nflushPage [111,222,333]"
`shouldBe`
Right ("", ([[4,5,6]], 1, 1, [[111,222,333]], [[1,2,3]]))
it "a5" $
pp "pageMisses\nxxx"
`shouldBe`
Left "xxx [] endOfInput"
it "a6" $
pp "xxx"
`shouldBe`
Left "xxx [] string"
it "a7" $
pp " flushPage [11,22,33] ; pageXisses ; flushPage [111,222,333];\n"
`shouldBe`
Left "pageXisses ; flushPage [111,222,333];\n [] endOfInput"
pp s = do
(u, r) <- parseFully (parseHmf s)
let a = xhmf r
return (u, a)
-- in/PM nFP nPM out/FP out/PM
analyzeHmf :: HmfCmd a -> ([[Int]], Int, Int, [[Int]], [[Int]])
analyzeHmf t = execState
(a t) ( [[1,2,3],[4,5,6]] -- input to PM
, 0 -- num FP
, 0 -- num PM
, [] -- output of FP
, [] -- output of PM
)
where
a = foldFree $ \case
FlushPage ps next -> do
( ipm, nfp, npm, ofp, opm ) <- get
put (ipm, nfp+1, npm, ps:ofp, opm )
return next
PageMisses next -> do
( i:ipm, nfp, npm, ofp, opm ) <- get
put (ipm, nfp, npm+1, ofp, i:opm )
return (next i)
-- | Run the pure 'analyzeHmf' interpreter over the command program
-- carried inside a 'Sig', ignoring its type component.
xhmf :: Sig Ty HmfCmd -> ([[Int]], Int, Int, [[Int]], [[Int]])
xhmf (Sig _ f) = analyzeHmf f
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/topic/fix-free/2016-01-benjamin-hodgson-parsing-to-free-monads/test/HmfParserSpec.hs
|
unlicense
| 2,133 | 0 | 15 | 704 | 826 | 474 | 352 | 61 | 2 |
-- Classic quicksort with the head as pivot: elements <= pivot go
-- left, elements > pivot go right, both sides sorted recursively.
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) = quicksort below ++ [x] ++ quicksort above
  where
    below = [a | a <- xs, a <= x]
    above = [a | a <- xs, a > x]
-- | Quicksort variant that partitions with 'filter'.
--
-- Fix: the recursive clause was written for @quicksort@ instead of
-- @quicksort'@, which both left @quicksort'@ without a cons case and
-- made the module fail to compile (non-contiguous equations for
-- @quicksort@).
quicksort' :: (Ord a) => [a] -> [a]
quicksort' [] = []
quicksort' (x:xs) =
    let smaller = filter (<= x) xs
        larger = filter (> x) xs
    in quicksort' smaller ++ [x] ++ quicksort' larger
|
mikoim/musor
|
haskell/Learn You a Haskell for Great Good!/sort.hs
|
unlicense
| 394 | 0 | 11 | 110 | 225 | 118 | 107 | -1 | -1 |
{-
Created : 2014 Oct 07 (Tue) 08:53:35 by Harold Carr.
Last Modified : 2014 Oct 19 (Sun) 21:01:05 by Harold Carr.
2012-01-01
http://www.haskellforall.com/2012/01/haskell-for-c-programmers-for-loops.html
FOR LOOPS
-}
module X_2012_01_01_loops where
import Control.Applicative
import Control.Monad.State.Lazy
import Data.Lens.Lazy
-- FOLDS
{-
C:
double sum(double *xs, size_t num_xs) {
size_t i;
double result = 0.0;
for (i = 0; i < num_xs; i++) {
result += xs[i];
}
return result;
}
-}
-- C-like loops in Haskell:
-- | Repeat @body@ as long as the monadic condition yields 'True'.
-- The condition is re-evaluated before every iteration; the body's
-- result is discarded.
while :: (Monad m) => m Bool -> m a -> m ()
while cond body = loop
  where
    loop = do
      keepGoing <- cond
      when keepGoing (body >> loop)
-- | C-style for loop: run the initializer once, then repeat the body
-- followed by the post-step while the condition holds.
for :: (Monad m) => m a -> m Bool -> m b -> m c -> m ()
for setup cond step body = do
    setup
    while cond (body >> step)
data Status = Status { _i :: Int, _result :: Double }
class Default a where def :: a
instance Default Int where def = 0
instance Default Double where def = 0.0
instance Default Status where def = Status def def
i :: Lens Status Int
i = lens _i (\x s -> s { _i = x })
result :: Lens Status Double
result = lens _result (\x s -> s { _result = x })
sum1 :: [Double] -> Int -> Double
sum1 xs n = flip evalState def $ do
result ~= 0
for (i ~= 0) (liftM (< n) (access i)) (i += 1) $ do
i' <- access i
result += (xs !! i')
access result
-- | Sum a list by threading the running total through the State monad
-- (the Haskell analogue of a C foreach with a mutable accumulator).
-- The initial state comes from the local 'Default' instance for
-- Double (0.0).
--
-- Fix: use 'forM_' rather than 'forM' — the list of unit results was
-- never used, so there is no reason to accumulate it.
sum2 :: [Double] -> Double
sum2 xs = flip evalState def $ do
    forM_ xs $ \x ->
        modify (\a -> a + x)
    get
sum3 :: [Double] -> Double
sum3 xs = flip evalState (def :: Double) $
foldM (\a x -> return (a + x)) 0.0 xs
-- | Sum via a left fold, using the (+) section directly.
sum4 :: [Double] -> Double
sum4 = foldl (+) 0.0
-- | Point-free left-fold sum.
-- NOTE(review): 'foldl' builds a chain of thunks on long lists;
-- 'Data.List.foldl'' would keep the accumulator strict.
sum5 :: [Double] -> Double
sum5 = foldl (+) 0.0
-- LOOPS
{-
C:
void printer(double *xs, size_t num_xs)
{
for (i = 0; i < num_xs; i++)
{ printf("%f\n", xs[i]); }
}
-}
{- TODO
-- printer1 :: (Show a) => [a] -> Int -> IO ()
printer1 xs n = flip execStateT 0 $
for (put 0) (liftM (< n) get) (modify (+ 1)) $ do
i' <- get
print $ xs !! i'
-}
printer2 :: (Show a) => [a] -> IO ()
printer2 xs = forM_ xs $ \x -> print x
printer3 :: (Show a) => [a] -> IO ()
printer3 xs = forM_ xs print
printer4 :: (Show a) => [a] -> IO ()
printer4 = mapM_ print
nested1 :: IO ()
nested1 =
forM_ [1..10::Int] $ \i' ->
forM_ [1..10::Int] $ \j ->
print (i', j)
nested2 :: IO ()
nested2 =
mapM_ print $ do
i' <- [1..10::Int]
j <- [1..10::Int]
return (i', j)
nested3 :: IO ()
nested3 =
mapM_ print [(i', j) | i' <- [1..10::Int], j <- [1..10::Int]]
nested4 :: IO ()
nested4 =
mapM_ print $ (,) <$> [1..10::Int] <*> [1..10::Int]
-- Modularity
-- separate data generation from data consumption
-- | All 100 coordinate pairs of a 10x10 grid in row-major order —
-- the same order the applicative form @(,) <$> xs <*> ys@ produces.
generator :: [(Integer, Integer)]
generator = [ (i, j) | i <- [1 .. 10], j <- [1 .. 10] ]
consumer :: (Show a) => [a] -> IO ()
consumer = mapM_ print
-- End of file.
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/topic/general/haskellforall/src/X_2012_01_01_loops.hs
|
unlicense
| 3,034 | 0 | 13 | 888 | 1,080 | 571 | 509 | 71 | 1 |
module Graham.A092487Spec (main, spec) where
import Test.Hspec
import Graham.A092487 (a092487)
main :: IO ()
main = hspec spec
-- | Pin a092487 against the first 20 published terms of OEIS
-- sequence A092487 (sequence offset is 1, hence @[1..]@).
spec :: Spec
spec = describe "A092487" $
  it "correctly computes the first 20 elements" $
    take 20 (map a092487 [1..]) `shouldBe` expectedValue where
    expectedValue = [0,4,5,0,5,6,7,7,0,8,11,8,13,7,9,0,17,9,19,10]
|
peterokagey/haskellOEIS
|
test/Graham/A092487Spec.hs
|
apache-2.0
| 352 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.V1.PodCondition where
import GHC.Generics
import Data.Text
import Data.Aeson.TH (deriveJSON, defaultOptions, fieldLabelModifier)
-- | PodCondition contains details for the current condition of this pod.
data PodCondition = PodCondition
{ type_ :: Text -- ^ Type is the type of the condition. Currently only Ready. More info: http://releases.k8s.io/HEAD/docs/user-guide/pod-states.md#pod-conditions
, status :: Text -- ^ Status is the status of the condition. Can be True, False, Unknown. More info: http://releases.k8s.io/HEAD/docs/user-guide/pod-states.md#pod-conditions
, lastProbeTime :: Maybe Text -- ^ Last time we probed the condition.
, lastTransitionTime :: Maybe Text -- ^ Last time the condition transitioned from one status to another.
, reason :: Maybe Text -- ^ Unique, one-word, CamelCase reason for the condition's last transition.
, message :: Maybe Text -- ^ Human-readable message indicating details about last transition.
} deriving (Show, Eq, Generic)
$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if Prelude.last n == '_' then Prelude.take ((Prelude.length n) - 1 ) n else n)} ''PodCondition)
|
minhdoboi/deprecated-openshift-haskell-api
|
kubernetes/lib/Kubernetes/V1/PodCondition.hs
|
apache-2.0
| 1,381 | 0 | 18 | 216 | 188 | 112 | 76 | 19 | 0 |
-- Final Countdown
-- from chapter 14 of Learn you a Haskell
-- Demonstrates faster performance of DiffList which
-- represents a list as nested applications of concatenate
-- ex: runhaskell --ghc-arg="-package mtl" haskell/final_countdown.hs
import Control.Monad.Writer
newtype DiffList a = DiffList { getDiffList :: [a] -> [a] }
-- | Wrap a list as a difference list: the list becomes a function
-- that prepends it to whatever tail comes next.
toDiffList :: [a] -> DiffList a
toDiffList xs = DiffList (\rest -> xs ++ rest)
-- | Recover the plain list by applying the difference list to the
-- empty tail.
fromDiffList :: DiffList a -> [a]
fromDiffList dl = getDiffList dl []
-- Show via the materialised list, so output reads like the data.
instance Show a => Show (DiffList a) where
    show d = "DiffList " ++ (show $ fromDiffList d)
-- Append is function composition in disguise: O(1) regardless of
-- list length, which is the whole point of the representation.
instance Semigroup (DiffList a) where
    (DiffList f) <> (DiffList g) = DiffList (\xs -> f (g xs))
-- The identity prepends nothing.
instance Monoid (DiffList a) where
    mempty = DiffList (\xs -> [] ++ xs)
-- | Count from 0 up to @x@, logging one line per number through the
-- Writer monad.  The recursion nests appends to the left, which is
-- cheap here because DiffList append is O(1) — contrast with the
-- plain-list 'stringCountDown' below.
finalCountDown :: Int -> Writer (DiffList String) ()
finalCountDown 0 = do
    tell (toDiffList ["0"])
finalCountDown x = do
    finalCountDown (x-1)
    tell (toDiffList [show x])
stringCountDown :: Int -> Writer [String] ()
stringCountDown 0 = do
tell ["0"]
stringCountDown x = do
stringCountDown (x-1)
tell [show x]
-- Read a starting number and run both countdown variants, printing
-- the accumulated log of each; the DiffList variant demonstrates the
-- faster representation (see module header).
-- NOTE(review): 'read' crashes on non-numeric input; 'readMaybe'
-- would fail gracefully.
main = do
    putStrLn "Countdown from:"
    line <- getLine
    let n = (read line) :: Int
    mapM_ putStrLn . fromDiffList . snd . runWriter $ finalCountDown n
    mapM_ putStrLn . snd . runWriter $ stringCountDown n
|
cbare/Etudes
|
haskell/final_countdown.hs
|
apache-2.0
| 1,308 | 0 | 11 | 283 | 471 | 236 | 235 | 30 | 1 |
module External.A009766Spec (main, spec) where
import Test.Hspec
import External.A009766 (a009766)
main :: IO ()
main = hspec spec
-- | Pin a009766 against the first 20 published terms of OEIS
-- sequence A009766 (values match Catalan's triangle read by rows;
-- sequence offset is 0, hence @[0..19]@).
spec :: Spec
spec = describe "A009766" $
  it "correctly computes the first 20 elements" $
    map a009766 [0..19] `shouldBe` expectedValue where
    expectedValue = [1, 1, 1, 1, 2, 2, 1, 3, 5, 5, 1, 4, 9, 14, 14, 1, 5, 14, 28, 42]
|
peterokagey/haskellOEIS
|
test/External/A009766Spec.hs
|
apache-2.0
| 367 | 0 | 8 | 76 | 154 | 92 | 62 | 10 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.V1.SELinuxContextStrategyOptions where
import GHC.Generics
import Data.Text
import Kubernetes.V1.SELinuxOptions
import Data.Aeson.TH (deriveJSON, defaultOptions, fieldLabelModifier)
-- |
-- Mirrors the Kubernetes v1 SELinuxContextStrategyOptions API object.
data SELinuxContextStrategyOptions = SELinuxContextStrategyOptions
  { type_ :: Maybe Text -- ^ strategy used to generate the SELinux context
  , seLinuxOptions :: Maybe SELinuxOptions -- ^ seLinuxOptions required to run as; required for MustRunAs
  } deriving (Show, Eq, Generic)

-- Derive To/FromJSON instances.  The fieldLabelModifier strips a single
-- trailing underscore so that the JSON key for 'type_' is "type"
-- (the underscore exists only to avoid the Haskell keyword).
$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if Prelude.last n == '_' then Prelude.take ((Prelude.length n) - 1 ) n else n)} ''SELinuxContextStrategyOptions)
|
minhdoboi/deprecated-openshift-haskell-api
|
kubernetes/lib/Kubernetes/V1/SELinuxContextStrategyOptions.hs
|
apache-2.0
| 869 | 0 | 18 | 118 | 160 | 94 | 66 | 16 | 0 |
module Calendar where
import Data.List
import Data.List.Split
import Data.Time.Calendar
import Data.Time.Calendar.OrdinalDate
import Data.Time.Format
import Data.Time.Clock
import Text.Printf
type Week = [Day]
type Month = [Week]
-- Converts a list into a string by using a formatter function
-- | Render each element with @formatter@ and concatenate the results.
listToString :: [a] -> (a -> String) -> String
listToString ls formatter = concatMap formatter ls
-- Join a list of strings with a space separator
-- | Join each inner list of strings with single-space separators.
joinStrings :: [[String]] -> [String]
joinStrings = map (intercalate " ")
-- gives all days of a year
-- | All calendar days of the given year, in order (handles leap years
-- via the 'Day' enumeration).
datesInYear :: Integer -> [Day]
datesInYear y = [firstDay .. lastDay]
  where
    firstDay = fromGregorian y 1 1
    lastDay  = addDays (-1) (fromGregorian (y + 1) 1 1)
-- | Month component (1..12) of a date.
monthOfDate :: Day -> Int
monthOfDate d = case toGregorian d of (_, m, _) -> m
-- | Day-of-month component (1..31) of a date.
dayOfDate :: Day -> Int
dayOfDate d = case toGregorian d of (_, _, dom) -> dom
-- returns the calender week number (1..52)
-- | Week number with weeks starting on Monday (see 'mondayStartWeek':
-- days before the year's first Monday fall in week 0).
weekNumber :: Day -> Int
weekNumber = fst . mondayStartWeek
-- returns the week day number (monday==1 , sunday==7)
-- | Day of the week, Monday == 1 .. Sunday == 7.
weekDay :: Day -> Int
weekDay = snd . mondayStartWeek
-- | Full month name (locale "%B"), left-justified in a 21-character
-- field so month columns line up in the calendar grid.
monthName :: Day -> String
monthName date = printf "%-21s" fullName
  where
    fullName = formatTime defaultTimeLocale "%B" date
-- Group consecutive days that fall in the same month.
byMonth = groupBy sameMonth
  where sameMonth d1 d2 = monthOfDate d1 == monthOfDate d2

-- Group consecutive days that fall in the same calendar week.
byWeek = groupBy sameWeek
  where sameWeek d1 d2 = weekNumber d1 == weekNumber d2

-- Within each month, split the days into weeks.
monthByWeek = map byWeek
-- | Render one week as a row of 3-character day cells, space-padding
-- the weekday slots before the first and after the last day present.
-- Assumes a non-empty week (guaranteed by the grouping in 'showCalendar').
showWeek :: Week -> String
showWeek week = leading ++ cells ++ trailing
  where
    leading  = replicate (3 * (weekDay (head week) - 1)) ' '
    cells    = concatMap (printf "%3d" . dayOfDate) week
    trailing = replicate (3 * (7 - weekDay (last week))) ' '
-- Every months is shown by exactly 7 lines: name, weeks + padding
-- | Render a month as exactly 7 lines: the month name, its week rows,
-- and blank padding lines so every month has the same height.
showMonth :: Month -> [String]
showMonth weeks = header : weekRows ++ padding
  where
    header    = monthName (head (head weeks))
    weekRows  = map showWeek weeks
    padding   = replicate (6 - length weeks) blankLine
    blankLine = replicate 21 ' '
-- | Render a whole year's calendar as text lines, laying the months
-- out @cols@ per row (transposing each row so week lines interleave).
showCalendar :: Integer -> Int -> [String]
showCalendar year cols = concatMap joinStrings transposedRows
  where
    months         = map showMonth (monthByWeek (byMonth (datesInYear year)))
    transposedRows = map transpose (chunksOf cols months)
-- | The current year according to the system clock, interpreted in UTC.
-- (Added the explicit type signature the rest of the module already uses.)
getCurrentYear :: IO Integer
getCurrentYear = do
  today <- utctDay <$> getCurrentTime
  let (year, _, _) = toGregorian today
  return year
|
marmutro/console-calendar
|
Calendar.hs
|
bsd-2-clause
| 2,475 | 0 | 14 | 534 | 855 | 454 | 401 | 57 | 1 |
module Main where
import System.ZMQ
import Control.Monad (forever)
import Data.ByteString.Char8 (unpack, empty)
import System.IO (hSetBuffering, stdout, BufferMode(..))
import Control.Concurrent (threadDelay)
main :: IO ()
-- ZeroMQ "parallel pipeline" worker (zguide taskwork2): PULLs work from
-- the ventilator (5557), PUSHes results to the sink (5558), and
-- SUBscribes to the controller (5559) that broadcasts the kill signal.
-- The nested 'withSocket' calls guarantee sockets are closed on exit.
main = withContext 1 $ \context -> do
  withSocket context Pull $ \receiver -> do
    connect receiver "tcp://localhost:5557"
    withSocket context Push $ \sender -> do
      connect sender "tcp://localhost:5558"
      withSocket context Sub $ \controller -> do
        connect controller "tcp://localhost:5559"
        -- Empty subscription prefix: receive every controller message.
        subscribe controller ""
        hSetBuffering stdout NoBuffering
        pollContinuously receiver controller sender
pollContinuously :: Socket a -> Socket b -> Socket c -> IO ()
-- Block until the work socket or the controller socket is readable.
-- If the controller fired (the kill broadcast), return so the caller's
-- 'withSocket' wrappers can clean up; otherwise pull one work item,
-- sleep that many seconds, forward it to the sink, and loop.
-- The rebound rec'/cont'/e1' results of 'poll' are intentionally unused.
pollContinuously rec cont sends = do
  [S rec' e1', S cont' e2'] <- poll [S rec In, S cont In] (-1)
  case e2' of
    In -> return ()
    None -> do
      msg <- receive rec []
      -- Message payload is the workload in seconds.
      let sleep = (read $ unpack msg) :: Int
      threadDelay $ sleep * 1000 * 1000
      send sends msg []
      print "."
      pollContinuously rec cont sends
|
krattai/noo-ebs
|
docs/zeroMQ-guide2/examples/Haskell/taskwork2.hs
|
bsd-2-clause
| 1,128 | 0 | 21 | 314 | 386 | 187 | 199 | 29 | 2 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.AmountOfMoney.BG.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.AmountOfMoney.Types
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Types
-- | Bulgarian amount-of-money corpus: the default test context with the
-- BG locale, default test options, and the positive examples below.
corpus :: Corpus
corpus = (testContext {locale = makeLocale BG Nothing}, testOptions, allExamples)
-- | Positive examples: each 'examples' pairs an expected resolved value
-- (currency + amount, or an interval via 'between'/'under'/'above')
-- with Bulgarian surface forms that must parse to it.
allExamples :: [Example]
allExamples = concat
  [ examples (simple BGN 1)
             [ "1 лв"
             , "един лев"
             , "1 Лев"
             ]
  , examples (simple BGN 10)
             [ "10 лв"
             , "десет лева"
             , "10лв"
             ]
  , examples (simple BGN 15.50)
             [ "15лв и 50ст"
             , "петнадесет лева и петдесет стотинки"
             , "15 Лв и 50 Ст"
             ]
  , examples (simple Dollar 1)
             [ "$1"
             , "един долар"
             , "1 долар"
             ]
  , examples (simple Dollar 10)
             [ "$10"
             , "$ 10"
             , "10$"
             , "10 Долара"
             , "десет долара"
             ]
  , examples (simple Cent 10)
             [ "10 цента"
             , "десет пенита"
             , "десет цента"
             , "10¢"
             ]
  , examples (simple Cent 50)
             [ "50 ст"
             , "петдесет стотинки"
             , "50ст"
             ]
  -- 1e4 == 10,000 dollars; covers the "К"/"к" thousands suffix.
  , examples (simple Dollar 1e4)
             [ "$10К"
             , "10к$"
             , "$10,000"
             ]
  , examples (simple USD 3.14)
             [ "USD3.14"
             , "3.14US$"
             , "US$ 3.14"
             ]
  -- "\x20ac" is the euro sign.
  , examples (simple EUR 20)
             [ "20\x20ac"
             , "20 евро"
             , "20 Евро"
             , "EUR 20"
             , "EUR 20.0"
             , "20€"
             , "20 €ur"
             ]
  -- "\x00a3" is the pound sign; "\&" just separates the escapes.
  , examples (simple Pound 10)
             [ "\x00a3\&10"
             , "десет паунда"
             ]
  , examples (simple Dollar 20.43)
             [ "$20 и 43ц"
             , "$20 43"
             , "20 долара 43ц"
             , "20 долара 43 цента"
             , "двадесет долара 43 цента"
             , "20 долара 43"
             , "двадесет долара и 43"
             ]
  , examples (simple GBP 3.01)
             [ "GBP3.01"
             , "GBP 3.01"
             , "3 GBP 1 пени"
             ]
  , examples (between Dollar (10, 20))
             [ "между 10 и 20 долара"
             , "от 10 до 20 долара"
             , "около 10-20 долара"
             , "между 10 и 20 долара"
             , "около $10-$20"
             , "10-20 долара"
             ]
  , examples (under EUR 7)
             [ "под седем евро"
             , "по-малко от 7 Евро"
             , "под 7€"
             ]
  , examples (above Dollar 1.42)
             [ "над 1 долар и четиридесет и два цента"
             , "поне $1.42"
             , "над 1.42 долара"
             ]
  ]
|
facebookincubator/duckling
|
Duckling/AmountOfMoney/BG/Corpus.hs
|
bsd-3-clause
| 3,442 | 0 | 10 | 1,425 | 542 | 316 | 226 | 90 | 1 |
module Zero.Registration.Client
(
initCredentialsHkdf
, bindRegistration
) where
import Control.Monad.IO.Class (liftIO)
import qualified Data.Text as T
import Data.Text (Text)
import Data.ByteString.Char8 (ByteString)
import Reflex.Dom.Core
import Zero.Sjcl.BitArray (BitArray, bitSlice)
import qualified Zero.Sjcl.Hex as Hex
import qualified Zero.Sjcl.Utf8 as Utf8
import Zero.Sjcl.Hkdf (hkdf)
import Zero.SRP.Client (calcV)
import Zero.Crypto (hexBS)
import Zero.Crypto.Client (generateBytes)
import Zero.Account.Client (Credentials(..))
import Zero.Registration.Internal (Registration(..))
------------------------------------------------------------------------------
-- On every (email, stretched-password) event, derive fresh credentials:
-- generate a random 32-byte salt, expand the stretched password to
-- 64 bytes with HKDF (salted with srpSalt, email as the info string),
-- and split the output into two 32-byte halves p and w.
-- NOTE(review): the halves are passed positionally to 'Credentials';
-- confirm field order (salt, then p, then w) against its definition.
initCredentialsHkdf :: MonadWidget t m => Event t (Text, Text) -> m (Event t Credentials)
initCredentialsHkdf e =
  performEvent $ ffor e $ \(email, stretchedPass) -> liftIO $ do
    srpSalt <- generateBytes 32
    stretched <- hkdf (Hex.toBits stretchedPass) (64 * 8) (Hex.toBits srpSalt) email
    let p = bitSlice stretched 0 (32 * 8)
    let w = bitSlice stretched (32 * 8) (64 * 8)
    return $ Credentials email srpSalt (Hex.fromBits p) (Hex.fromBits w)
-- | Build a registration payload from credentials: compute the SRP
-- verifier from the email, salt and derived password, keep the salt.
bindRegistration :: Credentials -> Registration
bindRegistration Credentials{..} =
  Registration c_email verifier c_salt
  where
    verifier =
      T.pack (calcV (T.unpack c_email) (T.unpack c_salt) (T.unpack c_pass))
|
et4te/zero
|
src/Zero/Registration/Client.hs
|
bsd-3-clause
| 1,463 | 0 | 15 | 314 | 454 | 252 | 202 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcMonoType]{Typechecking user-specified @MonoTypes@}
-}
{-# LANGUAGE CPP, TupleSections, MultiWayIf, RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-}
module TcHsType (
-- Type signatures
kcClassSigType, tcClassSigType,
tcHsSigType, tcHsSigWcType,
tcHsPartialSigType,
tcStandaloneKindSig,
funsSigCtxt, addSigCtxt, pprSigCtxt,
tcHsClsInstType,
tcHsDeriv, tcDerivStrategy,
tcHsTypeApp,
UserTypeCtxt(..),
bindImplicitTKBndrs_Tv, bindImplicitTKBndrs_Skol,
bindImplicitTKBndrs_Q_Tv, bindImplicitTKBndrs_Q_Skol,
bindExplicitTKBndrs_Tv, bindExplicitTKBndrs_Skol,
bindExplicitTKBndrs_Q_Tv, bindExplicitTKBndrs_Q_Skol,
ContextKind(..),
-- Type checking type and class decls
kcLookupTcTyCon, bindTyClTyVars,
etaExpandAlgTyCon, tcbVisibilities,
-- tyvars
zonkAndScopedSort,
-- Kind-checking types
-- No kind generalisation, no checkValidType
InitialKindStrategy(..),
SAKS_or_CUSK(..),
kcDeclHeader,
tcNamedWildCardBinders,
tcHsLiftedType, tcHsOpenType,
tcHsLiftedTypeNC, tcHsOpenTypeNC,
tcLHsType, tcLHsTypeUnsaturated, tcCheckLHsType,
tcHsMbContext, tcHsContext, tcLHsPredType, tcInferApps,
failIfEmitsConstraints,
solveEqualities, -- useful re-export
typeLevelMode, kindLevelMode,
kindGeneralizeAll, kindGeneralizeSome, kindGeneralizeNone,
checkExpectedKind_pp,
-- Sort-checking kinds
tcLHsKindSig, checkDataKindSig, DataSort(..),
checkClassKindSig,
-- Pattern type signatures
tcHsPatSigType, tcPatSig,
-- Error messages
funAppCtxt, addTyConFlavCtxt
) where
#include "HsVersions.h"
import GhcPrelude
import GHC.Hs
import TcRnMonad
import TcOrigin
import Predicate
import Constraint
import TcEvidence
import TcEnv
import TcMType
import TcValidity
import TcUnify
import GHC.IfaceToCore
import TcSimplify
import TcHsSyn
import TyCoRep
import TyCoPpr
import TcErrors ( reportAllUnsolved )
import TcType
import Inst ( tcInstInvisibleTyBinders, tcInstInvisibleTyBinder )
import Type
import TysPrim
import RdrName( lookupLocalRdrOcc )
import Var
import VarSet
import TyCon
import ConLike
import DataCon
import Class
import Name
-- import NameSet
import VarEnv
import TysWiredIn
import BasicTypes
import SrcLoc
import Constants ( mAX_CTUPLE_SIZE )
import ErrUtils( MsgDoc )
import Unique
import UniqSet
import Util
import UniqSupply
import Outputable
import FastString
import PrelNames hiding ( wildCardName )
import DynFlags
import qualified GHC.LanguageExtensions as LangExt
import Maybes
import Data.List ( find )
import Control.Monad
{-
----------------------------
General notes
----------------------------
Unlike with expressions, type-checking types both does some checking and
desugars at the same time. This is necessary because we often want to perform
equality checks on the types right away, and it would be incredibly painful
to do this on un-desugared types. Luckily, desugared types are close enough
to HsTypes to make the error messages sane.
During type-checking, we perform as little validity checking as possible.
Generally, after type-checking, you will want to do validity checking, say
with TcValidity.checkValidType.
Validity checking
~~~~~~~~~~~~~~~~~
Some of the validity check could in principle be done by the kind checker,
but not all:
- During desugaring, we normalise by expanding type synonyms. Only
after this step can we check things like type-synonym saturation
e.g. type T k = k Int
type S a = a
Then (T S) is ok, because T is saturated; (T S) expands to (S Int);
and then S is saturated. This is a GHC extension.
- Similarly, also a GHC extension, we look through synonyms before complaining
about the form of a class or instance declaration
- Ambiguity checks involve functional dependencies
Also, in a mutually recursive group of types, we can't look at the TyCon until we've
finished building the loop. So to keep things simple, we postpone most validity
checking until step (3).
%************************************************************************
%* *
Check types AND do validity checking
* *
************************************************************************
-}
funsSigCtxt :: [Located Name] -> UserTypeCtxt
-- Returns FunSigCtxt, with no redundant-context-reporting,
-- from a list of located names; the list must be non-empty
-- (fix: panic message previously said "funSigCtxt", the wrong name)
funsSigCtxt (L _ name1 : _) = FunSigCtxt name1 False
funsSigCtxt []              = panic "funsSigCtxt"
-- | Run the given checker with the source span set to the signature's
-- location and an error-context frame describing the signature.
addSigCtxt :: UserTypeCtxt -> LHsType GhcRn -> TcM a -> TcM a
addSigCtxt ctxt hs_ty thing_inside =
  setSrcSpan (getLoc hs_ty) $
  addErrCtxt (pprSigCtxt ctxt hs_ty) thing_inside
pprSigCtxt :: UserTypeCtxt -> LHsType GhcRn -> SDoc
-- (pprSigCtxt ctxt <extra> <type>)
-- prints   In the type signature for 'f':
--              f :: <type>
-- The <extra> is either empty or "the ambiguity check for"
-- When the context carries a name ('isSigMaybe'), show it; otherwise
-- fall back to the generic description of the context.
pprSigCtxt ctxt hs_ty
  | Just n <- isSigMaybe ctxt
  = hang (text "In the type signature:")
       2 (pprPrefixOcc n <+> dcolon <+> ppr hs_ty)
  | otherwise
  = hang (text "In" <+> pprUserTypeCtxt ctxt <> colon)
       2 (ppr hs_ty)
tcHsSigWcType :: UserTypeCtxt -> LHsSigWcType GhcRn -> TcM Type
-- This one is used when we have a LHsSigWcType, but in
-- a place where wildcards aren't allowed. The renamer has
-- already checked this, so we can simply ignore it.
-- (dropWildCards discards the wildcard wrapper before checking.)
tcHsSigWcType ctxt sig_ty = tcHsSigType ctxt (dropWildCards sig_ty)
-- Kind-check a class method signature during the kind-checking pass,
-- discarding the elaborated type.
kcClassSigType :: SkolemInfo -> [Located Name] -> LHsSigType GhcRn -> TcM ()
kcClassSigType skol_info names sig_ty
  = discardResult $
    tcClassSigType skol_info names sig_ty
  -- tcClassSigType does a fair amount of extra work that we don't need,
  -- such as ordering quantified variables. But we absolutely do need
  -- to push the level when checking method types and solve local equalities,
  -- and so it seems easier just to call tcClassSigType than selectively
  -- extract the lines of code from tc_hs_sig_type that we really need.
  -- If we don't push the level, we get #16517, where GHC accepts
  --   class C a where
  --     meth :: forall k. Proxy (a :: k) -> ()
  -- Note that k is local to meth -- this is hogwash.
-- Typecheck a class method signature at kind liftedTypeKind.
tcClassSigType :: SkolemInfo -> [Located Name] -> LHsSigType GhcRn -> TcM Type
-- Does not do validity checking
tcClassSigType skol_info names sig_ty
  = addSigCtxt (funsSigCtxt names) (hsSigType sig_ty) $
    snd <$> tc_hs_sig_type skol_info sig_ty (TheKind liftedTypeKind)
       -- Do not zonk-to-Type, nor perform a validity check
       -- We are in a knot with the class and associated types
       -- Zonking and validity checking is done by tcClassDecl
       -- No need to fail here if the type has an error:
       --   If we're in the kind-checking phase, the solveEqualities
       --     in kcTyClGroup catches the error
       --   If we're in the type-checking phase, the solveEqualities
       --     in tcClassDecl1 gets it
       -- Failing fast here degrades the error message in, e.g., tcfail135:
       --   class Foo f where
       --     baa :: f a -> f
       -- If we fail fast, we're told that f has kind `k1` when we wanted `*`.
       -- It should be that f has kind `k2 -> *`, but we never get a chance
       -- to run the solver where the kind of f is touchable. This is
       -- painfully delicate.
-- Typecheck a user-written type signature: kind-check, generalise,
-- zonk, and validity-check it, failing fast on insoluble kind equalities.
tcHsSigType :: UserTypeCtxt -> LHsSigType GhcRn -> TcM Type
-- Does validity checking
-- See Note [Recipe for checking a signature]
tcHsSigType ctxt sig_ty
  = addSigCtxt ctxt (hsSigType sig_ty) $
    do { traceTc "tcHsSigType {" (ppr sig_ty)
          -- Generalise here: see Note [Kind generalisation]
       ; (insol, ty) <- tc_hs_sig_type skol_info sig_ty
                                       (expectedKindInCtxt ctxt)
       ; ty <- zonkTcType ty
       ; when insol failM
       -- See Note [Fail fast if there are insoluble kind equalities] in TcSimplify
       ; checkValidType ctxt ty
       ; traceTc "end tcHsSigType }" (ppr ty)
       ; return ty }
  where
    skol_info = SigTypeSkol ctxt
-- Does validity checking and zonking.
-- Typecheck a standalone kind signature (type T :: kind), returning
-- the name it is for together with its checked kind.
tcStandaloneKindSig :: LStandaloneKindSig GhcRn -> TcM (Name, Kind)
tcStandaloneKindSig (L _ kisig) = case kisig of
  StandaloneKindSig _ (L _ name) ksig ->
    let ctxt = StandaloneKindSigCtxt name in
    addSigCtxt ctxt (hsSigType ksig) $
    do { kind <- tcTopLHsType kindLevelMode ksig (expectedKindInCtxt ctxt)
       ; checkValidType ctxt kind
       ; return (name, kind) }
  XStandaloneKindSig nec -> noExtCon nec
tc_hs_sig_type :: SkolemInfo -> LHsSigType GhcRn
               -> ContextKind -> TcM (Bool, TcType)
-- Kind-checks/desugars an 'LHsSigType',
--   solve equalities,
--   and then kind-generalizes.
-- This will never emit constraints, as it uses solveEqualities internally.
-- No validity checking or zonking
-- Returns also a Bool indicating whether the type induced an insoluble constraint;
--    True <=> constraint is insoluble
tc_hs_sig_type skol_info hs_sig_type ctxt_kind
  | HsIB { hsib_ext = sig_vars, hsib_body = hs_ty } <- hs_sig_type
  = do { (tc_lvl, (wanted, (spec_tkvs, ty)))
              <- pushTcLevelM $
                 solveLocalEqualitiesX "tc_hs_sig_type" $
                 bindImplicitTKBndrs_Skol sig_vars $
                 do { kind <- newExpectedKind ctxt_kind
                    ; tc_lhs_type typeLevelMode hs_ty kind }
       -- Any remaining variables (unsolved in the solveLocalEqualities)
       -- should be in the global tyvars, and therefore won't be quantified
       ; spec_tkvs <- zonkAndScopedSort spec_tkvs
       ; let ty1 = mkSpecForAllTys spec_tkvs ty
       -- This bit is very much like decideMonoTyVars in TcSimplify,
       -- but constraints are so much simpler in kinds, it is much
       -- easier here. (In particular, we never quantify over a
       -- constraint in a type.)
       ; constrained <- zonkTyCoVarsAndFV (tyCoVarsOfWC wanted)
       ; let should_gen = not . (`elemVarSet` constrained)
       -- Generalise only the kind variables not fixed by the residual constraints.
       ; kvs <- kindGeneralizeSome should_gen ty1
       ; emitResidualTvConstraint skol_info Nothing (kvs ++ spec_tkvs)
                    tc_lvl wanted
       ; return (insolubleWC wanted, mkInvForAllTys kvs ty1) }
tc_hs_sig_type _ (XHsImplicitBndrs nec) _ = noExtCon nec
tcTopLHsType :: TcTyMode -> LHsSigType GhcRn -> ContextKind -> TcM Type
-- tcTopLHsType is used for kind-checking top-level HsType where
--   we want to fully solve /all/ equalities, and report errors
-- Does zonking, but not validity checking because it's used
--   for things (like deriving and instances) that aren't
--   ordinary types
tcTopLHsType mode hs_sig_type ctxt_kind
  | HsIB { hsib_ext = sig_vars, hsib_body = hs_ty } <- hs_sig_type
  = do { traceTc "tcTopLHsType {" (ppr hs_ty)
       ; (spec_tkvs, ty)
              <- pushTcLevelM_ $
                 solveEqualities $
                 bindImplicitTKBndrs_Skol sig_vars $
                 do { kind <- newExpectedKind ctxt_kind
                    ; tc_lhs_type mode hs_ty kind }
       -- Sort the implicitly-bound skolems into dependency order.
       ; spec_tkvs <- zonkAndScopedSort spec_tkvs
       ; let ty1 = mkSpecForAllTys spec_tkvs ty
       ; kvs <- kindGeneralizeAll ty1  -- "All" because it's a top-level type
       ; final_ty <- zonkTcTypeToType (mkInvForAllTys kvs ty1)
       ; traceTc "End tcTopLHsType }" (vcat [ppr hs_ty, ppr final_ty])
       ; return final_ty}
tcTopLHsType _ (XHsImplicitBndrs nec) _ = noExtCon nec
-----------------
tcHsDeriv :: LHsSigType GhcRn -> TcM ([TyVar], Class, [Type], [Kind])
-- Like tcHsSigType, but for the ...deriving( C t1 ty2 ) clause
-- Returns the C, [ty1, ty2, and the kinds of C's remaining arguments
-- E.g.    class C (a::*) (b::k->k)
--         data T a b = ... deriving( C Int )
--    returns ([k], C, [k, Int], [k->k])
-- Return values are fully zonked
tcHsDeriv hs_ty
  = do { ty <- checkNoErrs $ -- Avoid redundant error report
                             -- with "illegal deriving", below
               tcTopLHsType typeLevelMode hs_ty AnyKind
       ; let (tvs, pred) = splitForAllTys ty
             (kind_args, _) = splitFunTys (tcTypeKind pred)
       -- 'pred' must be a class applied to some arguments, else it is
       -- not a legal deriving item.
       ; case getClassPredTys_maybe pred of
           Just (cls, tys) -> return (tvs, cls, tys, kind_args)
           Nothing -> failWithTc (text "Illegal deriving item" <+> quotes (ppr hs_ty)) }
-- | Typecheck a deriving strategy. For most deriving strategies, this is a
-- no-op, but for the @via@ strategy, this requires typechecking the @via@ type.
tcDerivStrategy ::
     Maybe (LDerivStrategy GhcRn)
     -- ^ The deriving strategy
  -> TcM (Maybe (LDerivStrategy GhcTc), [TyVar])
     -- ^ The typechecked deriving strategy and the tyvars that it binds
     -- (if using 'ViaStrategy').
tcDerivStrategy mb_lds
  = case mb_lds of
      Nothing -> boring_case Nothing
      Just (L loc ds) ->
        setSrcSpan loc $ do
          (ds', tvs) <- tc_deriv_strategy ds
          pure (Just (L loc ds'), tvs)
  where
    tc_deriv_strategy :: DerivStrategy GhcRn
                      -> TcM (DerivStrategy GhcTc, [TyVar])
    tc_deriv_strategy StockStrategy    = boring_case StockStrategy
    tc_deriv_strategy AnyclassStrategy = boring_case AnyclassStrategy
    tc_deriv_strategy NewtypeStrategy  = boring_case NewtypeStrategy
    tc_deriv_strategy (ViaStrategy ty) = do
      -- The @via@ type is checked like a top-level type; its foralls
      -- become the tyvars this strategy binds.
      ty' <- checkNoErrs $ tcTopLHsType typeLevelMode ty AnyKind
      let (via_tvs, via_pred) = splitForAllTys ty'
      pure (ViaStrategy via_pred, via_tvs)
    -- Strategies with nothing to typecheck bind no tyvars.
    boring_case :: ds -> TcM (ds, [TyVar])
    boring_case ds = pure (ds, [])
tcHsClsInstType :: UserTypeCtxt    -- InstDeclCtxt or SpecInstCtxt
                -> LHsSigType GhcRn
                -> TcM Type
-- Like tcHsSigType, but for a class instance declaration
-- (checked at kind Constraint, then validated as an instance head).
tcHsClsInstType user_ctxt hs_inst_ty
  = setSrcSpan (getLoc (hsSigType hs_inst_ty)) $
    do { -- Fail eagerly if tcTopLHsType fails.  We are at top level so
         -- these constraints will never be solved later. And failing
         -- eagerly avoids follow-on errors when checkValidInstance
         -- sees an unsolved coercion hole
         inst_ty <- checkNoErrs $
                    tcTopLHsType typeLevelMode hs_inst_ty (TheKind constraintKind)
       ; checkValidInstance user_ctxt hs_inst_ty inst_ty
       ; return inst_ty }
----------------------------------------------
-- | Type-check a visible type application
-- Typecheck the type argument of a visible type application (f @ty).
tcHsTypeApp :: LHsWcType GhcRn -> Kind -> TcM Type
-- See Note [Recipe for checking a signature] in TcHsType
tcHsTypeApp wc_ty kind
  | HsWC { hswc_ext = sig_wcs, hswc_body = hs_ty } <- wc_ty
  = do { ty <- solveLocalEqualities "tcHsTypeApp" $
               -- We are looking at a user-written type, very like a
               -- signature so we want to solve its equalities right now
               unsetWOptM Opt_WarnPartialTypeSignatures $
               setXOptM LangExt.PartialTypeSignatures $
               -- See Note [Wildcards in visible type application]
               tcNamedWildCardBinders sig_wcs $ \ _ ->
               tcCheckLHsType hs_ty kind
       -- We do not kind-generalize type applications: we just
       -- instantiate with exactly what the user says.
       -- See Note [No generalization in type application]
       -- We still must call kindGeneralizeNone, though, according
       -- to Note [Recipe for checking a signature]
       ; kindGeneralizeNone ty
       ; ty <- zonkTcType ty
       ; checkValidType TypeAppCtxt ty
       ; return ty }
tcHsTypeApp (XHsWildCardBndrs nec) _ = noExtCon nec
{- Note [Wildcards in visible type application]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A HsWildCardBndrs's hswc_ext now only includes /named/ wildcards, so
any unnamed wildcards stay unchanged in hswc_body. When called in
tcHsTypeApp, tcCheckLHsType will call emitAnonWildCardHoleConstraint
on these anonymous wildcards. However, this would trigger
error/warning when an anonymous wildcard is passed in as a visible type
argument, which we do not want because users should be able to write
@_ to skip instantiating a type variable without fuss. The
solution is to switch the PartialTypeSignatures flags here to let the
typechecker know that it's checking a '@_' and do not emit hole
constraints on it. See related Note [Wildcards in visible kind
application] and Note [The wildcard story for types] in GHC.Hs.Types
Ugh!
Note [No generalization in type application]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not kind-generalize type applications. Imagine
id @(Proxy Nothing)
If we kind-generalized, we would get
id @(forall {k}. Proxy @(Maybe k) (Nothing @k))
which is very sneakily impredicative instantiation.
There is also the possibility of mentioning a wildcard
(`id @(Proxy _)`), which definitely should not be kind-generalized.
-}
{-
************************************************************************
* *
The main kind checker: no validity checks here
* *
************************************************************************
-}
---------------------------
tcHsOpenType, tcHsLiftedType,
   tcHsOpenTypeNC, tcHsLiftedTypeNC :: LHsType GhcRn -> TcM TcType
-- Used for type signatures
-- Do not do validity checking
-- The "NC" variants do not push an error context ("No Context");
-- the plain variants wrap the NC ones with 'addTypeCtxt'.
tcHsOpenType ty   = addTypeCtxt ty $ tcHsOpenTypeNC ty
tcHsLiftedType ty = addTypeCtxt ty $ tcHsLiftedTypeNC ty
-- "Open" checks against a fresh open-type kind (any runtime rep);
-- "Lifted" checks against liftedTypeKind.
tcHsOpenTypeNC   ty = do { ek <- newOpenTypeKind
                         ; tc_lhs_type typeLevelMode ty ek }
tcHsLiftedTypeNC ty = tc_lhs_type typeLevelMode ty liftedTypeKind
-- Like tcHsType, but takes an expected kind
-- Check a type against an expected kind, adding an error context.
tcCheckLHsType :: LHsType GhcRn -> Kind -> TcM TcType
tcCheckLHsType hs_ty exp_kind
  = addTypeCtxt hs_ty $
    tc_lhs_type typeLevelMode hs_ty exp_kind
-- Infer the kind of a type (the "up" direction of bidirectional checking).
tcLHsType :: LHsType GhcRn -> TcM (TcType, TcKind)
-- Called from outside: set the context
tcLHsType ty = addTypeCtxt ty (tc_infer_lhs_type typeLevelMode ty)
-- Like tcLHsType, but use it in a context where type synonyms and type families
-- do not need to be saturated, like in a GHCi :kind call
-- Like tcLHsType, but use it in a context where type synonyms and type families
-- do not need to be saturated, like in a GHCi :kind call
tcLHsTypeUnsaturated :: LHsType GhcRn -> TcM (TcType, TcKind)
tcLHsTypeUnsaturated hs_ty
  | Just (hs_fun_ty, hs_args) <- splitHsAppTys (unLoc hs_ty)
  = addTypeCtxt hs_ty $
    do { (fun_ty, _ki) <- tcInferAppHead mode hs_fun_ty
       ; tcInferApps_nosat mode hs_fun_ty fun_ty hs_args }
         -- Notice the 'nosat'; do not instantiate trailing
         -- invisible arguments of a type family.
         -- See Note [Dealing with :kind]
  | otherwise
  = addTypeCtxt hs_ty $
    tc_infer_lhs_type mode hs_ty
  where
    mode = typeLevelMode
{- Note [Dealing with :kind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this GHCi command
ghci> type family F :: Either j k
ghci> :kind F
F :: forall {j,k}. Either j k
We will only get the 'forall' if we /refrain/ from saturating those
invisible binders. But generally we /do/ saturate those invisible
binders (see tcInferApps), and we want to do so for nested application
even in GHCi. Consider for example (#16287)
ghci> type family F :: k
ghci> data T :: (forall k. k) -> Type
ghci> :kind T F
We want to reject this. It's just at the very top level that we want
to switch off saturation.
So tcLHsTypeUnsaturated does a little special case for top level
applications. Actually the common case is a bare variable, as above.
************************************************************************
* *
Type-checking modes
* *
************************************************************************
The kind-checker is parameterised by a TcTyMode, which contains some
information about where we're checking a type.
The renamer issues errors about what it can. All errors issued here must
concern things that the renamer can't handle.
-}
-- | Info about the context in which we're checking a type. Currently,
-- differentiates only between types and kinds, but this will likely
-- grow, at least to include the distinction between patterns and
-- not-patterns.
--
-- To find out where the mode is used, search for 'mode_level'
-- Currently the mode records only whether we are checking a type or a
-- kind; see the file-level comment above for where it is consulted.
data TcTyMode = TcTyMode { mode_level :: TypeOrKind }

typeLevelMode :: TcTyMode
typeLevelMode = TcTyMode { mode_level = TypeLevel }

kindLevelMode :: TcTyMode
kindLevelMode = TcTyMode { mode_level = KindLevel }

-- switch to kind level
kindLevel :: TcTyMode -> TcTyMode
kindLevel mode = mode { mode_level = KindLevel }

instance Outputable TcTyMode where
  ppr = ppr . mode_level
{-
Note [Bidirectional type checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In expressions, whenever we see a polymorphic identifier, say `id`, we are
free to instantiate it with metavariables, knowing that we can always
re-generalize with type-lambdas when necessary. For example:
rank2 :: (forall a. a -> a) -> ()
x = rank2 id
When checking the body of `x`, we can instantiate `id` with a metavariable.
Then, when we're checking the application of `rank2`, we notice that we really
need a polymorphic `id`, and then re-generalize over the unconstrained
metavariable.
In types, however, we're not so lucky, because *we cannot re-generalize*!
There is no lambda. So, we must be careful only to instantiate at the last
possible moment, when we're sure we're never going to want the lost polymorphism
again. This is done in calls to tcInstInvisibleTyBinders.
To implement this behavior, we use bidirectional type checking, where we
explicitly think about whether we know the kind of the type we're checking
or not. Note that there is a difference between not knowing a kind and
knowing a metavariable kind: the metavariables are TauTvs, and cannot become
forall-quantified kinds. Previously (before dependent types), there were
no higher-rank kinds, and so we could instantiate early and be sure that
no types would have polymorphic kinds, and so we could always assume that
the kind of a type was a fresh metavariable. Not so anymore, thus the
need for two algorithms.
For HsType forms that can never be kind-polymorphic, we implement only the
"down" direction, where we safely assume a metavariable kind. For HsType forms
that *can* be kind-polymorphic, we implement just the "up" (functions with
"infer" in their name) version, as we gain nothing by also implementing the
"down" version.
Note [Future-proofing the type checker]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As discussed in Note [Bidirectional type checking], each HsType form is
handled in *either* tc_infer_hs_type *or* tc_hs_type. These functions
are mutually recursive, so that either one can work for any type former.
But, we want to make sure that our pattern-matches are complete. So,
we have a bunch of repetitive code just so that we get warnings if we're
missing any patterns.
-}
------------------------------------------
-- | Check and desugar a type, returning the core type and its
-- possibly-polymorphic kind. Much like 'tcInferRho' at the expression
-- level.
-- Located wrapper for 'tc_infer_hs_type': set the source span first.
tc_infer_lhs_type :: TcTyMode -> LHsType GhcRn -> TcM (TcType, TcKind)
tc_infer_lhs_type mode (L span ty)
  = setSrcSpan span $
    tc_infer_hs_type mode ty
---------------------------
-- | Call 'tc_infer_hs_type' and check its result against an expected kind.
-- | Call 'tc_infer_hs_type' and check its result against an expected kind.
tc_infer_hs_type_ek :: HasDebugCallStack => TcTyMode -> HsType GhcRn -> TcKind -> TcM TcType
tc_infer_hs_type_ek mode hs_ty ek
  = do { (ty, k) <- tc_infer_hs_type mode hs_ty
       ; checkExpectedKind hs_ty ty k ek }
---------------------------
-- | Infer the kind of a type and desugar. This is the "up" type-checker,
-- as described in Note [Bidirectional type checking]
-- | Infer the kind of a type and desugar. This is the "up" type-checker,
-- as described in Note [Bidirectional type checking].
-- Forms not handled here fall through to the last equation, which
-- checks against a fresh meta-kind via the "down" checker 'tc_hs_type'.
tc_infer_hs_type :: TcTyMode -> HsType GhcRn -> TcM (TcType, TcKind)
tc_infer_hs_type mode (HsParTy _ t)
  = tc_infer_lhs_type mode t
tc_infer_hs_type mode ty
  | Just (hs_fun_ty, hs_args) <- splitHsAppTys ty
  = do { (fun_ty, _ki) <- tcInferAppHead mode hs_fun_ty
       ; tcInferApps mode hs_fun_ty fun_ty hs_args }
tc_infer_hs_type mode (HsKindSig _ ty sig)
  = do { sig' <- tcLHsKindSig KindSigCtxt sig
                 -- We must typecheck the kind signature, and solve all
                 -- its equalities etc; from this point on we may do
                 -- things like instantiate its foralls, so it needs
                 -- to be fully determined (#14904)
       ; traceTc "tc_infer_hs_type:sig" (ppr ty $$ ppr sig')
       ; ty' <- tc_lhs_type mode ty sig'
       ; return (ty', sig') }
-- HsSpliced is an annotation produced by 'GHC.Rename.Splice.rnSpliceType' to communicate
-- the splice location to the typechecker. Here we skip over it in order to have
-- the same kind inferred for a given expression whether it was produced from
-- splices or not.
--
-- See Note [Delaying modFinalizers in untyped splices].
tc_infer_hs_type mode (HsSpliceTy _ (HsSpliced _ _ (HsSplicedTy ty)))
  = tc_infer_hs_type mode ty
tc_infer_hs_type mode (HsDocTy _ ty _) = tc_infer_lhs_type mode ty
tc_infer_hs_type _ (XHsType (NHsCoreTy ty))
  = return (ty, tcTypeKind ty)
tc_infer_hs_type _ (HsExplicitListTy _ _ tys)
  | null tys  -- this is so that we can use visible kind application with '[]
              -- e.g ... '[] @Bool
  = return (mkTyConTy promotedNilDataCon,
            mkSpecForAllTys [alphaTyVar] $ mkListTy alphaTy)
tc_infer_hs_type mode other_ty
  = do { kv <- newMetaKindVar
       ; ty' <- tc_hs_type mode other_ty kv
       ; return (ty', kv) }
------------------------------------------
-- Located wrapper for 'tc_hs_type': set the source span first.
tc_lhs_type :: TcTyMode -> LHsType GhcRn -> TcKind -> TcM TcType
tc_lhs_type mode (L span ty) exp_kind
  = setSrcSpan span $
    tc_hs_type mode ty exp_kind
-- | Kind-check an (un-located) 'HsType' against an expected kind
-- ("checking mode").  Constructs whose kind is best inferred (variables,
-- applications, kind signatures, ...) fall through to
-- 'tc_infer_hs_type_ek' at the bottom, which infers a kind and then
-- checks it against the expectation.
tc_hs_type :: TcTyMode -> HsType GhcRn -> TcKind -> TcM TcType
-- See Note [Bidirectional type checking]
tc_hs_type mode (HsParTy _ ty)   exp_kind = tc_lhs_type mode ty exp_kind
tc_hs_type mode (HsDocTy _ ty _) exp_kind = tc_lhs_type mode ty exp_kind
tc_hs_type _ ty@(HsBangTy _ bang _) _
    -- While top-level bangs at this point are eliminated (eg !(Maybe Int)),
    -- other kinds of bangs are not (eg ((!Maybe) Int)). These kinds of
    -- bangs are invalid, so fail. (#7210, #14761)
    = do { let bangError err = failWith $
             text "Unexpected" <+> text err <+> text "annotation:" <+> ppr ty $$
             text err <+> text "annotation cannot appear nested inside a type"
         ; case bang of
             HsSrcBang _ SrcUnpack _           -> bangError "UNPACK"
             HsSrcBang _ SrcNoUnpack _         -> bangError "NOUNPACK"
             HsSrcBang _ NoSrcUnpack SrcLazy   -> bangError "laziness"
             HsSrcBang _ _ _                   -> bangError "strictness" }
tc_hs_type _ ty@(HsRecTy {})      _
      -- Record types (which only show up temporarily in constructor
      -- signatures) should have been removed by now
    = failWithTc (text "Record syntax is illegal here:" <+> ppr ty)
-- HsSpliced is an annotation produced by 'GHC.Rename.Splice.rnSpliceType'.
-- Here we get rid of it and add the finalizers to the global environment
-- while capturing the local environment.
--
-- See Note [Delaying modFinalizers in untyped splices].
tc_hs_type mode (HsSpliceTy _ (HsSpliced _ mod_finalizers (HsSplicedTy ty)))
           exp_kind
  = do addModFinalizersWithLclEnv mod_finalizers
       tc_hs_type mode ty exp_kind
-- This should never happen; type splices are expanded by the renamer
tc_hs_type _ ty@(HsSpliceTy {}) _exp_kind
  = failWithTc (text "Unexpected type splice:" <+> ppr ty)
---------- Functions and applications
tc_hs_type mode (HsFunTy _ ty1 ty2) exp_kind
  = tc_fun_type mode ty1 ty2 exp_kind
tc_hs_type mode (HsOpTy _ ty1 (L _ op) ty2) exp_kind
  -- An infix use of the function arrow, e.g. (a `(->)` b)
  | op `hasKey` funTyConKey
  = tc_fun_type mode ty1 ty2 exp_kind
--------- Foralls
tc_hs_type mode forall@(HsForAllTy { hst_fvf = fvf, hst_bndrs = hs_tvs
                                   , hst_body = ty }) exp_kind
  = do { (tclvl, wanted, (tvs', ty'))
            <- pushLevelAndCaptureConstraints $
               bindExplicitTKBndrs_Skol hs_tvs $
               tc_lhs_type mode ty exp_kind
    -- Do not kind-generalise here!  See Note [Kind generalisation]
    -- Why exp_kind?  See Note [Body kind of HsForAllTy]
       ; let argf        = case fvf of
                             ForallVis   -> Required
                             ForallInvis -> Specified
             bndrs       = mkTyVarBinders argf tvs'
             skol_info   = ForAllSkol (ppr forall)
             m_telescope = Just (sep (map ppr hs_tvs))
       ; emitResidualTvConstraint skol_info m_telescope tvs' tclvl wanted
       ; return (mkForAllTys bndrs ty') }
tc_hs_type mode (HsQualTy { hst_ctxt = ctxt, hst_body = rn_ty }) exp_kind
  | null (unLoc ctxt)
  = tc_lhs_type mode rn_ty exp_kind
  -- See Note [Body kind of a HsQualTy]
  | tcIsConstraintKind exp_kind
  = do { ctxt' <- tc_hs_context mode ctxt
       ; ty'   <- tc_lhs_type mode rn_ty constraintKind
       ; return (mkPhiTy ctxt' ty') }
  | otherwise
  = do { ctxt' <- tc_hs_context mode ctxt
       ; ek <- newOpenTypeKind  -- The body kind (result of the function) can
                                -- be TYPE r, for any r, hence newOpenTypeKind
       ; ty' <- tc_lhs_type mode rn_ty ek
       ; checkExpectedKind (unLoc rn_ty) (mkPhiTy ctxt' ty')
                           liftedTypeKind exp_kind }
--------- Lists, arrays, and tuples
tc_hs_type mode rn_ty@(HsListTy _ elt_ty) exp_kind
  = do { tau_ty <- tc_lhs_type mode elt_ty liftedTypeKind
       ; checkWiredInTyCon listTyCon
       ; checkExpectedKind rn_ty (mkListTy tau_ty) liftedTypeKind exp_kind }
-- See Note [Distinguishing tuple kinds] in GHC.Hs.Types
-- See Note [Inferring tuple kinds]
tc_hs_type mode rn_ty@(HsTupleTy _ HsBoxedOrConstraintTuple hs_tys) exp_kind
     -- (NB: not zonking before looking at exp_k, to avoid left-right bias)
  | Just tup_sort <- tupKindSort_maybe exp_kind
  = traceTc "tc_hs_type tuple" (ppr hs_tys) >>
    tc_tuple rn_ty mode tup_sort hs_tys exp_kind
  | otherwise
  = do { traceTc "tc_hs_type tuple 2" (ppr hs_tys)
       ; (tys, kinds) <- mapAndUnzipM (tc_infer_lhs_type mode) hs_tys
       ; kinds <- mapM zonkTcType kinds
           -- Infer each arg type separately, because errors can be
           -- confusing if we give them a shared kind.  Eg #7410
           -- (Either Int, Int), we do not want to get an error saying
           -- "the second argument of a tuple should have kind *->*"
       ; let (arg_kind, tup_sort)
               = case [ (k,s) | k <- kinds
                              , Just s <- [tupKindSort_maybe k] ] of
                    ((k,s) : _) -> (k,s)
                    [] -> (liftedTypeKind, BoxedTuple)
         -- In the [] case, it's not clear what the kind is, so guess *
       ; tys' <- sequence [ setSrcSpan loc $
                            checkExpectedKind hs_ty ty kind arg_kind
                          | ((L loc hs_ty),ty,kind) <- zip3 hs_tys tys kinds ]
       ; finish_tuple rn_ty tup_sort tys' (map (const arg_kind) tys') exp_kind }
tc_hs_type mode rn_ty@(HsTupleTy _ hs_tup_sort tys) exp_kind
  = tc_tuple rn_ty mode tup_sort tys exp_kind
  where
    tup_sort = case hs_tup_sort of  -- Fourth case dealt with above
                  HsUnboxedTuple    -> UnboxedTuple
                  HsBoxedTuple      -> BoxedTuple
                  HsConstraintTuple -> ConstraintTuple
                  _                 -> panic "tc_hs_type HsTupleTy"
tc_hs_type mode rn_ty@(HsSumTy _ hs_tys) exp_kind
  = do { let arity = length hs_tys
       ; arg_kinds <- mapM (\_ -> newOpenTypeKind) hs_tys
       ; tau_tys   <- zipWithM (tc_lhs_type mode) hs_tys arg_kinds
       ; let arg_reps = map kindRep arg_kinds
             arg_tys  = arg_reps ++ tau_tys
             sum_ty   = mkTyConApp (sumTyCon arity) arg_tys
             sum_kind = unboxedSumKind arg_reps
       ; checkExpectedKind rn_ty sum_ty sum_kind exp_kind
       }
--------- Promoted lists and tuples
tc_hs_type mode rn_ty@(HsExplicitListTy _ _ tys) exp_kind
  = do { tks <- mapM (tc_infer_lhs_type mode) tys
       ; (taus', kind) <- unifyKinds tys tks
       ; let ty = (foldr (mk_cons kind) (mk_nil kind) taus')
       ; checkExpectedKind rn_ty ty (mkListTy kind) exp_kind }
  where
    mk_cons k a b = mkTyConApp (promoteDataCon consDataCon) [k, a, b]
    mk_nil  k     = mkTyConApp (promoteDataCon nilDataCon) [k]
tc_hs_type mode rn_ty@(HsExplicitTupleTy _ tys) exp_kind
  -- using newMetaKindVar means that we force instantiations of any polykinded
  -- types. At first, I just used tc_infer_lhs_type, but that led to #11255.
  = do { ks   <- replicateM arity newMetaKindVar
       ; taus <- zipWithM (tc_lhs_type mode) tys ks
       ; let kind_con   = tupleTyCon           Boxed arity
             ty_con     = promotedTupleDataCon Boxed arity
             tup_k      = mkTyConApp kind_con ks
       ; checkExpectedKind rn_ty (mkTyConApp ty_con (ks ++ taus)) tup_k exp_kind }
  where
    arity = length tys
--------- Constraint types
tc_hs_type mode rn_ty@(HsIParamTy _ (L _ n) ty) exp_kind
  = do { MASSERT( isTypeLevel (mode_level mode) )
       ; ty' <- tc_lhs_type mode ty liftedTypeKind
       ; let n' = mkStrLitTy $ hsIPNameFS n
       ; ipClass <- tcLookupClass ipClassName
       ; checkExpectedKind rn_ty (mkClassPred ipClass [n',ty'])
                           constraintKind exp_kind }
tc_hs_type _ rn_ty@(HsStarTy _ _) exp_kind
  -- Desugaring 'HsStarTy' to 'Data.Kind.Type' here means that we don't have to
  -- handle it in 'coreView' and 'tcView'.
  = checkExpectedKind rn_ty liftedTypeKind liftedTypeKind exp_kind
--------- Literals
tc_hs_type _ rn_ty@(HsTyLit _ (HsNumTy _ n)) exp_kind
  = do { checkWiredInTyCon typeNatKindCon
       ; checkExpectedKind rn_ty (mkNumLitTy n) typeNatKind exp_kind }
tc_hs_type _ rn_ty@(HsTyLit _ (HsStrTy _ s)) exp_kind
  = do { checkWiredInTyCon typeSymbolKindCon
       ; checkExpectedKind rn_ty (mkStrLitTy s) typeSymbolKind exp_kind }
--------- Potentially kind-polymorphic types: call the "up" checker
-- See Note [Future-proofing the type checker]
tc_hs_type mode ty@(HsTyVar {})            ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(HsAppTy {})            ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(HsAppKindTy{})         ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(HsOpTy {})             ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(HsKindSig {})          ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(XHsType (NHsCoreTy{})) ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type _ wc@(HsWildCardTy _)           ek = tcAnonWildCardOcc wc ek
------------------------------------------
-- | Kind-check a function arrow @ty1 -> ty2@.  At the type level the
-- argument and result kinds may each be @TYPE r@ for any @r@, hence the
-- fresh open kinds; at the kind level both sides are checked against the
-- lifted kind (no representation polymorphism in kinds, yet).
tc_fun_type :: TcTyMode -> LHsType GhcRn -> LHsType GhcRn -> TcKind
            -> TcM TcType
tc_fun_type mode ty1 ty2 exp_kind
  = do { (arg_k, res_k) <- case mode_level mode of
           TypeLevel -> do { ak <- newOpenTypeKind
                           ; rk <- newOpenTypeKind
                           ; return (ak, rk) }
           KindLevel -> return (liftedTypeKind, liftedTypeKind)
       ; ty1' <- tc_lhs_type mode ty1 arg_k
       ; ty2' <- tc_lhs_type mode ty2 res_k
       ; checkExpectedKind (HsFunTy noExtField ty1 ty2) (mkVisFunTy ty1' ty2')
                           liftedTypeKind exp_kind }
---------------------------
-- | Kind-check an occurrence of an anonymous wildcard @_@ against the
-- expected kind, minting a fresh wildcard metavariable for it.
tcAnonWildCardOcc :: HsType GhcRn -> Kind -> TcM TcType
tcAnonWildCardOcc wc exp_kind
  = do { wc_tv  <- newWildTyVar  -- The wildcard's kind will be an un-filled-in meta tyvar
       ; no_sig <- not <$> xoptM LangExt.PartialTypeSignatures
       ; warn   <- woptM Opt_WarnPartialTypeSignatures
         -- Emit a hole constraint unless PartialTypeSignatures is on
         -- with warnings suppressed.
         -- Why this guard?  See Note [Wildcards in visible kind application]
       ; when (no_sig || warn) $
         emitAnonWildCardHoleConstraint wc_tv
       ; checkExpectedKind wc (mkTyVarTy wc_tv)
                           (tyVarKind wc_tv) exp_kind }
{- Note [Wildcards in visible kind application]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are cases where users might want to pass in a wildcard as a visible kind
argument, for instance:
data T :: forall k1 k2. k1 → k2 → Type where
MkT :: T a b
x :: T @_ @Nat False n
x = MkT
So we should allow '@_' without emitting any hole constraints, and
regardless of whether PartialTypeSignatures is enabled or not. But how would
the typechecker know which '_' is being used in VKA and which is not when it
calls emitNamedWildCardHoleConstraints in tcHsPartialSigType on all HsWildCardBndrs?
The solution then is to neither rename nor include unnamed wildcards in HsWildCardBndrs,
but instead give every anonymous wildcard a fresh wild tyvar in tcAnonWildCardOcc.
And whenever we see a '@', we automatically turn on PartialTypeSignatures and
turn off hole constraint warnings, and do not call emitAnonWildCardHoleConstraint
under these conditions.
See related Note [Wildcards in visible type application] here and
Note [The wildcard story for types] in GHC.Hs.Types
-}
{- *********************************************************************
* *
Tuples
* *
********************************************************************* -}
---------------------------
-- | Classify a kind as that of a constraint tuple, a boxed tuple, or
-- neither, looking through casts and type synonyms first.
tupKindSort_maybe :: TcKind -> Maybe TupleSort
tupKindSort_maybe kind
  | Just (inner, _) <- splitCastTy_maybe kind = tupKindSort_maybe inner
  | Just expanded   <- tcView kind            = tupKindSort_maybe expanded
  | tcIsConstraintKind kind                   = Just ConstraintTuple
  | tcIsLiftedTypeKind kind                   = Just BoxedTuple
  | otherwise                                 = Nothing
-- | Kind-check the component types of a tuple of a known 'TupleSort'
-- and assemble the resulting tuple type via 'finish_tuple'.
tc_tuple :: HsType GhcRn -> TcTyMode -> TupleSort -> [LHsType GhcRn] -> TcKind -> TcM TcType
tc_tuple rn_ty mode tup_sort hs_tys exp_kind
  = do { let n_tys = length hs_tys
         -- Component kinds: boxed components are lifted, unboxed ones may
         -- have any TYPE r kind, constraint components have kind Constraint
       ; component_kinds <- case tup_sort of
           BoxedTuple      -> return (replicate n_tys liftedTypeKind)
           UnboxedTuple    -> replicateM n_tys newOpenTypeKind
           ConstraintTuple -> return (replicate n_tys constraintKind)
       ; component_tys <- zipWithM (tc_lhs_type mode) hs_tys component_kinds
       ; finish_tuple rn_ty tup_sort component_tys component_kinds exp_kind }
-- | Given the already-kind-checked component types of a tuple, build the
-- tuple type itself (constraint, boxed, or unboxed) and check it against
-- the expected kind.  Note the special treatment of 1-ary constraint
-- tuples, which are dropped entirely.
finish_tuple :: HsType GhcRn
             -> TupleSort
             -> [TcType]    -- ^ argument types
             -> [TcKind]    -- ^ of these kinds
             -> TcKind      -- ^ expected kind of the whole tuple
             -> TcM TcType
finish_tuple rn_ty tup_sort tau_tys tau_kinds exp_kind = do
  traceTc "finish_tuple" (ppr tup_sort $$ ppr tau_kinds $$ ppr exp_kind)
  case tup_sort of
    ConstraintTuple
      |  [tau_ty] <- tau_tys
         -- Drop any uses of 1-tuple constraints here.
         -- See Note [Ignore unary constraint tuples]
      -> check_expected_kind tau_ty constraintKind
      |  arity > mAX_CTUPLE_SIZE
      -> failWith (bigConstraintTuple arity)
      |  otherwise
         -- Constraint tuple classes are looked up by name, not wired in
      -> do tycon <- tcLookupTyCon (cTupleTyConName arity)
            check_expected_kind (mkTyConApp tycon tau_tys) constraintKind
    BoxedTuple -> do
      let tycon = tupleTyCon Boxed arity
      checkWiredInTyCon tycon
      check_expected_kind (mkTyConApp tycon tau_tys) liftedTypeKind
    UnboxedTuple ->
      let tycon    = tupleTyCon Unboxed arity
          tau_reps = map kindRep tau_kinds
          -- See also Note [Unboxed tuple RuntimeRep vars] in TyCon
          arg_tys  = tau_reps ++ tau_tys
          res_kind = unboxedTupleKind tau_reps in
      check_expected_kind (mkTyConApp tycon arg_tys) res_kind
  where
    arity = length tau_tys
    -- Check the assembled tuple type against the caller's expected kind
    check_expected_kind ty act_kind =
      checkExpectedKind rn_ty ty act_kind exp_kind
-- | Error document for a constraint tuple whose arity exceeds
-- 'mAX_CTUPLE_SIZE'.
bigConstraintTuple :: Arity -> MsgDoc
bigConstraintTuple n
  = hang (text "Constraint tuple arity too large:" <+> int n
          <+> parens (text "max arity =" <+> int mAX_CTUPLE_SIZE))
       2 (text "Instead, use a nested tuple")
{-
Note [Ignore unary constraint tuples]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC provides unary tuples and unboxed tuples (see Note [One-tuples] in
TysWiredIn) but does *not* provide unary constraint tuples. Why? First,
recall the definition of a unary tuple data type:
data Unit a = Unit a
Note that `Unit a` is *not* the same thing as `a`, since Unit is boxed and
lazy. Therefore, the presence of `Unit` matters semantically. On the other
hand, suppose we had a unary constraint tuple:
class a => Unit% a
This compiles down a newtype (i.e., a cast) in Core, so `Unit% a` is
semantically equivalent to `a`. Therefore, a 1-tuple constraint would have
no user-visible impact, nor would it allow you to express anything that
you couldn't otherwise.
We could simply add Unit% for consistency with tuples (Unit) and unboxed
tuples (Unit#), but that would require even more magic to wire in another
magical class, so we opt not to do so. We must be careful, however, since
one can try to sneak in uses of unary constraint tuples through Template
Haskell, such as in this program (from #17511):
f :: $(pure (ForallT [] [TupleT 1 `AppT` (ConT ''Show `AppT` ConT ''Int)]
(ConT ''String)))
-- f :: Unit% (Show Int) => String
f = "abc"
This use of `TupleT 1` will produce an HsBoxedOrConstraintTuple of arity 1,
and since it is used in a Constraint position, GHC will attempt to treat
it as though it were a constraint tuple, which can potentially lead to
trouble if one attempts to look up the name of a constraint tuple of arity
1 (as it won't exist). To avoid this trouble, we simply take any unary
constraint tuples discovered when typechecking and drop them—i.e., treat
"Unit% a" as though the user had written "a". This is always safe to do
since the two constraints should be semantically equivalent.
-}
{- *********************************************************************
* *
Type applications
* *
********************************************************************* -}
-- | If the renamed type is (headed by) an application, decompose it into
-- its head and the list of value\/kind\/paren arguments, left to right.
-- Returns 'Nothing' for non-applications.
splitHsAppTys :: HsType GhcRn -> Maybe (LHsType GhcRn, [LHsTypeArg GhcRn])
splitHsAppTys hs_ty
  | looks_like_app hs_ty = Just (peel (noLoc hs_ty) [])
  | otherwise            = Nothing
  where
    -- Is this type some form of application?
    looks_like_app :: HsType GhcRn -> Bool
    looks_like_app (HsAppKindTy {})        = True
    looks_like_app (HsAppTy {})            = True
    looks_like_app (HsOpTy _ _ (L _ op) _) = not (op `hasKey` funTyConKey)
      -- I'm not sure why this funTyConKey test is necessary
      -- Can it even happen?  Perhaps for  t1 `(->)` t2
      -- but then maybe it's ok to treat that like a normal
      -- application rather than using the special rule for HsFunTy
    looks_like_app (HsTyVar {})            = True
    looks_like_app (HsParTy _ (L _ ty))    = looks_like_app ty
    looks_like_app _                       = False

    -- Peel arguments off the spine, accumulating them in order; infix
    -- operator applications become prefix applications of the operator.
    peel (L _  (HsAppTy _ f a))      as = peel f  (HsValArg a : as)
    peel (L _  (HsAppKindTy l ty k)) as = peel ty (HsTypeArg l k : as)
    peel (L sp (HsParTy _ f))        as = peel f  (HsArgPar sp : as)
    peel (L _  (HsOpTy _ l op@(L sp _) r)) as
      = ( L sp (HsTyVar noExtField NotPromoted op)
        , HsValArg l : HsValArg r : as )
    peel f as = (f, as)
---------------------------
tcInferAppHead :: TcTyMode -> LHsType GhcRn -> TcM (TcType, TcKind)
-- Version of tc_infer_lhs_type specialised for the head of an
-- application.  A bare HsTyVar (which includes type constructors) is
-- looked up directly, so that we do not zoom off into tcInferApps and
-- family saturation.
tcInferAppHead mode hs_ty
  = case hs_ty of
      L _ (HsTyVar _ _ (L _ name)) -> tcTyVar mode name
      _                            -> tc_infer_lhs_type mode hs_ty
---------------------------
-- | Apply a type of a given kind to a list of arguments. This instantiates
-- invisible parameters as necessary. Always consumes all the arguments,
-- using matchExpectedFunKind as necessary.
-- This takes an optional @VarEnv Kind@ which maps kind variables to kinds.-
-- These kinds should be used to instantiate invisible kind variables;
-- they come from an enclosing class for an associated type/data family.
--
-- tcInferApps also arranges to saturate any trailing invisible arguments
-- of a type-family application, which is usually the right thing to do
-- tcInferApps_nosat does not do this saturation; it is used only
-- by ":kind" in GHCi
tcInferApps, tcInferApps_nosat
    :: TcTyMode
    -> LHsType GhcRn        -- ^ Function (for printing only)
    -> TcType               -- ^ Function
    -> [LHsTypeArg GhcRn]   -- ^ Args
    -> TcM (TcType, TcKind) -- ^ (f args, result kind)
-- Saturating variant: typecheck the application, then instantiate any
-- trailing invisible binders of a type-family application.
tcInferApps mode hs_ty fun hs_args
  = do { (applied, applied_ki) <- tcInferApps_nosat mode hs_ty fun hs_args
       ; saturateFamApp applied applied_ki }
-- Non-saturating variant: applies 'fun' to all the given arguments,
-- instantiating invisible binders as needed, but does not saturate a
-- trailing type-family application.  Used directly only by ":kind".
tcInferApps_nosat mode orig_hs_ty fun orig_hs_args
  = do { traceTc "tcInferApps {" (ppr orig_hs_ty $$ ppr orig_hs_args)
       ; (f_args, res_k) <- go_init 1 fun orig_hs_args
       ; traceTc "tcInferApps }" (ppr f_args <+> dcolon <+> ppr res_k)
       ; return (f_args, res_k) }
  where
    -- go_init just initialises the auxiliary
    -- arguments of the 'go' loop
    go_init n fun all_args
      = go n fun empty_subst fun_ki all_args
      where
        fun_ki = tcTypeKind fun
           -- We do (tcTypeKind fun) here, even though the caller
           -- knows the function kind, to absolutely guarantee
           -- INVARIANT for 'go'
           -- Note that in a typical application (F t1 t2 t3),
           -- the 'fun' is just a TyCon, so tcTypeKind is fast
        empty_subst = mkEmptyTCvSubst $ mkInScopeSet $
                      tyCoVarsOfType fun_ki

    go :: Int             -- The # of the next argument
       -> TcType          -- Function applied to some args
       -> TCvSubst        -- Applies to function kind
       -> TcKind          -- Function kind
       -> [LHsTypeArg GhcRn]    -- Un-type-checked args
       -> TcM (TcType, TcKind)  -- Result type and its kind
    -- INVARIANT: in any call (go n fun subst fun_ki args)
    --               tcTypeKind fun  =  subst(fun_ki)
    -- So the 'subst' and 'fun_ki' arguments are simply
    -- there to avoid repeatedly calling tcTypeKind.
    --
    -- Reason for INVARIANT: to support the Purely Kinded Type Invariant
    -- it's important that if fun_ki has a forall, then so does
    -- (tcTypeKind fun), because the next thing we are going to do
    -- is apply 'fun' to an argument type.

    -- Dispatch on all_args first, for performance reasons
    go n fun subst fun_ki all_args = case (all_args, tcSplitPiTy_maybe fun_ki) of
      ---------------- No user-written args left. We're done!
      ([], _) -> return (fun, substTy subst fun_ki)

      ---------------- HsArgPar: We don't care about parens here
      (HsArgPar _ : args, _) -> go n fun subst fun_ki args

      ---------------- HsTypeArg: a kind application (fun @ki)
      (HsTypeArg _ hs_ki_arg : hs_args, Just (ki_binder, inner_ki)) ->
        case ki_binder of

        -- FunTy with PredTy on LHS, or ForAllTy with Inferred
        Named (Bndr _ Inferred) -> instantiate ki_binder inner_ki
        Anon InvisArg _         -> instantiate ki_binder inner_ki

        Named (Bndr _ Specified) ->  -- Visible kind application
          do { traceTc "tcInferApps (vis kind app)"
                 (vcat [ ppr ki_binder, ppr hs_ki_arg
                       , ppr (tyBinderType ki_binder)
                       , ppr subst ])

             ; let exp_kind = substTy subst $ tyBinderType ki_binder

             ; ki_arg <- addErrCtxt (funAppCtxt orig_hs_ty hs_ki_arg n) $
                         unsetWOptM Opt_WarnPartialTypeSignatures $
                         setXOptM LangExt.PartialTypeSignatures $
                             -- Urgh! see Note [Wildcards in visible kind application]
                             -- ToDo: must kill this ridiculous messing with DynFlags
                         tc_lhs_type (kindLevel mode) hs_ki_arg exp_kind

             ; traceTc "tcInferApps (vis kind app)" (ppr exp_kind)
             ; (subst', fun') <- mkAppTyM subst fun ki_binder ki_arg
             ; go (n+1) fun' subst' inner_ki hs_args }

        -- Attempted visible kind application (fun @ki), but fun_ki is
        --   forall k -> blah   or   k1 -> k2
        -- So we need a normal application.  Error.
        _ -> ty_app_err hs_ki_arg $ substTy subst fun_ki

      -- No binder; try applying the substitution, or fail if that's not possible
      (HsTypeArg _ ki_arg : _, Nothing) -> try_again_after_substing_or $
                                           ty_app_err ki_arg substed_fun_ki

      ---------------- HsValArg: a normal argument (fun ty)
      (HsValArg arg : args, Just (ki_binder, inner_ki))
        -- next binder is invisible; need to instantiate it
        | isInvisibleBinder ki_binder   -- FunTy with InvisArg on LHS;
                                        -- or ForAllTy with Inferred or Specified
         -> instantiate ki_binder inner_ki

        -- "normal" case
        | otherwise
         -> do { traceTc "tcInferApps (vis normal app)"
                          (vcat [ ppr ki_binder
                                , ppr arg
                                , ppr (tyBinderType ki_binder)
                                , ppr subst ])
                ; let exp_kind = substTy subst $ tyBinderType ki_binder
                ; arg' <- addErrCtxt (funAppCtxt orig_hs_ty arg n) $
                          tc_lhs_type mode arg exp_kind
                ; traceTc "tcInferApps (vis normal app) 2" (ppr exp_kind)
                ; (subst', fun') <- mkAppTyM subst fun ki_binder arg'
                ; go (n+1) fun' subst' inner_ki args }

          -- no binder; try applying the substitution, or infer another arrow in fun kind
      (HsValArg _ : _, Nothing)
        -> try_again_after_substing_or $
           do { let arrows_needed = n_initial_val_args all_args
              ; co <- matchExpectedFunKind hs_ty arrows_needed substed_fun_ki

              ; fun' <- zonkTcType (fun `mkTcCastTy` co)
                     -- This zonk is essential, to expose the fruits
                     -- of matchExpectedFunKind to the 'go' loop

              ; traceTc "tcInferApps (no binder)" $
                   vcat [ ppr fun <+> dcolon <+> ppr fun_ki
                        , ppr arrows_needed
                        , ppr co
                        , ppr fun' <+> dcolon <+> ppr (tcTypeKind fun')]
              ; go_init n fun' all_args }
                -- Use go_init to establish go's INVARIANT
      where
        instantiate ki_binder inner_ki
          = do { traceTc "tcInferApps (need to instantiate)"
                         (vcat [ ppr ki_binder, ppr subst])
               ; (subst', arg') <- tcInstInvisibleTyBinder subst ki_binder
               ; go n (mkAppTy fun arg') subst' inner_ki all_args }
                 -- Because tcInvisibleTyBinder instantiate ki_binder,
                 -- the kind of arg' will have the same shape as the kind
                 -- of ki_binder.  So we don't need mkAppTyM here.

        try_again_after_substing_or fallthrough
          | not (isEmptyTCvSubst subst)
          = go n fun zapped_subst substed_fun_ki all_args
          | otherwise
          = fallthrough

        zapped_subst   = zapTCvSubst subst
        substed_fun_ki = substTy subst fun_ki
        hs_ty = appTypeToArg orig_hs_ty (take (n-1) orig_hs_args)

    n_initial_val_args :: [HsArg tm ty] -> Arity
    -- Count how many leading HsValArgs we have
    n_initial_val_args (HsValArg {} : args) = 1 + n_initial_val_args args
    n_initial_val_args (HsArgPar {} : args) = n_initial_val_args args
    n_initial_val_args _                    = 0

    ty_app_err arg ty
      = failWith $ text "Cannot apply function of kind" <+> quotes (ppr ty)
                $$ text "to visible kind argument" <+> quotes (ppr arg)
mkAppTyM :: TCvSubst
         -> TcType -> TyCoBinder    -- fun, plus its top-level binder
         -> TcType                  -- arg
         -> TcM (TCvSubst, TcType)  -- Extended subst, plus (fun arg)
-- Precondition: the application (fun arg) is well-kinded after zonking
--               That is, the application makes sense
--
-- Precondition: for (mkAppTyM subst fun bndr arg)
--       tcTypeKind fun  =  Pi bndr. body
--  That is, fun always has a ForAllTy or FunTy at the top
--           and 'bndr' is fun's pi-binder
--
-- Postcondition: if fun and arg satisfy (PKTI), the purely-kinded type
--                invariant, then so does the result type (fun arg)
--
-- We do not require that
--    tcTypeKind arg = tyVarKind (binderVar bndr)
-- This must be true after zonking (precondition 1), but it's not
-- required for the (PKTI).
mkAppTyM subst fun ki_binder arg
  | -- See Note [mkAppTyM]: Nasty case 2
    -- A type synonym application missing exactly one argument to be
    -- saturated, with a tricky (pi-kinded) binder: zonk before applying
    TyConApp tc args <- fun
  , isTypeSynonymTyCon tc
  , args `lengthIs` (tyConArity tc - 1)
  , any isTrickyTvBinder (tyConTyVars tc) -- We could cache this in the synonym
  = do { arg'  <- zonkTcType  arg
       ; args' <- zonkTcTypes args
       ; let subst' = case ki_binder of
                        Anon {}           -> subst
                        Named (Bndr tv _) -> extendTvSubstAndInScope subst tv arg'
       ; return (subst', mkTyConApp tc (args' ++ [arg'])) }
mkAppTyM subst fun (Anon {}) arg
   = return (subst, mk_app_ty fun arg)
mkAppTyM subst fun (Named (Bndr tv _)) arg
  = do { arg' <- if isTrickyTvBinder tv
                 then -- See Note [mkAppTyM]: Nasty case 1
                      zonkTcType arg
                 else return arg
       ; return ( extendTvSubstAndInScope subst tv arg'
                , mk_app_ty fun arg' ) }
mk_app_ty :: TcType -> TcType -> TcType
-- This function just adds an ASSERT for mkAppTyM's precondition
-- (that 'fun' has a Pi-type, so the application is legal); otherwise
-- it behaves exactly like 'mkAppTy'.
mk_app_ty fun arg
  = ASSERT2( isPiTy fun_kind
           , ppr fun <+> dcolon <+> ppr fun_kind $$ ppr arg )
    mkAppTy fun arg
  where
    fun_kind = tcTypeKind fun
isTrickyTvBinder :: TcTyVar -> Bool
-- A binder is "tricky" when its own kind is a Pi-type; see Note [mkAppTyM].
-- NB: isTrickyTvBinder is just an optimisation
-- It would be absolutely sound to return True always
isTrickyTvBinder = isPiTy . tyVarKind
{- Note [The Purely Kinded Type Invariant (PKTI)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During type inference, we maintain this invariant
(PKTI) It is legal to call 'tcTypeKind' on any Type ty,
on any sub-term of ty, /without/ zonking ty
Moreover, any such returned kind
will itself satisfy (PKTI)
By "legal to call tcTypeKind" we mean "tcTypeKind will not crash".
The way in which tcTypeKind can crash is in applications
(a t1 t2 .. tn)
if 'a' is a type variable whose kind doesn't have enough arrows
or foralls. (The crash is in piResultTys.)
The loop in tcInferApps has to be very careful to maintain the (PKTI).
For example, suppose
kappa is a unification variable
We have already unified kappa := Type
yielding co :: Refl (Type -> Type)
a :: kappa
then consider the type
(a Int)
If we call tcTypeKind on that, we'll crash, because the (un-zonked)
kind of 'a' is just kappa, not an arrow kind. So we must zonk first.
So the type inference engine is very careful when building applications.
This happens in tcInferApps. Suppose we are kind-checking the type (a Int),
where (a :: kappa). Then in tcInferApps we'll run out of binders on
a's kind, so we'll call matchExpectedFunKind, and unify
   kappa := kappa1 -> kappa2, with evidence co :: kappa ~ (kappa1 -> kappa2)
At this point we must zonk the function type to expose the arrow, so
that (a Int) will satisfy (PKTI).
The absence of this caused #14174 and #14520.
The calls to mkAppTyM is the other place we are very careful.
Note [mkAppTyM]
~~~~~~~~~~~~~~~
mkAppTyM is trying to guarantee the Purely Kinded Type Invariant
(PKTI) for its result type (fun arg). There are two ways it can go wrong:
* Nasty case 1: forall types (polykinds/T14174a)
T :: forall (p :: *->*). p Int -> p Bool
Now kind-check (T x), where x::kappa.
Well, T and x both satisfy the PKTI, but
T x :: x Int -> x Bool
and (x Int) does /not/ satisfy the PKTI.
* Nasty case 2: type synonyms
type S f a = f a
Even though (S ff aa) would satisfy the (PKTI) if S was a data type
(i.e. nasty case 1 is dealt with), it might still not satisfy (PKTI)
if S is a type synonym, because the /expansion/ of (S ff aa) is
(ff aa), and /that/ does not satisfy (PKTI). E.g. perhaps
(ff :: kappa), where 'kappa' has already been unified with (*->*).
We check for nasty case 2 on the final argument of a type synonym.
Notice that in both cases the trickiness only happens if the
bound variable has a pi-type. Hence isTrickyTvBinder.
-}
saturateFamApp :: TcType -> TcKind -> TcM (TcType, TcKind)
-- Precondition for (saturateFamApp ty kind):
--     tcTypeKind ty = kind
--
-- If 'ty' is an unsaturated application of a tycon that must be
-- saturated (e.g. a type family) with trailing invisible arguments,
-- instantiate those arguments.  See Note [saturateFamApp]
saturateFamApp ty kind
  = case tcSplitTyConApp_maybe ty of
      Just (tc, args)
        | mustBeSaturated tc
        -> do { let n_missing = tyConArity tc - length args
              ; (inst_args, kind') <- tcInstInvisibleTyBinders n_missing kind
              ; return (ty `mkTcAppTys` inst_args, kind') }
      _ -> return (ty, kind)
{- Note [saturateFamApp]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
type family F :: Either j k
type instance F @Type = Right Maybe
  type instance F @Type = Right Either
Then F :: forall {j,k}. Either j k
The two type instances do a visible kind application that instantiates
'j' but not 'k'. But we want to end up with instances that look like
type instance F @Type @(*->*) = Right @Type @(*->*) Maybe
so that F has arity 2. We must instantiate that trailing invisible
binder. In general, Invisible binders precede Specified and Required,
so this is only going to bite for apparently-nullary families.
Note that
type family F2 :: forall k. k -> *
is quite different and really does have arity 0.
It's not just type instances where we need to saturate those
unsaturated arguments: see #11246. Hence doing this in tcInferApps.
-}
-- | Rebuild a renamed type by applying a head type to a list of
-- arguments (the inverse of 'splitHsAppTys', up to parenthesisation;
-- 'HsArgPar' markers are simply dropped).
appTypeToArg :: LHsType GhcRn -> [LHsTypeArg GhcRn] -> LHsType GhcRn
appTypeToArg fun []           = fun
appTypeToArg fun (arg : rest) = case arg of
  HsValArg ty    -> appTypeToArg (mkHsAppTy fun ty) rest
  HsTypeArg l ki -> appTypeToArg (mkHsAppKindTy l fun ki) rest
  HsArgPar _     -> appTypeToArg fun rest
{- *********************************************************************
* *
checkExpectedKind
* *
********************************************************************* -}
-- | This instantiates invisible arguments for the type being checked if it must
-- be saturated and is not yet saturated. It then calls and uses the result
-- from checkExpectedKindX to build the final type
checkExpectedKind :: HasDebugCallStack
                  => HsType GhcRn  -- ^ type we're checking (for printing)
                  -> TcType        -- ^ type we're checking
                  -> TcKind        -- ^ the known kind of that type
                  -> TcKind        -- ^ the expected kind
                  -> TcM TcType
-- Just a convenience wrapper around 'checkExpectedKind_pp' that renders
-- the source type for error messages
checkExpectedKind hs_ty = checkExpectedKind_pp (ppr hs_ty)
checkExpectedKind_pp :: HasDebugCallStack
                     => SDoc    -- ^ The thing we are checking
                     -> TcType  -- ^ type we're checking
                     -> TcKind  -- ^ the known kind of that type
                     -> TcKind  -- ^ the expected kind
                     -> TcM TcType
-- Checks the type against the expected kind, first instantiating any
-- surplus leading invisible binders of the actual kind, and wrapping the
-- result in a kind cast when a non-reflexive coercion is needed.
checkExpectedKind_pp pp_hs_ty ty act_kind exp_kind
  = do { traceTc "checkExpectedKind" (ppr ty $$ ppr act_kind)
       ; (new_args, act_kind') <- tcInstInvisibleTyBinders n_to_inst act_kind
       ; let origin = TypeEqOrigin { uo_actual   = act_kind'
                                   , uo_expected = exp_kind
                                   , uo_thing    = Just pp_hs_ty
                                   , uo_visible  = True } -- the hs_ty is visible
       ; traceTc "checkExpectedKindX" $
         vcat [ pp_hs_ty
              , text "act_kind':" <+> ppr act_kind'
              , text "exp_kind:" <+> ppr exp_kind ]
       ; let res_ty = ty `mkTcAppTys` new_args
       ; if act_kind' `tcEqType` exp_kind
         then return res_ty  -- This is very common
         else do { co_k <- uType KindLevel origin act_kind' exp_kind
                 ; traceTc "checkExpectedKind" (vcat [ ppr act_kind
                                                     , ppr exp_kind
                                                     , ppr co_k ])
                 ; return (res_ty `mkTcCastTy` co_k) } }
  where
    -- We need to make sure that both kinds have the same number of implicit
    -- foralls out front. If the actual kind has more, instantiate accordingly.
    -- Otherwise, just pass the type & kind through: the errors are caught
    -- in unifyType.
    n_exp_invis_bndrs = invisibleTyBndrCount exp_kind
    n_act_invis_bndrs = invisibleTyBndrCount act_kind
    n_to_inst         = n_act_invis_bndrs - n_exp_invis_bndrs
---------------------------
-- | Kind-check an optional context; an absent context is the empty one.
tcHsMbContext :: Maybe (LHsContext GhcRn) -> TcM [PredType]
tcHsMbContext = maybe (return []) tcHsContext
-- | Kind-check a source-level context at the type level.
tcHsContext :: LHsContext GhcRn -> TcM [PredType]
tcHsContext ctxt = tc_hs_context typeLevelMode ctxt
-- | Kind-check a located type as a predicate (against kind Constraint).
tcLHsPredType :: LHsType GhcRn -> TcM PredType
tcLHsPredType pred_ty = tc_lhs_pred typeLevelMode pred_ty
-- | Kind-check each predicate of a context, in the given mode.
tc_hs_context :: TcTyMode -> LHsContext GhcRn -> TcM [PredType]
tc_hs_context mode = mapM (tc_lhs_pred mode) . unLoc
-- | Kind-check a single predicate against kind Constraint.
tc_lhs_pred :: TcTyMode -> LHsType GhcRn -> TcM PredType
tc_lhs_pred mode hs_pred = tc_lhs_type mode hs_pred constraintKind
---------------------------
-- | Look up an occurrence of a type-level name, which may resolve to a
-- type variable, a type constructor (in-scope or being defined), or a
-- promoted data constructor; returns the corresponding type and its kind,
-- or issues a promotion error where the occurrence is illegal.
tcTyVar :: TcTyMode -> Name -> TcM (TcType, TcKind)
-- See Note [Type checking recursive type and class declarations]
-- in TcTyClsDecls
tcTyVar mode name         -- Could be a tyvar, a tycon, or a datacon
  = do { traceTc "lk1" (ppr name)
       ; thing <- tcLookup name
       ; case thing of
           ATyVar _ tv -> return (mkTyVarTy tv, tyVarKind tv)

           -- A tycon of the current recursive group, still being checked
           ATcTyCon tc_tc
             -> do { -- See Note [GADT kind self-reference]
                     unless (isTypeLevel (mode_level mode))
                            (promotionErr name TyConPE)
                   ; check_tc tc_tc
                   ; return (mkTyConTy tc_tc, tyConKind tc_tc) }

           AGlobal (ATyCon tc)
             -> do { check_tc tc
                   ; return (mkTyConTy tc, tyConKind tc) }

           -- A data constructor used in a type: promote it (DataKinds)
           AGlobal (AConLike (RealDataCon dc))
             -> do { data_kinds <- xoptM LangExt.DataKinds
                   ; unless (data_kinds || specialPromotedDc dc) $
                       promotionErr name NoDataKindsDC
                   ; when (isFamInstTyCon (dataConTyCon dc)) $
                       -- see #15245
                       promotionErr name FamDataConPE
                   ; let (_, _, _, theta, _, _) = dataConFullSig dc
                   ; traceTc "tcTyVar" (ppr dc <+> ppr theta $$ ppr (dc_theta_illegal_constraint theta))
                   ; case dc_theta_illegal_constraint theta of
                       Just pred -> promotionErr name $
                                    ConstrainedDataConPE pred
                       Nothing   -> pure ()
                   ; let tc = promoteDataCon dc
                   ; return (mkTyConApp tc [], tyConKind tc) }

           APromotionErr err -> promotionErr name err

           _  -> wrongThingErr "type" thing name }
  where
    -- Reject uses of ordinary tycons in kinds unless DataKinds is on
    check_tc :: TyCon -> TcM ()
    check_tc tc = do { data_kinds   <- xoptM LangExt.DataKinds
                     ; unless (isTypeLevel (mode_level mode) ||
                               data_kinds ||
                               isKindTyCon tc) $
                       promotionErr name NoDataKindsTC }

    -- We cannot promote a data constructor with a context that contains
    -- constraints other than equalities, so error if we find one.
    -- See Note [Constraints in kinds] in TyCoRep
    dc_theta_illegal_constraint :: ThetaType -> Maybe PredType
    dc_theta_illegal_constraint = find (not . isEqPred)
{-
Note [GADT kind self-reference]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A promoted type cannot be used in the body of that type's declaration.
#11554 shows this example, which made GHC loop:
import Data.Kind
data P (x :: k) = Q
data A :: Type where
B :: forall (a :: A). P a -> A
In order to check the constructor B, we need to have the promoted type A, but in
order to get that promoted type, B must first be checked. To prevent looping, a
TyConPE promotion error is given when tcTyVar checks an ATcTyCon in kind mode.
Any ATcTyCon is a TyCon being defined in the current recursive group (see data
type decl for TcTyThing), and all such TyCons are illegal in kinds.
#11962 proposes checking the head of a data declaration separately from
its constructors. This would allow the example above to pass.
Note [Body kind of a HsForAllTy]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The body of a forall is usually a type, but in principle
there's no reason to prohibit *unlifted* types.
In fact, GHC can itself construct a function with an
unboxed tuple inside a for-all (via CPR analysis; see
typecheck/should_compile/tc170).
Moreover in instance heads we get forall-types with
kind Constraint.
It's tempting to check that the body kind is either * or #. But this is
wrong. For example:
class C a b
newtype N = Mk Foo deriving (C a)
We're doing newtype-deriving for C. But notice how `a` isn't in scope in
the predicate `C a`. So we quantify, yielding `forall a. C a` even though
`C a` has kind `* -> Constraint`. The `forall a. C a` is a bit cheeky, but
convenient. Bottom line: don't check for * or # here.
Note [Body kind of a HsQualTy]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If ctxt is non-empty, the HsQualTy really is a /function/, so the
kind of the result really is '*', and in that case the kind of the
body-type can be lifted or unlifted.
However, consider
instance Eq a => Eq [a] where ...
or
f :: (Eq a => Eq [a]) => blah
Here both body-kind of the HsQualTy is Constraint rather than *.
Rather crudely we tell the difference by looking at exp_kind. It's
very convenient to typecheck instance types like any other HsSigType.
Admittedly the '(Eq a => Eq [a]) => blah' case is erroneous, but it's
better to reject in checkValidType. If we say that the body kind
should be '*' we risk getting TWO error messages, one saying that Eq
[a] doesn't have kind '*', and one saying that we need a Constraint to
the left of the outer (=>).
How do we figure out the right body kind? Well, it's a bit of a
kludge: I just look at the expected kind. If it's Constraint, we
must be in this instance situation context. It's a kludge because it
wouldn't work if any unification was involved to compute that result
kind -- but it isn't. (The true way might be to use the 'mode'
parameter, but that seemed like a sledgehammer to crack a nut.)
Note [Inferring tuple kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Give a tuple type (a,b,c), which the parser labels as HsBoxedOrConstraintTuple,
we try to figure out whether it's a tuple of kind * or Constraint.
Step 1: look at the expected kind
Step 2: infer argument kinds
If after Step 2 it's not clear from the arguments that it's
Constraint, then it must be *. Once having decided that we re-check
the arguments to give good error messages in
e.g. (Maybe, Maybe)
Note that we will still fail to infer the correct kind in this case:
type T a = ((a,a), D a)
type family D :: Constraint -> Constraint
While kind checking T, we do not yet know the kind of D, so we will default the
kind of T to * -> *. It works if we annotate `a` with kind `Constraint`.
Note [Desugaring types]
~~~~~~~~~~~~~~~~~~~~~~~
The type desugarer is phase 2 of dealing with HsTypes. Specifically:
* It transforms from HsType to Type
* It zonks any kinds. The returned type should have no mutable kind
or type variables (hence returning Type not TcType):
- any unconstrained kind variables are defaulted to (Any *) just
as in TcHsSyn.
- there are no mutable type variables because we are
kind-checking a type
Reason: the returned type may be put in a TyCon or DataCon where
it will never subsequently be zonked.
You might worry about nested scopes:
..a:kappa in scope..
let f :: forall b. T '[a,b] -> Int
In this case, f's type could have a mutable kind variable kappa in it;
and we might then default it to (Any *) when dealing with f's type
signature. But we don't expect this to happen because we can't get a
lexically scoped type variable with a mutable kind variable in it. A
delicate point, this. If it becomes an issue we might need to
distinguish top-level from nested uses.
Moreover
* it cannot fail,
* it does no unifications
* it does no validity checking, except for structural matters, such as
(a) spurious ! annotations.
(b) a class used as a type
Note [Kind of a type splice]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider these terms, each with TH type splice inside:
[| e1 :: Maybe $(..blah..) |]
[| e2 :: $(..blah..) |]
When kind-checking the type signature, we'll kind-check the splice
$(..blah..); we want to give it a kind that can fit in any context,
as if $(..blah..) :: forall k. k.
In the e1 example, the context of the splice fixes kappa to *. But
in the e2 example, we'll desugar the type, zonking the kind unification
variables as we go. When we encounter the unconstrained kappa, we
want to default it to '*', not to (Any *).
Help functions for type applications
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-}
-- | Wrap an error context (\"In the type ...\") around an action, but only
-- when that context would actually help the user: wildcard types are
-- skipped, because \"In the type '_'\" just isn't helpful.
addTypeCtxt :: LHsType GhcRn -> TcM a -> TcM a
addTypeCtxt (L _ hs_ty) thing
  = case hs_ty of
      HsWildCardTy _ -> thing   -- omit the uninformative context for '_'
      _              -> addErrCtxt (text "In the type" <+> quotes (ppr hs_ty)) thing
{-
************************************************************************
* *
Type-variable binders
%* *
%************************************************************************
Note [Keeping scoped variables in order: Explicit]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the user writes `forall a b c. blah`, we bring a, b, and c into
scope and then check blah. In the process of checking blah, we might
learn the kinds of a, b, and c, and these kinds might indicate that
b depends on c, and thus that we should reject the user-written type.
One approach to doing this would be to bring each of a, b, and c into
scope, one at a time, creating an implication constraint and
bumping the TcLevel for each one. This would work, because the kind
of, say, b would be untouchable when c is in scope (and the constraint
couldn't float out because c blocks it). However, it leads to terrible
error messages, complaining about skolem escape. While it is indeed
a problem of skolem escape, we can do better.
Instead, our approach is to bring the block of variables into scope
all at once, creating one implication constraint for the lot. The
user-written variables are skolems in the implication constraint. In
TcSimplify.setImplicationStatus, we check to make sure that the ordering
is correct, choosing ImplicationStatus IC_BadTelescope if they aren't.
Then, in TcErrors, we report if there is a bad telescope. This way,
we can report a suggested ordering to the user if there is a problem.
See also Note [Checking telescopes] in Constraint
Note [Keeping scoped variables in order: Implicit]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the user implicitly quantifies over variables (say, in a type
signature), we need to come up with some ordering on these variables.
This is done by bumping the TcLevel, bringing the tyvars into scope,
and then type-checking the thing_inside. The constraints are all
wrapped in an implication, which is then solved. Finally, we can
zonk all the binders and then order them with scopedSort.
It's critical to solve before zonking and ordering in order to uncover
any unifications. You might worry that this eager solving could cause
trouble elsewhere. I don't think it will. Because it will solve only
in an increased TcLevel, it can't unify anything that was mentioned
elsewhere. Additionally, we require that the order of implicitly
quantified variables is manifest by the scope of these variables, so
we're not going to learn more information later that will help order
these variables.
Note [Recipe for checking a signature]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Checking a user-written signature requires several steps:
1. Generate constraints.
2. Solve constraints.
3. Promote tyvars and/or kind-generalize.
4. Zonk.
5. Check validity.
There may be some surprises in here:
Step 2 is necessary for two reasons: most signatures also bring
implicitly quantified variables into scope, and solving is necessary
to get these in the right order (see Note [Keeping scoped variables in
order: Implicit]). Additionally, solving is necessary in order to
kind-generalize correctly: otherwise, we do not know which metavariables
are left unsolved.
Step 3 is done by a call to candidateQTyVarsOfType, followed by a call to
kindGeneralize{All,Some,None}. Here, we have to deal with the fact that
metatyvars generated in the type may have a bumped TcLevel, because explicit
foralls raise the TcLevel. To avoid these variables from ever being visible in
the surrounding context, we must obey the following dictum:
Every metavariable in a type must either be
(A) generalized, or
(B) promoted, or See Note [Promotion in signatures]
(C) a cause to error See Note [Naughty quantification candidates] in TcMType
The kindGeneralize functions do not require pre-zonking; they zonk as they
go.
If you are actually doing kind-generalization, you need to bump the level
before generating constraints, as we will only generalize variables with
a TcLevel higher than the ambient one.
After promoting/generalizing, we need to zonk again because both
promoting and generalizing fill in metavariables.
Note [Promotion in signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If an unsolved metavariable in a signature is not generalized
(because we're not generalizing the construct -- e.g., pattern
sig -- or because the metavars are constrained -- see kindGeneralizeSome)
we need to promote to maintain (WantedTvInv) of Note [TcLevel and untouchable type variables]
in TcType. Note that promotion is identical in effect to generalizing
and the reinstantiating with a fresh metavariable at the current level.
So in some sense, we generalize *all* variables, but then re-instantiate
some of them.
Here is an example of why we must promote:
foo (x :: forall a. a -> Proxy b) = ...
In the pattern signature, `b` is unbound, and will thus be brought into
scope. We do not know its kind: it will be assigned kappa[2]. Note that
kappa is at TcLevel 2, because it is invented under a forall. (A priori,
the kind kappa might depend on `a`, so kappa rightly has a higher TcLevel
than the surrounding context.) This kappa cannot be solved for while checking
the pattern signature (which is not kind-generalized). When we are checking
the *body* of foo, though, we need to unify the type of x with the argument
type of bar. At this point, the ambient TcLevel is 1, and spotting a
metavariable with level 2 would violate the (WantedTvInv) invariant of
Note [TcLevel and untouchable type variables]. So, instead of kind-generalizing,
we promote the metavariable to level 1. This is all done in kindGeneralizeNone.
-}
tcNamedWildCardBinders :: [Name]
                       -> ([(Name, TcTyVar)] -> TcM a)
                       -> TcM a
-- Bring into scope the /named/ wildcard binders.  Plain wildcards _
-- are anonymous and are dealt with by HsWildCardTy instead.
-- See Note [The wildcard story for types] in GHC.Hs.Types
tcNamedWildCardBinders wc_names thing_inside
  = do { wc_prs <- mapM mk_wc_pair wc_names
       ; tcExtendNameTyVarEnv wc_prs (thing_inside wc_prs) }
  where
    -- Pair each named wildcard with a fresh unification variable
    mk_wc_pair wc_name = do { wc_tv <- newWildTyVar
                            ; return (wc_name, wc_tv) }
-- | Allocate a fresh unification variable to stand for a wildcard '_'.
newWildTyVar :: TcM TcTyVar
newWildTyVar
  = do { kind    <- newMetaKindVar
       ; uniq    <- newUnique
       ; details <- newMetaDetails TauTv
         -- The system name "_" makes the variable print like the wildcard
         -- the user wrote
       ; let wc_tv = mkTcTyVar (mkSysTvName uniq (fsLit "_")) kind details
       ; traceTc "newWildTyVar" (ppr wc_tv)
       ; return wc_tv }
{- *********************************************************************
* *
Kind inference for type declarations
* *
********************************************************************* -}
-- See Note [kcCheckDeclHeader vs kcInferDeclHeader]
-- | How to compute the initial kind of a type constructor: either check it
-- against a kind that is fully known up front (from a standalone kind
-- signature or a CUSK), or infer a monomorphic kind to be generalised later.
data InitialKindStrategy
  = InitialKindCheck SAKS_or_CUSK  -- ^ Kind fully known: check against it
  | InitialKindInfer               -- ^ No signature: infer, generalise later
-- Does the declaration have a standalone kind signature (SAKS) or a complete
-- user-specified kind (CUSK)?
-- | Evidence that the initial kind of a declaration is fully determined by
-- the user, in one of the two possible forms.
data SAKS_or_CUSK
  = SAKS Kind  -- Standalone kind signature, fully zonked! (zonkTcTypeToType)
  | CUSK       -- Complete user-specified kind (CUSK)
instance Outputable SAKS_or_CUSK where
  ppr s = case s of
    SAKS k -> text "SAKS" <+> ppr k
    CUSK   -> text "CUSK"
-- See Note [kcCheckDeclHeader vs kcInferDeclHeader]
-- | Compute the initial kind of a type constructor, dispatching on the
-- chosen 'InitialKindStrategy'.
kcDeclHeader
  :: InitialKindStrategy
  -> Name              -- ^ of the thing being checked
  -> TyConFlavour      -- ^ What sort of 'TyCon' is being checked
  -> LHsQTyVars GhcRn  -- ^ Binders in the header
  -> TcM ContextKind   -- ^ The result kind
  -> TcM TcTyCon       -- ^ A suitably-kinded TcTyCon
kcDeclHeader strategy = case strategy of
  InitialKindCheck msig -> kcCheckDeclHeader msig
  InitialKindInfer      -> kcInferDeclHeader
{- Note [kcCheckDeclHeader vs kcInferDeclHeader]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
kcCheckDeclHeader and kcInferDeclHeader are responsible for getting the initial kind
of a type constructor.
* kcCheckDeclHeader: the TyCon has a standalone kind signature or a CUSK. In that
case, find the full, final, poly-kinded kind of the TyCon. It's very like a
term-level binding where we have a complete type signature for the function.
* kcInferDeclHeader: the TyCon has neither a standalone kind signature nor a
CUSK. Find a monomorphic kind, with unification variables in it; they will be
generalised later. It's very like a term-level binding where we do not have a
type signature (or, more accurately, where we have a partial type signature),
so we infer the type and generalise.
-}
------------------------------
-- | Check a declaration header against a fully-known kind, coming either
-- from a standalone kind signature or from a CUSK.
kcCheckDeclHeader
  :: SAKS_or_CUSK
  -> Name              -- ^ of the thing being checked
  -> TyConFlavour      -- ^ What sort of 'TyCon' is being checked
  -> LHsQTyVars GhcRn  -- ^ Binders in the header
  -> TcM ContextKind   -- ^ The result kind. AnyKind == no result signature
  -> TcM TcTyCon       -- ^ A suitably-kinded generalized TcTyCon
kcCheckDeclHeader sig_or_cusk = case sig_or_cusk of
  SAKS sig -> kcCheckDeclHeader_sig sig
  CUSK     -> kcCheckDeclHeader_cusk
-- | Check a declaration header in the CUSK case: the binders and result
-- kind annotation together determine the complete kind, so we skolemise,
-- solve, and quantify right here, producing a fully generalised TcTyCon.
kcCheckDeclHeader_cusk
  :: Name              -- ^ of the thing being checked
  -> TyConFlavour      -- ^ What sort of 'TyCon' is being checked
  -> LHsQTyVars GhcRn  -- ^ Binders in the header
  -> TcM ContextKind   -- ^ The result kind
  -> TcM TcTyCon       -- ^ A suitably-kinded generalized TcTyCon
kcCheckDeclHeader_cusk name flav
              (HsQTvs { hsq_ext = kv_ns
                      , hsq_explicit = hs_tvs }) kc_res_ki
  -- CUSK case
  -- See note [Required, Specified, and Inferred for types] in TcTyClsDecls
  = addTyConFlavCtxt name flav $
    do { (scoped_kvs, (tc_tvs, res_kind))
           <- pushTcLevelM_                               $
              solveEqualities                             $
              bindImplicitTKBndrs_Q_Skol kv_ns            $
              bindExplicitTKBndrs_Q_Skol ctxt_kind hs_tvs $
              newExpectedKind =<< kc_res_ki

           -- Now, because we're in a CUSK,
           -- we quantify over the mentioned kind vars
       ; let spec_req_tkvs = scoped_kvs ++ tc_tvs
             all_kinds     = res_kind : map tyVarKind spec_req_tkvs

       ; candidates' <- candidateQTyVarsOfKinds all_kinds
             -- 'candidates' are all the variables that we are going to
             -- skolemise and then quantify over.  We do not include spec_req_tvs
             -- because they are /already/ skolems

       ; let non_tc_candidates = filter (not . isTcTyVar) (nonDetEltsUniqSet (tyCoVarsOfTypes all_kinds))
             candidates = candidates' { dv_kvs = dv_kvs candidates' `extendDVarSetList` non_tc_candidates }
             inf_candidates = candidates `delCandidates` spec_req_tkvs

       ; inferred <- quantifyTyVars inf_candidates
                     -- NB: 'inferred' comes back sorted in dependency order

         -- Zonk so the final binders and result kind reflect any
         -- unifications performed while solving above
       ; scoped_kvs <- mapM zonkTyCoVarKind scoped_kvs
       ; tc_tvs     <- mapM zonkTyCoVarKind tc_tvs
       ; res_kind   <- zonkTcType           res_kind

       ; let mentioned_kv_set = candidateKindVars candidates
             specified        = scopedSort scoped_kvs
                                -- NB: maintain the L-R order of scoped_kvs

             final_tc_binders =  mkNamedTyConBinders Inferred  inferred
                              ++ mkNamedTyConBinders Specified specified
                              ++ map (mkRequiredTyConBinder mentioned_kv_set) tc_tvs

             all_tv_prs = mkTyVarNamePairs (scoped_kvs ++ tc_tvs)
             tycon = mkTcTyCon name final_tc_binders res_kind all_tv_prs
                               True -- it is generalised
                               flav
         -- If the ordering from
         -- Note [Required, Specified, and Inferred for types] in TcTyClsDecls
         -- doesn't work, we catch it here, before an error cascade
       ; checkTyConTelescope tycon

       ; traceTc "kcCheckDeclHeader_cusk " $
         vcat [ text "name" <+> ppr name
              , text "kv_ns" <+> ppr kv_ns
              , text "hs_tvs" <+> ppr hs_tvs
              , text "scoped_kvs" <+> ppr scoped_kvs
              , text "tc_tvs" <+> ppr tc_tvs
              , text "res_kind" <+> ppr res_kind
              , text "candidates" <+> ppr candidates
              , text "inferred" <+> ppr inferred
              , text "specified" <+> ppr specified
              , text "final_tc_binders" <+> ppr final_tc_binders
              , text "mkTyConKind final_tc_bndrs res_kind"
                <+> ppr (mkTyConKind final_tc_binders res_kind)
              , text "all_tv_prs" <+> ppr all_tv_prs ]

       ; return tycon }
  where
    -- Open type/data families must return a lifted type;
    -- other flavours may return anything
    ctxt_kind | tcFlavourIsOpen flav = TheKind liftedTypeKind
              | otherwise            = AnyKind

kcCheckDeclHeader_cusk _ _ (XLHsQTyVars nec) _ = noExtCon nec
-- | Kind-check a 'LHsQTyVars'. Used in 'inferInitialKind' (for tycon kinds and
-- other kinds).
--
-- This function does not do telescope checking.  It produces a
-- /non-generalised/ TcTyCon whose kind may still contain unification
-- variables; generalisation happens later.
kcInferDeclHeader
  :: Name              -- ^ of the thing being checked
  -> TyConFlavour      -- ^ What sort of 'TyCon' is being checked
  -> LHsQTyVars GhcRn
  -> TcM ContextKind   -- ^ The result kind
  -> TcM TcTyCon       -- ^ A suitably-kinded non-generalized TcTyCon
kcInferDeclHeader name flav
              (HsQTvs { hsq_ext = kv_ns
                      , hsq_explicit = hs_tvs }) kc_res_ki
  -- No standalone kind signature and no CUSK.
  -- See note [Required, Specified, and Inferred for types] in TcTyClsDecls
  = addTyConFlavCtxt name flav $
    do { (scoped_kvs, (tc_tvs, res_kind))
           -- Why bindImplicitTKBndrs_Q_Tv which uses newTyVarTyVar?
           -- See Note [Inferring kinds for type declarations] in TcTyClsDecls
           <- bindImplicitTKBndrs_Q_Tv kv_ns            $
              bindExplicitTKBndrs_Q_Tv ctxt_kind hs_tvs $
              newExpectedKind =<< kc_res_ki
              -- Why "_Tv" not "_Skol"? See third wrinkle in
              -- Note [Inferring kinds for type declarations] in TcTyClsDecls,

       ; let   -- NB: Don't add scoped_kvs to tyConTyVars, because they
               -- might unify with kind vars in other types in a mutually
               -- recursive group.
               -- See Note [Inferring kinds for type declarations] in TcTyClsDecls

             tc_binders = mkAnonTyConBinders VisArg tc_tvs
               -- Also, note that tc_binders has the tyvars from only the
               -- user-written tyvarbinders. See S1 in Note [How TcTyCons work]
               -- in TcTyClsDecls
               --
               -- mkAnonTyConBinder: see Note [No polymorphic recursion]

             all_tv_prs = mkTyVarNamePairs (scoped_kvs ++ tc_tvs)
               -- NB: bindExplicitTKBndrs_Q_Tv does not clone;
               --     ditto Implicit
               -- See Note [Non-cloning for tyvar binders]

             tycon = mkTcTyCon name tc_binders res_kind all_tv_prs
                               False -- not yet generalised
                               flav

       ; traceTc "kcInferDeclHeader: not-cusk" $
         vcat [ ppr name, ppr kv_ns, ppr hs_tvs
              , ppr scoped_kvs
              , ppr tc_tvs, ppr (mkTyConKind tc_binders res_kind) ]
       ; return tycon }
  where
    -- Open type/data families must return a lifted type;
    -- other flavours may return anything
    ctxt_kind | tcFlavourIsOpen flav = TheKind liftedTypeKind
              | otherwise            = AnyKind

kcInferDeclHeader _ _ (XLHsQTyVars nec) _ = noExtCon nec
-- | Kind-check a declaration header against a standalone kind signature.
-- See Note [Arity inference in kcCheckDeclHeader_sig]
--
-- The resulting TcTyCon satisfies (tyConKind tc == kisig); the work here
-- is deciding how to split the signature into tyConBinders vs tyConResKind,
-- which determines the TyCon's arity.
kcCheckDeclHeader_sig
  :: Kind              -- ^ Standalone kind signature, fully zonked! (zonkTcTypeToType)
  -> Name              -- ^ of the thing being checked
  -> TyConFlavour      -- ^ What sort of 'TyCon' is being checked
  -> LHsQTyVars GhcRn  -- ^ Binders in the header
  -> TcM ContextKind   -- ^ The result kind. AnyKind == no result signature
  -> TcM TcTyCon       -- ^ A suitably-kinded TcTyCon
kcCheckDeclHeader_sig kisig name flav
          (HsQTvs { hsq_ext = implicit_nms
                  , hsq_explicit = explicit_nms }) kc_res_ki
  = addTyConFlavCtxt name flav $
    do {  -- Step 1: zip user-written binders with quantifiers from the kind signature.
          -- For example:
          --
          --   type F :: forall k -> k -> forall j. j -> Type
          --   data F i a b = ...
          --
          -- Results in the following 'zipped_binders':
          --
          --                   TyBinder      LHsTyVarBndr
          --    ---------------------------------------
          --    ZippedBinder   forall k ->   i
          --    ZippedBinder   k ->          a
          --    ZippedBinder   forall j.
          --    ZippedBinder   j ->          b
          --
          let (zipped_binders, excess_bndrs, kisig') = zipBinders kisig explicit_nms

          -- Report binders that don't have a corresponding quantifier.
          -- For example:
          --
          --   type T :: Type -> Type
          --   data T b1 b2 b3 = ...
          --
          -- Here, b1 is zipped with Type->, while b2 and b3 are excess binders.
          --
       ; unless (null excess_bndrs) $ failWithTc (tooManyBindersErr kisig' excess_bndrs)

          -- Convert each ZippedBinder to TyConBinder        for  tyConBinders
          --                       and to [(Name, TcTyVar)]  for  tcTyConScopedTyVars
       ; (vis_tcbs, concat -> explicit_tv_prs) <- mapAndUnzipM zipped_to_tcb zipped_binders

       ; (implicit_tvs, (invis_binders, r_ki))
           <- pushTcLevelM_ $
              solveEqualities $  -- #16687
              bindImplicitTKBndrs_Tv implicit_nms $
              tcExtendNameTyVarEnv explicit_tv_prs $
              do { -- Check that inline kind annotations on binders are valid.
                   -- For example:
                   --
                   --   type T :: Maybe k -> Type
                   --   data T (a :: Maybe j) = ...
                   --
                   -- Here we unify   Maybe k ~ Maybe j
                   mapM_ check_zipped_binder zipped_binders

                   -- Kind-check the result kind annotation, if present:
                   --
                   --    data T a b :: res_ki where
                   --               ^^^^^^^^^
                   -- We do it here because at this point the environment has been
                   -- extended with both 'implicit_tcv_prs' and 'explicit_tv_prs'.
                 ; ctx_k <- kc_res_ki
                 ; m_res_ki <- case ctx_k of
                                  AnyKind -> return Nothing
                                  _ -> Just <$> newExpectedKind ctx_k

                   -- Step 2: split off invisible binders.
                   -- For example:
                   --
                   --   type F :: forall k1 k2. (k1, k2) -> Type
                   --   type family F
                   --
                   -- Does 'forall k1 k2' become a part of 'tyConBinders' or 'tyConResKind'?
                   -- See Note [Arity inference in kcCheckDeclHeader_sig]
                 ; let (invis_binders, r_ki) = split_invis kisig' m_res_ki

                   -- Check that the inline result kind annotation is valid.
                   -- For example:
                   --
                   --   type T :: Type -> Maybe k
                   --   type family T a :: Maybe j where
                   --
                   -- Here we unify   Maybe k ~ Maybe j
                 ; whenIsJust m_res_ki $ \res_ki ->
                      discardResult $ -- See Note [discardResult in kcCheckDeclHeader_sig]
                      unifyKind Nothing r_ki res_ki

                 ; return (invis_binders, r_ki) }

       -- Zonk the implicitly quantified variables.
       ; implicit_tvs <- mapM zonkTcTyVarToTyVar implicit_tvs

       -- Convert each invisible TyCoBinder to TyConBinder for tyConBinders.
       ; invis_tcbs <- mapM invis_to_tcb invis_binders

       -- Build the final, generalized TcTyCon
       ; let tcbs            = vis_tcbs ++ invis_tcbs
             implicit_tv_prs = implicit_nms `zip` implicit_tvs
             all_tv_prs      = implicit_tv_prs ++ explicit_tv_prs
             tc = mkTcTyCon name tcbs r_ki all_tv_prs True flav

       ; traceTc "kcCheckDeclHeader_sig done:" $ vcat
          [ text "tyConName = " <+> ppr (tyConName tc)
          , text "kisig =" <+> debugPprType kisig
          , text "tyConKind =" <+> debugPprType (tyConKind tc)
          , text "tyConBinders = " <+> ppr (tyConBinders tc)
          , text "tcTyConScopedTyVars" <+> ppr (tcTyConScopedTyVars tc)
          , text "tyConResKind" <+> debugPprType (tyConResKind tc)
          ]
       ; return tc }
  where
    -- Consider this declaration:
    --
    --    type T :: forall a. forall b -> (a~b) => Proxy a -> Type
    --    data T x p = MkT
    --
    -- Here, we have every possible variant of ZippedBinder:
    --
    --                   TyBinder           LHsTyVarBndr
    --    ----------------------------------------------
    --    ZippedBinder   forall {k}.
    --    ZippedBinder   forall (a::k).
    --    ZippedBinder   forall (b::k) ->   x
    --    ZippedBinder   (a~b) =>
    --    ZippedBinder   Proxy a ->         p
    --
    -- Given a ZippedBinder zipped_to_tcb produces:
    --
    --  * TyConBinder      for  tyConBinders
    --  * (Name, TcTyVar)  for  tcTyConScopedTyVars, if there's a user-written LHsTyVarBndr
    --
    zipped_to_tcb :: ZippedBinder -> TcM (TyConBinder, [(Name, TcTyVar)])
    zipped_to_tcb zb = case zb of

      -- Inferred variable, no user-written binder.
      -- Example:   forall {k}.
      ZippedBinder (Named (Bndr v Specified)) Nothing ->
        return (mkNamedTyConBinder Specified v, [])

      -- Specified variable, no user-written binder.
      -- Example:   forall (a::k).
      ZippedBinder (Named (Bndr v Inferred)) Nothing ->
        return (mkNamedTyConBinder Inferred v, [])

      -- Constraint, no user-written binder.
      -- Example:   (a~b) =>
      ZippedBinder (Anon InvisArg bndr_ki) Nothing -> do
        name <- newSysName (mkTyVarOccFS (fsLit "ev"))
        let tv = mkTyVar name bndr_ki
        return (mkAnonTyConBinder InvisArg tv, [])

      -- Non-dependent visible argument with a user-written binder.
      -- Example:   Proxy a ->
      ZippedBinder (Anon VisArg bndr_ki) (Just b) ->
        return $
          let v_name = getName b
              tv  = mkTyVar v_name bndr_ki
              tcb = mkAnonTyConBinder VisArg tv
          in (tcb, [(v_name, tv)])

      -- Dependent visible argument with a user-written binder.
      -- Example:   forall (b::k) ->
      ZippedBinder (Named (Bndr v Required)) (Just b) ->
        return $
          let v_name = getName b
              tcb = mkNamedTyConBinder Required v
          in (tcb, [(v_name, v)])

      -- 'zipBinders' does not produce any other variants of ZippedBinder.
      _ -> panic "goVis: invalid ZippedBinder"

    -- Given an invisible binder that comes from 'split_invis',
    -- convert it to TyConBinder.
    invis_to_tcb :: TyCoBinder -> TcM TyConBinder
    invis_to_tcb tb = do
      (tcb, stv) <- zipped_to_tcb (ZippedBinder tb Nothing)
      MASSERT(null stv)
      return tcb

    -- Check that the inline kind annotation on a binder is valid
    -- by unifying it with the kind of the quantifier.
    check_zipped_binder :: ZippedBinder -> TcM ()
    check_zipped_binder (ZippedBinder _ Nothing) = return ()
    check_zipped_binder (ZippedBinder tb (Just b)) =
      case unLoc b of
        UserTyVar _ _ -> return ()
        KindedTyVar _ v v_hs_ki -> do
          v_ki <- tcLHsKindSig (TyVarBndrKindCtxt (unLoc v)) v_hs_ki
          discardResult $ -- See Note [discardResult in kcCheckDeclHeader_sig]
            unifyKind (Just (HsTyVar noExtField NotPromoted v))
                      (tyBinderType tb)
                      v_ki
        XTyVarBndr nec -> noExtCon nec

    -- Split the invisible binders that should become a part of 'tyConBinders'
    -- rather than 'tyConResKind'.
    -- See Note [Arity inference in kcCheckDeclHeader_sig]
    split_invis :: Kind -> Maybe Kind -> ([TyCoBinder], Kind)
    split_invis sig_ki Nothing =
      -- instantiate all invisible binders
      splitPiTysInvisible sig_ki
    split_invis sig_ki (Just res_ki) =
      -- subtraction a la checkExpectedKind
      let n_res_invis_bndrs = invisibleTyBndrCount res_ki
          n_sig_invis_bndrs = invisibleTyBndrCount sig_ki
          n_inst = n_sig_invis_bndrs - n_res_invis_bndrs
      in splitPiTysInvisibleN n_inst sig_ki

kcCheckDeclHeader_sig _ _ _ (XLHsQTyVars nec) _ = noExtCon nec
-- A quantifier from a kind signature zipped with a user-written binder for it.
-- The binder is 'Nothing' when the quantifier is invisible, because then the
-- user could not have written a binder for it.
data ZippedBinder =
  ZippedBinder TyBinder (Maybe (LHsTyVarBndr GhcRn))
-- See Note [Arity inference in kcCheckDeclHeader_sig]
zipBinders
  :: Kind                      -- kind signature
  -> [LHsTyVarBndr GhcRn]      -- user-written binders
  -> ([ZippedBinder],          -- zipped binders
      [LHsTyVarBndr GhcRn],    -- remaining user-written binders
      Kind)                    -- remainder of the kind signature
zipBinders sig_kind bndrs = go [] sig_kind bndrs
  where
    -- Flip the accumulator back into source order and stop
    finish acc rest ki = (reverse acc, rest, ki)

    go acc ki [] = finish acc [] ki
    go acc ki all_bs@(b:bs)
      = case tcSplitPiTy_maybe ki of
          Nothing -> finish acc all_bs ki
          Just (tb, ki')
            | consumes_binder tb -> go (ZippedBinder tb (Just b) : acc) ki' bs
            | otherwise          -> go (ZippedBinder tb Nothing  : acc) ki' all_bs

    -- Visible quantifiers (required foralls and visible arrows) consume a
    -- user-written binder; invisible quantifiers do not.
    consumes_binder (Named (Bndr _ Required))  = True
    consumes_binder (Named (Bndr _ Specified)) = False
    consumes_binder (Named (Bndr _ Inferred))  = False
    consumes_binder (Anon VisArg _)            = True
    consumes_binder (Anon InvisArg _)          = False
-- | Error message for a declaration header that has more binders than its
-- standalone kind signature can account for.
tooManyBindersErr :: Kind -> [LHsTyVarBndr GhcRn] -> SDoc
tooManyBindersErr ki bndrs = kind_doc $$ bndrs_doc
  where
    kind_doc  = hang (text "Not a function kind:") 4 (ppr ki)
    bndrs_doc = hang (text "but extra binders found:") 4 (fsep (map ppr bndrs))
{- Note [Arity inference in kcCheckDeclHeader_sig]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given a kind signature 'kisig' and a declaration header, kcCheckDeclHeader_sig
verifies that the declaration conforms to the signature. The end result is a
TcTyCon 'tc' such that:
tyConKind tc == kisig
This TcTyCon would be rather easy to produce if we didn't have to worry about
arity. Consider these declarations:
type family S1 :: forall k. k -> Type
type family S2 (a :: k) :: Type
Both S1 and S2 can be given the same standalone kind signature:
type S2 :: forall k. k -> Type
And, indeed, tyConKind S1 == tyConKind S2. However, tyConKind is built from
tyConBinders and tyConResKind, such that
tyConKind tc == mkTyConKind (tyConBinders tc) (tyConResKind tc)
For S1 and S2, tyConBinders and tyConResKind are different:
tyConBinders S1 == []
tyConResKind S1 == forall k. k -> Type
tyConKind S1 == forall k. k -> Type
tyConBinders S2 == [spec k, anon-vis (a :: k)]
tyConResKind S2 == Type
tyConKind     S2 == forall k. k -> Type
This difference determines the arity:
tyConArity tc == length (tyConBinders tc)
That is, the arity of S1 is 0, while the arity of S2 is 2.
'kcCheckDeclHeader_sig' needs to infer the desired arity to split the standalone
kind signature into binders and the result kind. It does so in two rounds:
1. zip user-written binders (vis_tcbs)
2. split off invisible binders (invis_tcbs)
Consider the following declarations:
type F :: Type -> forall j. j -> forall k1 k2. (k1, k2) -> Type
type family F a b
type G :: Type -> forall j. j -> forall k1 k2. (k1, k2) -> Type
type family G a b :: forall r2. (r1, r2) -> Type
In step 1 (zip user-written binders), we zip the quantifiers in the signature
with the binders in the header using 'zipBinders'. In both F and G, this results in
the following zipped binders:
TyBinder LHsTyVarBndr
---------------------------------------
ZippedBinder Type -> a
ZippedBinder forall j.
ZippedBinder j -> b
At this point, we have accumulated three zipped binders which correspond to a
prefix of the standalone kind signature:
Type -> forall j. j -> ...
In step 2 (split off invisible binders), we have to decide how much remaining
invisible binders of the standalone kind signature to split off:
forall k1 k2. (k1, k2) -> Type
^^^^^^^^^^^^^
split off or not?
This decision is made in 'split_invis':
* If a user-written result kind signature is not provided, as in F,
then split off all invisible binders. This is why we need special treatment
for AnyKind.
* If a user-written result kind signature is provided, as in G,
then do as checkExpectedKind does and split off (n_sig - n_res) binders.
That is, split off such an amount of binders that the remainder of the
standalone kind signature and the user-written result kind signature have the
same amount of invisible quantifiers.
For F, split_invis splits away all invisible binders, and we have 2:
forall k1 k2. (k1, k2) -> Type
^^^^^^^^^^^^^
split away both binders
The resulting arity of F is 3+2=5. (length vis_tcbs = 3,
length invis_tcbs = 2,
length tcbs = 5)
For G, split_invis decides to split off 1 invisible binder, so that we have the
same amount of invisible quantifiers left:
res_ki = forall r2. (r1, r2) -> Type
kisig = forall k1 k2. (k1, k2) -> Type
^^^
split off this one.
The resulting arity of G is 3+1=4. (length vis_tcbs = 3,
length invis_tcbs = 1,
length tcbs = 4)
-}
{- Note [discardResult in kcCheckDeclHeader_sig]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We use 'unifyKind' to check inline kind annotations in declaration headers
against the signature.
type T :: [i] -> Maybe j -> Type
data T (a :: [k1]) (b :: Maybe k2) :: Type where ...
Here, we will unify:
[k1] ~ [i]
Maybe k2 ~ Maybe j
Type ~ Type
The end result is that we fill in unification variables k1, k2:
k1 := i
k2 := j
We also validate that the user isn't confused:
type T :: Type -> Type
data T (a :: Bool) = ...
This will report that (Type ~ Bool) failed to unify.
Now, consider the following example:
type family Id a where Id x = x
type T :: Bool -> Type
type T (a :: Id Bool) = ...
We will unify (Bool ~ Id Bool), and this will produce a non-reflexive coercion.
However, we are free to discard it, as the kind of 'T' is determined by the
signature, not by the inline kind annotation:
we have T :: Bool -> Type
rather than T :: Id Bool -> Type
This (Id Bool) will not show up anywhere after we're done validating it, so we
have no use for the produced coercion.
-}
{- Note [No polymorphic recursion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Should this kind-check?
data T ka (a::ka) b = MkT (T Type Int Bool)
(T (Type -> Type) Maybe Bool)
Notice that T is used at two different kinds in its RHS. No!
This should not kind-check. Polymorphic recursion is known to
be a tough nut.
Previously, we laboriously (with help from the renamer)
tried to give T the polymorphic kind
T :: forall ka -> ka -> kappa -> Type
where kappa is a unification variable, even in the inferInitialKinds
phase (which is what kcInferDeclHeader is all about). But
that is dangerously fragile (see the ticket).
Solution: make kcInferDeclHeader give T a straightforward
monomorphic kind, with no quantification whatsoever. That's why
we use mkAnonTyConBinder for all arguments when figuring out
tc_binders.
But notice that (#16322 comment:3)
* The algorithm successfully kind-checks this declaration:
data T2 ka (a::ka) = MkT2 (T2 Type a)
Starting with (inferInitialKinds)
T2 :: (kappa1 :: kappa2 :: *) -> (kappa3 :: kappa4 :: *) -> *
we get
kappa4 := kappa1 -- from the (a:ka) kind signature
kappa1 := Type -- From application T2 Type
These constraints are soluble so generaliseTcTyCon gives
T2 :: forall (k::Type) -> k -> *
But now the /typechecking/ (aka desugaring, tcTyClDecl) phase
fails, because the call (T2 Type a) in the RHS is ill-kinded.
We'd really prefer all errors to show up in the kind checking
phase.
* This algorithm still accepts (in all phases)
data T3 ka (a::ka) = forall b. MkT3 (T3 Type b)
although T3 is really polymorphic-recursive too.
Perhaps we should somehow reject that.
Note [Kind-checking tyvar binders for associated types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When kind-checking the type-variable binders for associated
data/newtype decls
family decls
we behave specially for type variables that are already in scope;
that is, bound by the enclosing class decl. This is done in
kcLHsQTyVarBndrs:
* The use of tcImplicitQTKBndrs
* The tcLookupLocal_maybe code in kc_hs_tv
See Note [Associated type tyvar names] in Class and
Note [TyVar binders for associated decls] in GHC.Hs.Decls
We must do the same for family instance decls, where the in-scope
variables may be bound by the enclosing class instance decl.
Hence the use of tcImplicitQTKBndrs in tcFamTyPatsAndGen.
Note [Kind variable ordering for associated types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What should be the kind of `T` in the following example? (#15591)
class C (a :: Type) where
type T (x :: f a)
As per Note [Ordering of implicit variables] in GHC.Rename.Types, we want to quantify
the kind variables in left-to-right order of first occurrence in order to
support visible kind application. But we cannot perform this analysis on just
T alone, since its variable `a` actually occurs /before/ `f` if you consider
the fact that `a` was previously bound by the parent class `C`. That is to say,
the kind of `T` should end up being:
T :: forall a f. f a -> Type
(It wouldn't necessarily be /wrong/ if the kind ended up being, say,
forall f a. f a -> Type, but that would not be as predictable for users of
visible kind application.)
In contrast, if `T` were redefined to be a top-level type family, like `T2`
below:
type family T2 (x :: f (a :: Type))
Then `a` first appears /after/ `f`, so the kind of `T2` should be:
T2 :: forall f a. f a -> Type
In order to make this distinction, we need to know (in kcCheckDeclHeader) which
type variables have been bound by the parent class (if there is one). With
the class-bound variables in hand, we can ensure that we always quantify
these first.
-}
{- *********************************************************************
* *
Expected kinds
* *
********************************************************************* -}
-- | Describes the kind expected in a certain context.
-- Elaborated to an actual 'Kind' by 'newExpectedKind'.
data ContextKind = TheKind Kind   -- ^ a specific kind
                 | AnyKind        -- ^ any kind will do (becomes a fresh meta kind variable)
                 | OpenKind       -- ^ something of the form @TYPE _@ (a fresh open type kind)
-----------------------
-- | Turn a 'ContextKind' description into an actual 'Kind': a fixed kind
-- is returned as-is, while the flexible cases allocate a suitable fresh
-- metavariable.
newExpectedKind :: ContextKind -> TcM Kind
newExpectedKind ck = case ck of
  TheKind k -> return k
  AnyKind   -> newMetaKindVar
  OpenKind  -> newOpenTypeKind
-----------------------
expectedKindInCtxt :: UserTypeCtxt -> ContextKind
-- Depending on the context, we might accept any kind (for instance, in a TH
-- splice), or only certain kinds (like in type signatures).
expectedKindInCtxt ctxt = case ctxt of
  TySynCtxt _     -> AnyKind
  ThBrackCtxt     -> AnyKind
  GhciCtxt {}     -> AnyKind
  -- The types in a 'default' decl can have varying kinds
  -- See Note [Extended defaults] in TcEnv
  DefaultDeclCtxt -> AnyKind
  TypeAppCtxt     -> AnyKind
  ForSigCtxt _    -> TheKind liftedTypeKind
  InstDeclCtxt {} -> TheKind constraintKind
  SpecInstCtxt    -> TheKind constraintKind
  _               -> OpenKind
{- *********************************************************************
* *
Bringing type variables into scope
* *
********************************************************************* -}
{- Note [Non-cloning for tyvar binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
bindExplicitTKBndrs_Q_Skol and bindExplicitTKBndrs_Skol do not clone;
and nor do the Implicit versions.  There is no need.
bindExplicitTKBndrs_Q_Tv does not clone; and similarly Implicit.
We take advantage of this in kcInferDeclHeader:
all_tv_prs = mkTyVarNamePairs (scoped_kvs ++ tc_tvs)
If we cloned, we'd need to take a bit more care here; not hard.
The main payoff is that avoiding gratuitous cloning means that we can
almost always take the fast path in swizzleTcTyConBndrs.  "Almost
always" means not the case of mutual recursion with polymorphic kinds.
Note [Cloning for tyvar binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
bindExplicitTKBndrs_Tv does cloning, making up a Name with a fresh Unique,
unlike bindExplicitTKBndrs_Q_Tv. (Nor do the Skol variants clone.)
And similarly for bindImplicit...
This for a narrow and tricky reason which, alas, I couldn't find a
simpler way round. #16221 is the poster child:
data SameKind :: k -> k -> *
data T a = forall k2 (b :: k2). MkT (SameKind a b) !Int
When kind-checking T, we give (a :: kappa1). Then:
- In kcConDecl we make a TyVarTv unification variable kappa2 for k2
(as described in Note [Kind-checking for GADTs], even though this
example is an existential)
- So we get (b :: kappa2) via bindExplicitTKBndrs_Tv
- We end up unifying kappa1 := kappa2, because of the (SameKind a b)
Now we generalise over kappa2. But if kappa2's Name is precisely k2
(i.e. we did not clone) we'll end up giving T the utterly final kind
T :: forall k2. k2 -> *
Nothing directly wrong with that but when we typecheck the data constructor
we have k2 in scope; but then it's brought into scope /again/ when we find
the forall k2. This is chaotic, and we end up giving it the type
MkT :: forall k2 (a :: k2) k2 (b :: k2).
SameKind @k2 a b -> Int -> T @{k2} a
which is bogus -- because of the shadowing of k2, we can't
apply T to the kind or a!
And there no reason /not/ to clone the Name when making a unification
variable. So that's what we do.
-}
--------------------------------------
-- Implicit binders
--------------------------------------
-- | Bring implicitly-quantified type\/kind variables into scope for the
-- given computation.  The returned 'TcTyVar's are in 1-1 correspondence
-- with the input 'Name's.
--
--   * @_Skol@ variants make skolems; @_Tv@ variants make TyVarTv
--     metavariables
--   * @_Q_@ variants re-use a tyvar already bound in the local
--     environment (e.g. by an enclosing class decl); see 'newImplicitTyVarQ'
bindImplicitTKBndrs_Skol, bindImplicitTKBndrs_Tv,
  bindImplicitTKBndrs_Q_Skol, bindImplicitTKBndrs_Q_Tv
  :: [Name] -> TcM a -> TcM ([TcTyVar], a)
bindImplicitTKBndrs_Q_Skol = bindImplicitTKBndrsX (newImplicitTyVarQ newFlexiKindedSkolemTyVar)
bindImplicitTKBndrs_Q_Tv   = bindImplicitTKBndrsX (newImplicitTyVarQ newFlexiKindedTyVarTyVar)
bindImplicitTKBndrs_Skol   = bindImplicitTKBndrsX newFlexiKindedSkolemTyVar
bindImplicitTKBndrs_Tv     = bindImplicitTKBndrsX cloneFlexiKindedTyVarTyVar
  -- newFlexiKinded...           see Note [Non-cloning for tyvar binders]
  -- cloneFlexiKindedTyVarTyVar: see Note [Cloning for tyvar binders]
-- | Worker for the @bindImplicitTKBndrs_*@ family: allocate one tyvar per
-- 'Name' with the supplied allocator, extend the local type environment
-- with the (Name, tyvar) pairs, and run the continuation in that scope.
bindImplicitTKBndrsX
   :: (Name -> TcM TcTyVar) -- new_tv function
   -> [Name]
   -> TcM a
   -> TcM ([TcTyVar], a)    -- Returned [TcTyVar] are in 1-1 correspondence
                            -- with the passed in [Name]
bindImplicitTKBndrsX mk_tv names inner
  = do { new_tkvs <- mapM mk_tv names
       ; traceTc "bindImplicitTKBndrs" (ppr names $$ ppr new_tkvs)
       ; result <- tcExtendNameTyVarEnv (zip names new_tkvs) inner
       ; return (new_tkvs, result) }
-- | Like the supplied allocator, except that a type variable already bound
-- in the local environment (e.g. by an enclosing class declaration) is
-- re-used rather than freshly made.
newImplicitTyVarQ :: (Name -> TcM TcTyVar) -> Name -> TcM TcTyVar
newImplicitTyVarQ make_tv nm
  = tcLookupLcl_maybe nm >>= \ lookup_res ->
    case lookup_res of
      Just (ATyVar _ existing_tv) -> return existing_tv
      _                           -> make_tv nm
-- | Make a type variable whose kind is a fresh kind metavariable.
newFlexiKindedTyVar :: (Name -> Kind -> TcM TyVar) -> Name -> TcM TyVar
newFlexiKindedTyVar mk_tv nm = newMetaKindVar >>= mk_tv nm
-- | Skolem type variable with a fresh metavariable kind.
newFlexiKindedSkolemTyVar :: Name -> TcM TyVar
newFlexiKindedSkolemTyVar = newFlexiKindedTyVar newSkolemTyVar
-- | TyVarTv with a fresh metavariable kind; keeps the given Name.
newFlexiKindedTyVarTyVar :: Name -> TcM TyVar
newFlexiKindedTyVarTyVar = newFlexiKindedTyVar newTyVarTyVar
-- | TyVarTv with a fresh metavariable kind; clones the given Name
-- (fresh Unique).
cloneFlexiKindedTyVarTyVar :: Name -> TcM TyVar
cloneFlexiKindedTyVarTyVar = newFlexiKindedTyVar cloneTyVarTyVar
  -- See Note [Cloning for tyvar binders]
--------------------------------------
-- Explicit binders
--------------------------------------
-- | Bring explicitly-written (e.g. forall-bound) type variable binders
-- into scope for the given computation; skolem vs TyVarTv variants.
-- Returned tyvars are in 1-1 correspondence with the binders.
bindExplicitTKBndrs_Skol, bindExplicitTKBndrs_Tv
    :: [LHsTyVarBndr GhcRn]
    -> TcM a
    -> TcM ([TcTyVar], a)
bindExplicitTKBndrs_Skol = bindExplicitTKBndrsX (tcHsTyVarBndr newSkolemTyVar)
bindExplicitTKBndrs_Tv   = bindExplicitTKBndrsX (tcHsTyVarBndr cloneTyVarTyVar)
  -- newSkolemTyVar: see Note [Non-cloning for tyvar binders]
  -- cloneTyVarTyVar: see Note [Cloning for tyvar binders]
-- | As 'bindExplicitTKBndrs_Skol'\/'bindExplicitTKBndrs_Tv', but for
-- binders of a type\/class declaration header: re-uses in-scope class
-- tyvars where present, and takes a 'ContextKind' for binders without a
-- kind annotation (see 'tcHsQTyVarBndr').
bindExplicitTKBndrs_Q_Skol, bindExplicitTKBndrs_Q_Tv
    :: ContextKind
    -> [LHsTyVarBndr GhcRn]
    -> TcM a
    -> TcM ([TcTyVar], a)
bindExplicitTKBndrs_Q_Skol ctxt_kind = bindExplicitTKBndrsX (tcHsQTyVarBndr ctxt_kind newSkolemTyVar)
bindExplicitTKBndrs_Q_Tv   ctxt_kind = bindExplicitTKBndrsX (tcHsQTyVarBndr ctxt_kind newTyVarTyVar)
  -- See Note [Non-cloning for tyvar binders]
-- | Worker for the @bindExplicitTKBndrs_*@ family: kind-check each binder
-- with the supplied function, extending the local environment binder by
-- binder so that earlier binders scope over the kinds of later ones.
bindExplicitTKBndrsX
    :: (HsTyVarBndr GhcRn -> TcM TcTyVar)
    -> [LHsTyVarBndr GhcRn]
    -> TcM a
    -> TcM ([TcTyVar], a)  -- Returned [TcTyVar] are in 1-1 correspondence
                           -- with the passed-in [LHsTyVarBndr]
bindExplicitTKBndrsX tc_tv hs_tvs thing_inside
  = do { traceTc "bindExplicitTKBndrs" (ppr hs_tvs)
         -- Fixed trace label: was misspelled "bindExplicTKBndrs", which
         -- made the -ddump-tc-trace output hard to grep for
       ; go hs_tvs }
  where
    go [] = do { res <- thing_inside
               ; return ([], res) }
    go (L _ hs_tv : hs_tvs)
      = do { tv <- tc_tv hs_tv
             -- Extend the environment as we go, in case a binder
             -- is mentioned in the kind of a later binder
             --   e.g. forall k (a::k). blah
             -- NB: tv's Name may differ from hs_tv's
             -- See TcMType Note [Cloning for tyvar binders]
           ; (tvs,res) <- tcExtendNameTyVarEnv [(hsTyVarName hs_tv, tv)] $
                          go hs_tvs
           ; return (tv:tvs, res) }
-----------------
-- | Kind-check one explicit binder: an unannotated binder gets a fresh
-- metavariable kind, while an annotated binder gets its user-written kind
-- (checked via 'tcLHsKindSig').
tcHsTyVarBndr :: (Name -> Kind -> TcM TyVar)
              -> HsTyVarBndr GhcRn -> TcM TcTyVar
tcHsTyVarBndr mk_tv bndr = case bndr of
  UserTyVar _ (L _ nm)
    -> newMetaKindVar >>= mk_tv nm
  KindedTyVar _ (L _ nm) hs_kind
    -> tcLHsKindSig (TyVarBndrKindCtxt nm) hs_kind >>= mk_tv nm
  XTyVarBndr nec
    -> noExtCon nec
-----------------
tcHsQTyVarBndr :: ContextKind
               -> (Name -> Kind -> TcM TyVar)
               -> HsTyVarBndr GhcRn -> TcM TcTyVar
-- Just like tcHsTyVarBndr, but also
--   - uses the in-scope TyVar from class, if it exists
--   - takes a ContextKind to use for the no-sig case
tcHsQTyVarBndr ctxt_kind new_tv (UserTyVar _ (L _ tv_nm))
  = do { mb_tv <- tcLookupLcl_maybe tv_nm
       ; case mb_tv of
           -- Already bound (by the enclosing class decl): re-use it
           Just (ATyVar _ tv) -> return tv
           -- Otherwise invent one; kind is driven by the ContextKind
           _ -> do { kind <- newExpectedKind ctxt_kind
                   ; new_tv tv_nm kind } }
tcHsQTyVarBndr _ new_tv (KindedTyVar _ (L _ tv_nm) lhs_kind)
  = do { kind <- tcLHsKindSig (TyVarBndrKindCtxt tv_nm) lhs_kind
       ; mb_tv <- tcLookupLcl_maybe tv_nm
       ; case mb_tv of
           Just (ATyVar _ tv)
             -- The in-scope tyvar's kind must agree with the user-written
             -- annotation; the resulting coercion is discarded because the
             -- existing tyvar (with its existing kind) is what we return
             -> do { discardResult $ unifyKind (Just hs_tv)
                                        kind (tyVarKind tv)
                       -- This unify rejects:
                       --   class C (m :: * -> *) where
                       --     type F (m :: *) = ...
                   ; return tv }
           _ -> new_tv tv_nm kind }
  where
    hs_tv = HsTyVar noExtField NotPromoted (noLoc tv_nm)
            -- Used for error messages only
tcHsQTyVarBndr _ _ (XTyVarBndr nec) = noExtCon nec
--------------------------------------
-- Binding type/class variables in the
-- kind-checking and typechecking phases
--------------------------------------
bindTyClTyVars :: Name
               -> ([TyConBinder] -> Kind -> TcM a) -> TcM a
-- ^ Used for the type variables of a type or class decl
-- in the "kind checking" and "type checking" pass,
-- but not in the initial-kind run.
bindTyClTyVars tc_name k
  = do { tc <- kcLookupTcTyCon tc_name
         -- The TcTyCon carries the binders, result kind and scoped
         -- tyvars made by the initial-kind pass; just read them off
       ; let prs   = tcTyConScopedTyVars tc
       ; let bndrs = tyConBinders tc
       ; let rk    = tyConResKind tc
       ; traceTc "bindTyClTyVars" (ppr tc_name <+> ppr bndrs $$ ppr prs)
       ; tcExtendNameTyVarEnv prs (k bndrs rk) }
-- inferInitialKind has made a suitably-shaped kind for the type or class
-- Look it up in the local environment. This is used only for tycons
-- that we're currently type-checking, so we're sure to find a TcTyCon.
kcLookupTcTyCon :: Name -> TcM TcTyCon
kcLookupTcTyCon nm
  = tcLookup nm >>= \ tc_ty_thing ->
    case tc_ty_thing of
      ATcTyCon tc -> return tc
      _           -> pprPanic "kcLookupTcTyCon" (ppr tc_ty_thing)
{- *********************************************************************
* *
Kind generalisation
* *
********************************************************************* -}
-- | Zonk-and-skolemise the given tyvars and return them in a stable
-- topological (well-scoped) order, following
-- Note [Ordering of implicit variables] in GHC.Rename.Types.
zonkAndScopedSort :: [TcTyVar] -> TcM [TcTyVar]
zonkAndScopedSort spec_tkvs
  -- zonkAndSkolemise (rather than plain skolemisation) because a
  -- skol_tv might be a TyVarTv
  = scopedSort <$> mapM zonkAndSkolemise spec_tkvs
-- | Generalize some of the free variables in the given type.
-- All such variables should be *kind* variables; any type variables
-- should be explicitly quantified (with a `forall`) before now.
-- The supplied predicate says which free variables to quantify.
-- But in all cases,
-- generalize only those variables whose TcLevel is strictly greater
-- than the ambient level. This "strictly greater than" means that
-- you likely need to push the level before creating whatever type
-- gets passed here. Any variable whose level is greater than the
-- ambient level but is not selected to be generalized will be
-- promoted. (See [Promoting unification variables] in TcSimplify
-- and Note [Recipe for checking a signature].)
-- The resulting KindVar are the variables to
-- quantify over, in the correct, well-scoped order. They should
-- generally be Inferred, not Specified, but that's really up to
-- the caller of this function.
kindGeneralizeSome :: (TcTyVar -> Bool)
                   -> TcType    -- ^ needn't be zonked
                   -> TcM [KindVar]
kindGeneralizeSome should_gen kind_or_type
  = do { traceTc "kindGeneralizeSome {" (ppr kind_or_type)
         -- use the "Kind" variant here, as any types we see
         -- here will already have all type variables quantified;
         -- thus, every free variable is really a kv, never a tv.
       ; dvs <- candidateQTyVarsOfKind kind_or_type
       -- So 'dvs' are the variables free in kind_or_type, with a level greater
       -- than the ambient level, hence candidates for quantification
       -- Next: filter out the ones we don't want to generalize (specified by should_gen)
       -- and promote them instead
       ; let (to_promote, dvs') = partitionCandidates dvs (not . should_gen)
         -- NB: promotion happens before quantification; the promoted set is
         -- reported only in the trace output below
       ; (_, promoted) <- promoteTyVarSet (dVarSetToVarSet to_promote)
       ; qkvs <- quantifyTyVars dvs'
       ; traceTc "kindGeneralizeSome }" $
         vcat [ text "Kind or type:" <+> ppr kind_or_type
              , text "dvs:" <+> ppr dvs
              , text "dvs':" <+> ppr dvs'
              , text "to_promote:" <+> pprTyVars (dVarSetElems to_promote)
              , text "promoted:" <+> pprTyVars (nonDetEltsUniqSet promoted)
              , text "qkvs:" <+> pprTyVars qkvs ]
       ; return qkvs }
-- | Specialized version of 'kindGeneralizeSome', but where all variables
-- can be generalized. Use this variant when you can be sure that no more
-- constraints on the type's metavariables will arise or be solved.
kindGeneralizeAll :: TcType  -- needn't be zonked
                  -> TcM [KindVar]
kindGeneralizeAll ty
  = do { traceTc "kindGeneralizeAll" empty
       ; kindGeneralizeSome (const True) ty }
-- | Specialized version of 'kindGeneralizeSome', but where no variables
-- can be generalized. Use this variant when it is unknowable whether metavariables
-- might later be constrained.
--
-- See Note [Recipe for checking a signature] for why and where this
-- function is needed.
kindGeneralizeNone :: TcType  -- needn't be zonked
                   -> TcM ()
kindGeneralizeNone ty
  = do { traceTc "kindGeneralizeNone" empty
       ; kvs <- kindGeneralizeSome (const False) ty
         -- Nothing should have been selected for quantification;
         -- MASSERT is a debug-build-only CPP assertion macro
       ; MASSERT( null kvs )
       }
{- Note [Levels and generalisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = e
with no type signature. We are currently at level i.
We must
* Push the level to level (i+1)
* Allocate a fresh alpha[i+1] for the result type
* Check that e :: alpha[i+1], gathering constraint WC
* Solve WC as far as possible
* Zonking the result type alpha[i+1], say to beta[i-1] -> gamma[i]
* Find the free variables with level > i, in this case gamma[i]
* Skolemise those free variables and quantify over them, giving
f :: forall g. beta[i-1] -> g
  * Emit the residual constraint wrapped in an implication for g,
thus forall g. WC
All of this happens for types too. Consider
f :: Int -> (forall a. Proxy a -> Int)
Note [Kind generalisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We do kind generalisation only at the outer level of a type signature.
For example, consider
T :: forall k. k -> *
f :: (forall a. T a -> Int) -> Int
When kind-checking f's type signature we generalise the kind at
the outermost level, thus:
f1 :: forall k. (forall (a:k). T k a -> Int) -> Int -- YES!
and *not* at the inner forall:
f2 :: (forall k. forall (a:k). T k a -> Int) -> Int -- NO!
Reason: same as for HM inference on value level declarations,
we want to infer the most general type. The f2 type signature
would be *less applicable* than f1, because it requires a more
polymorphic argument.
NB: There are no explicit kind variables written in f's signature.
When there are, the renamer adds these kind variables to the list of
variables bound by the forall, so you can indeed have a type that's
higher-rank in its kind. But only by explicit request.
Note [Kinds of quantified type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcTyVarBndrsGen quantifies over a specified list of type variables,
*and* over the kind variables mentioned in the kinds of those tyvars.
Note that we must zonk those kinds (obviously) but less obviously, we
must return type variables whose kinds are zonked too. Example
(a :: k7) where k7 := k9 -> k9
We must return
[k9, a:k9->k9]
and NOT
[k9, a:k7]
Reason: we're going to turn this into a for-all type,
forall k9. forall (a:k7). blah
which the type checker will then instantiate, and instantiate does not
look through unification variables!
Hence using zonked_kinds when forming tvs'.
-}
-----------------------------------
etaExpandAlgTyCon :: [TyConBinder]
                  -> Kind
                  -> TcM ([TyConBinder], Kind)
-- GADT decls can have a (perhaps partial) kind signature
--      e.g.  data T a :: * -> * -> * where ...
-- This function makes up suitable (kinded) TyConBinders for the
-- argument kinds.  E.g. in this case it might return
--      ([b::*, c::*], *)
-- Never emits constraints.
-- It's a little trickier than you might think: see
-- Note [TyConBinders for the result kind signature of a data type]
etaExpandAlgTyCon tc_bndrs kind
  = do { loc     <- getSrcSpanM
       ; uniqs   <- newUniqueSupply
       ; rdr_env <- getLocalRdrEnv
         -- Candidate OccNames: all "standard" tyvar name strings that are
         -- neither in the local rdr env nor used by the lhs binders
       ; let new_occs = [ occ
                        | str <- allNameStrings
                        , let occ = mkOccName tvName str
                        , isNothing (lookupLocalRdrOcc rdr_env occ)
                        -- Note [Avoid name clashes for associated data types]
                        , not (occ `elem` lhs_occs) ]
             new_uniqs = uniqsFromSupply uniqs
             subst = mkEmptyTCvSubst (mkInScopeSet (mkVarSet lhs_tvs))
       ; return (go loc new_occs new_uniqs subst [] kind) }
  where
    lhs_tvs  = map binderVar tc_bndrs
    lhs_occs = map getOccName lhs_tvs
    -- Walk down the (result) kind, making an anonymous TyConBinder with a
    -- fresh name/unique for each visible arrow, and renaming/substituting
    -- named binders as we go (see the Note above for why the substitution
    -- is threaded through).
    go loc occs uniqs subst acc kind
      = case splitPiTy_maybe kind of
          Nothing -> (reverse acc, substTy subst kind)
          Just (Anon af arg, kind')
            -> go loc occs' uniqs' subst' (tcb : acc) kind'
            where
              arg'   = substTy subst arg
              tv     = mkTyVar (mkInternalName uniq occ loc) arg'
              subst' = extendTCvInScope subst tv
              tcb    = Bndr tv (AnonTCB af)
              (uniq:uniqs') = uniqs
              (occ:occs')   = occs
          Just (Named (Bndr tv vis), kind')
            -> go loc occs uniqs subst' (tcb : acc) kind'
            where
              (subst', tv') = substTyVarBndr subst tv
              tcb = Bndr tv' (NamedTCB vis)
-- | A description of whether something is a
--
-- * @data@ or @newtype@ ('DataDeclSort')
--
-- * @data instance@ or @newtype instance@ ('DataInstanceSort')
--
-- * @data family@ ('DataFamilySort')
--
-- At present, this data type is only consumed by 'checkDataKindSig'.
data DataSort
  = DataDeclSort     NewOrData   -- ^ @data@ or @newtype@ declaration
  | DataInstanceSort NewOrData   -- ^ @data instance@ or @newtype instance@
  | DataFamilySort               -- ^ @data family@ declaration
-- | Checks that the return kind in a data declaration's kind signature is
-- permissible. There are three cases:
--
-- If dealing with a @data@, @newtype@, @data instance@, or @newtype instance@
-- declaration, check that the return kind is @Type@.
--
-- If the declaration is a @newtype@ or @newtype instance@ and the
-- @UnliftedNewtypes@ extension is enabled, this check is slightly relaxed so
-- that a return kind of the form @TYPE r@ (for some @r@) is permitted.
-- See @Note [Implementation of UnliftedNewtypes]@ in "TcTyClsDecls".
--
-- If dealing with a @data family@ declaration, check that the return kind is
-- either of the form:
--
-- 1. @TYPE r@ (for some @r@), or
--
-- 2. @k@ (where @k@ is a bare kind variable; see #12369)
checkDataKindSig :: DataSort -> Kind -> TcM ()
checkDataKindSig data_sort kind = do
  dflags <- getDynFlags
  -- Accept either a permissible return kind (Type, or TYPE r when
  -- tYPE_ok holds) or, for data families only, a bare kind variable
  checkTc (is_TYPE_or_Type dflags || is_kind_var) (err_msg dflags)
  where
    -- Text describing the declaration form, for the error message
    pp_dec :: SDoc
    pp_dec = text $
      case data_sort of
        DataDeclSort     DataType -> "data type"
        DataDeclSort     NewType  -> "newtype"
        DataInstanceSort DataType -> "data instance"
        DataInstanceSort NewType  -> "newtype instance"
        DataFamilySort            -> "data family"
    is_newtype :: Bool
    is_newtype =
      case data_sort of
        DataDeclSort     new_or_data -> new_or_data == NewType
        DataInstanceSort new_or_data -> new_or_data == NewType
        DataFamilySort               -> False
    is_data_family :: Bool
    is_data_family =
      case data_sort of
        DataDeclSort{}     -> False
        DataInstanceSort{} -> False
        DataFamilySort     -> True
    -- May the return kind be (TYPE r) rather than exactly Type?
    tYPE_ok :: DynFlags -> Bool
    tYPE_ok dflags =
      (is_newtype && xopt LangExt.UnliftedNewtypes dflags)
        -- With UnliftedNewtypes, we allow kinds other than Type, but they
        -- must still be of the form `TYPE r` since we don't want to accept
        -- Constraint or Nat.
        -- See Note [Implementation of UnliftedNewtypes] in TcTyClsDecls.
        || is_data_family
        -- If this is a `data family` declaration, we don't need to check if
        -- UnliftedNewtypes is enabled, since data family declarations can
        -- have return kind `TYPE r` unconditionally (#16827).
    is_TYPE :: Bool
    is_TYPE = tcIsRuntimeTypeKind kind
    is_TYPE_or_Type :: DynFlags -> Bool
    is_TYPE_or_Type dflags | tYPE_ok dflags = is_TYPE
                           | otherwise      = tcIsLiftedTypeKind kind
    -- In the particular case of a data family, permit a return kind of the
    -- form `:: k` (where `k` is a bare kind variable).
    is_kind_var :: Bool
    is_kind_var | is_data_family = isJust (tcGetCastedTyVar_maybe kind)
                | otherwise      = False
    err_msg :: DynFlags -> SDoc
    err_msg dflags =
      sep [ (sep [ text "Kind signature on" <+> pp_dec <+>
                   text "declaration has non-" <>
                   (if tYPE_ok dflags then text "TYPE" else ppr liftedTypeKind)
                 , (if is_data_family then text "and non-variable" else empty) <+>
                   text "return kind" <+> quotes (ppr kind) ])
          , if not (tYPE_ok dflags) && is_TYPE && is_newtype &&
               not (xopt LangExt.UnliftedNewtypes dflags)
              then text "Perhaps you intended to use UnliftedNewtypes"
              else empty ]
-- | Checks that the result kind of a class is exactly `Constraint`, rejecting
-- type synonyms and type families that reduce to `Constraint`. See #16826.
checkClassKindSig :: Kind -> TcM ()
checkClassKindSig kind
  = checkTc (tcIsConstraintKind kind) $
    text "Kind signature on a class must end with" <+> ppr constraintKind $$
    text "unobscured by type families"
tcbVisibilities :: TyCon -> [Type] -> [TyConBndrVis]
-- Result is in 1-1 correspondence with orig_args
tcbVisibilities tc orig_args
  = go (tyConKind tc) init_subst orig_args
  where
    init_subst = mkEmptyTCvSubst (mkInScopeSet (tyCoVarsOfTypes orig_args))
    go _ _ []
      = []
    go fun_kind subst all_args@(arg : args)
      -- One binder per argument: read its visibility off the kind,
      -- instantiating named binders with the actual argument as we go
      | Just (tcb, inner_kind) <- splitPiTy_maybe fun_kind
      = case tcb of
          Anon af _           -> AnonTCB af   : go inner_kind subst  args
          Named (Bndr tv vis) -> NamedTCB vis : go inner_kind subst' args
             where
               subst' = extendTCvSubst subst tv arg
      -- The kind didn't split but we have a pending substitution:
      -- apply it (which may expose more arrows) and retry
      | not (isEmptyTCvSubst subst)
      = go (substTy subst fun_kind) init_subst all_args
      -- More arguments than the kind provides: caller invariant broken
      | otherwise
      = pprPanic "addTcbVisibilities" (ppr tc <+> ppr orig_args)
{- Note [TyConBinders for the result kind signature of a data type]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given
data T (a::*) :: * -> forall k. k -> *
we want to generate the extra TyConBinders for T, so we finally get
(a::*) (b::*) (k::*) (c::k)
The function etaExpandAlgTyCon generates these extra TyConBinders from
the result kind signature.
We need to take care to give the TyConBinders
  (a) OccNames that are fresh (because the TyConBinders of a TyCon
      must have distinct OccNames)
(b) Uniques that are fresh (obviously)
For (a) we need to avoid clashes with the tyvars declared by
the user before the "::"; in the above example that is 'a'.
And also see Note [Avoid name clashes for associated data types].
For (b) suppose we have
data T :: forall k. k -> forall k. k -> *
where the two k's are identical even up to their uniques. Surprisingly,
this can happen: see #14515.
It's reasonably easy to solve all this; just run down the list with a
substitution; hence the recursive 'go' function. But it has to be
done.
Note [Avoid name clashes for associated data types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider class C a b where
data D b :: * -> *
When typechecking the decl for D, we'll invent an extra type variable
for D, to fill out its kind. Ideally we don't want this type variable
to be 'a', because when pretty printing we'll get
class C a b where
data D b a0
(NB: the tidying happens in the conversion to Iface syntax, which happens
as part of pretty-printing a TyThing.)
That's why we look in the LocalRdrEnv to see what's in scope. This is
important only to get nice-looking output when doing ":info C" in GHCi.
It isn't essential for correctness.
************************************************************************
* *
Partial signatures
* *
************************************************************************
-}
-- | Typecheck a partial type signature (a signature containing wildcards).
-- Unlike complete signatures we do not build and instantiate a forall-type;
-- the binders, theta and tau are returned separately.
tcHsPartialSigType
  :: UserTypeCtxt
  -> LHsSigWcType GhcRn       -- The type signature
  -> TcM ( [(Name, TcTyVar)]  -- Wildcards
         , Maybe TcType       -- Extra-constraints wildcard
         , [(Name,TcTyVar)]   -- Original tyvar names, in correspondence with
                              -- the implicitly and explicitly bound type variables
         , TcThetaType        -- Theta part
         , TcType )           -- Tau part
-- See Note [Checking partial type signatures]
tcHsPartialSigType ctxt sig_ty
  | HsWC { hswc_ext = sig_wcs, hswc_body = ib_ty } <- sig_ty
  , HsIB { hsib_ext = implicit_hs_tvs
         , hsib_body = hs_ty } <- ib_ty
  , (explicit_hs_tvs, L _ hs_ctxt, hs_tau) <- splitLHsSigmaTyInvis hs_ty
  = addSigCtxt ctxt hs_ty $
    do { (implicit_tvs, (explicit_tvs, (wcs, wcx, theta, tau)))
            <- solveLocalEqualities "tcHsPartialSigType" $
               -- This solveLocalEqualities fails fast if there are
               -- insoluble equalities. See TcSimplify
               -- Note [Fail fast if there are insoluble kind equalities]
               tcNamedWildCardBinders sig_wcs $ \ wcs ->
               bindImplicitTKBndrs_Tv implicit_hs_tvs $
               bindExplicitTKBndrs_Tv explicit_hs_tvs $
               do { -- Instantiate the type-class context; but if there
                    -- is an extra-constraints wildcard, just discard it here
                    (theta, wcx) <- tcPartialContext hs_ctxt
                  ; tau <- tcHsOpenType hs_tau
                  ; return (wcs, wcx, theta, tau) }
       -- No kind-generalization here:
       ; kindGeneralizeNone (mkSpecForAllTys implicit_tvs $
                             mkSpecForAllTys explicit_tvs $
                             mkPhiTy theta $
                             tau)
       -- Spit out the wildcards (including the extra-constraints one)
       -- as "hole" constraints, so that they'll be reported if necessary
       -- See Note [Extra-constraint holes in partial type signatures]
       ; emitNamedWildCardHoleConstraints wcs
       -- We return a proper (Name,TyVar) environment, to be sure that
       -- we bring the right name into scope in the function body.
       -- Test case: partial-sigs/should_compile/LocalDefinitionBug
       ; let tv_prs = (implicit_hs_tvs `zip` implicit_tvs)
                      ++ (hsLTyVarNames explicit_hs_tvs `zip` explicit_tvs)
      -- NB: checkValidType on the final inferred type will be
      --     done later by checkInferredPolyId. We can't do it
      --     here because we don't have a complete type to check
       ; traceTc "tcHsPartialSigType" (ppr tv_prs)
       ; return (wcs, wcx, tv_prs, theta, tau) }
tcHsPartialSigType _ (HsWC _ (XHsImplicitBndrs nec)) = noExtCon nec
tcHsPartialSigType _ (XHsWildCardBndrs nec) = noExtCon nec
-- | Typecheck the context of a partial signature.  If the last element of
-- the context is a wildcard (an extra-constraints wildcard, as in
-- @f :: (Eq a, _) => blah@), return a fresh unification variable for it
-- alongside the typechecked remainder of the context.
tcPartialContext :: HsContext GhcRn -> TcM (TcThetaType, Maybe TcType)
tcPartialContext hs_theta
  | Just (hs_theta1, hs_ctxt_last) <- snocView hs_theta
  , L wc_loc wc@(HsWildCardTy _) <- ignoreParens hs_ctxt_last
  = do { wc_tv_ty <- setSrcSpan wc_loc $
                     tcAnonWildCardOcc wc constraintKind
       ; theta <- mapM tcLHsPredType hs_theta1
       ; return (theta, Just wc_tv_ty) }
  | otherwise
  = do { theta <- mapM tcLHsPredType hs_theta
       ; return (theta, Nothing) }
{- Note [Checking partial type signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See also Note [Recipe for checking a signature]
When we have a partial signature like
f,g :: forall a. a -> _
we do the following
* In TcSigs.tcUserSigType we return a PartialSig, which (unlike
the companion CompleteSig) contains the original, as-yet-unchecked
source-code LHsSigWcType
* Then, for f and g /separately/, we call tcInstSig, which in turn
call tchsPartialSig (defined near this Note). It kind-checks the
LHsSigWcType, creating fresh unification variables for each "_"
wildcard. It's important that the wildcards for f and g are distinct
because they might get instantiated completely differently. E.g.
f,g :: forall a. a -> _
f x = a
g x = True
It's really as if we'd written two distinct signatures.
* Note that we don't make quantified type (forall a. blah) and then
instantiate it -- it makes no sense to instantiate a type with
wildcards in it. Rather, tcHsPartialSigType just returns the
'a' and the 'blah' separately.
Nor, for the same reason, do we push a level in tcHsPartialSigType.
Note [Extra-constraint holes in partial type signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f :: (_) => a -> a
f x = ...
* The renamer leaves '_' untouched.
* Then, in tcHsPartialSigType, we make a new hole TcTyVar, in
tcWildCardBinders.
* TcBinds.chooseInferredQuantifiers fills in that hole TcTyVar
with the inferred constraints, e.g. (Eq a, Show a)
* TcErrors.mkHoleError finally reports the error.
An annoying difficulty happens if there are more than 62 inferred
constraints. Then we need to fill in the TcTyVar with (say) a 70-tuple.
Where do we find the TyCon? For good reasons we only have constraint
tuples up to 62 (see Note [How tuples work] in TysWiredIn). So how
can we make a 70-tuple? This was the root cause of #14217.
It's incredibly tiresome, because we only need this type to fill
in the hole, to communicate to the error reporting machinery. Nothing
more. So I use a HACK:
* I make an /ordinary/ tuple of the constraints, in
TcBinds.chooseInferredQuantifiers. This is ill-kinded because
ordinary tuples can't contain constraints, but it works fine. And for
ordinary tuples we don't have the same limit as for constraint
tuples (which need selectors and an associated class).
* Because it is ill-kinded, it trips an assert in writeMetaTyVar,
so now I disable the assertion if we are writing a type of
kind Constraint. (That seldom/never normally happens so we aren't
losing much.)
Result works fine, but it may eventually bite us.
************************************************************************
* *
Pattern signatures (i.e signatures that occur in patterns)
* *
********************************************************************* -}
-- | Typecheck a pattern (or RULE-binder) signature, returning the
-- wildcards, the scoped type variables it binds, and the checked type.
tcHsPatSigType :: UserTypeCtxt
               -> LHsSigWcType GhcRn  -- The type signature
               -> TcM ( [(Name, TcTyVar)] -- Wildcards
                      , [(Name, TcTyVar)] -- The new bit of type environment, binding
                                          -- the scoped type variables
                      , TcType)           -- The type
-- Used for type-checking type signatures in
-- (a) patterns e.g f (x::Int) = e
-- (b) RULE forall bndrs e.g. forall (x::Int). f x = x
--
-- This may emit constraints
-- See Note [Recipe for checking a signature]
tcHsPatSigType ctxt sig_ty
  | HsWC { hswc_ext = sig_wcs, hswc_body = ib_ty } <- sig_ty
  , HsIB { hsib_ext = sig_ns
         , hsib_body = hs_ty } <- ib_ty
  = addSigCtxt ctxt hs_ty $
    do { sig_tkv_prs <- mapM new_implicit_tv sig_ns
       ; (wcs, sig_ty)
            <- solveLocalEqualities "tcHsPatSigType" $
               -- Always solve local equalities if possible,
               -- else casts get in the way of deep skolemisation
               -- (#16033)
               tcNamedWildCardBinders sig_wcs $ \ wcs ->
               tcExtendNameTyVarEnv sig_tkv_prs $
               do { sig_ty <- tcHsOpenType hs_ty
                  ; return (wcs, sig_ty) }
       ; emitNamedWildCardHoleConstraints wcs
       -- sig_ty might have tyvars that are at a higher TcLevel (if hs_ty
       -- contains a forall). Promote these.
       -- Ex: f (x :: forall a. Proxy a -> ()) = ... x ...
       -- When we instantiate x, we have to compare the kind of the argument
       -- to a's kind, which will be a metavariable.
       -- kindGeneralizeNone does this:
       ; kindGeneralizeNone sig_ty
       ; sig_ty <- zonkTcType sig_ty
       ; checkValidType ctxt sig_ty
       ; traceTc "tcHsPatSigType" (ppr sig_tkv_prs)
       ; return (wcs, sig_tkv_prs, sig_ty) }
  where
    new_implicit_tv name
      = do { kind <- newMetaKindVar
           ; tv <- case ctxt of
                     RuleSigCtxt {} -> newSkolemTyVar name kind
                     _ -> newPatSigTyVar name kind
                     -- See Note [Pattern signature binders]
             -- NB: tv's Name may be fresh (in the case of newPatSigTyVar)
           ; return (name, tv) }
tcHsPatSigType _ (HsWC _ (XHsImplicitBndrs nec)) = noExtCon nec
tcHsPatSigType _ (XHsWildCardBndrs nec) = noExtCon nec
-- | Check a pattern signature against the expected (result) type of the
-- pattern, producing the signature type, the scoped type variables and
-- wildcards it binds, and a coercion wrapper from the subsumption check.
tcPatSig :: Bool                    -- True <=> pattern binding
         -> LHsSigWcType GhcRn
         -> ExpSigmaType
         -> TcM (TcType,            -- The type to use for "inside" the signature
                 [(Name,TcTyVar)],  -- The new bit of type environment, binding
                                    -- the scoped type variables
                 [(Name,TcTyVar)],  -- The wildcards
                 HsWrapper)         -- Coercion due to unification with actual ty
                                    -- Of shape: res_ty ~ sig_ty
tcPatSig in_pat_bind sig res_ty
  = do { (sig_wcs, sig_tvs, sig_ty) <- tcHsPatSigType PatSigCtxt sig
       -- sig_tvs are the type variables free in 'sig',
       -- and not already in scope. These are the ones
       -- that should be brought into scope
       ; if null sig_tvs then do {
            -- Just do the subsumption check and return
              wrap <- addErrCtxtM (mk_msg sig_ty) $
                      tcSubTypeET PatSigOrigin PatSigCtxt res_ty sig_ty
            ; return (sig_ty, [], sig_wcs, wrap)
        } else do
            -- Type signature binds at least one scoped type variable
            -- A pattern binding cannot bind scoped type variables
            -- It is more convenient to make the test here
            -- than in the renamer
        { when in_pat_bind (addErr (patBindSigErr sig_tvs))
            -- Now do a subsumption check of the pattern signature against res_ty
        ; wrap <- addErrCtxtM (mk_msg sig_ty) $
                  tcSubTypeET PatSigOrigin PatSigCtxt res_ty sig_ty
            -- Phew!
        ; return (sig_ty, sig_tvs, sig_wcs, wrap)
        } }
  where
    -- Error-context message used if the subsumption check fails; zonks and
    -- tidies both types before printing.
    mk_msg sig_ty tidy_env
      = do { (tidy_env, sig_ty) <- zonkTidyTcType tidy_env sig_ty
           ; res_ty <- readExpType res_ty -- should be filled in by now
           ; (tidy_env, res_ty) <- zonkTidyTcType tidy_env res_ty
           ; let msg = vcat [ hang (text "When checking that the pattern signature:")
                                 4 (ppr sig_ty)
                            , nest 2 (hang (text "fits the type of its context:")
                                         2 (ppr res_ty)) ]
           ; return (tidy_env, msg) }
-- | Error message: scoped type variables may not be bound by the
-- signature of a pattern binding.
patBindSigErr :: [(Name,TcTyVar)] -> SDoc
patBindSigErr sig_tvs = hang header 2 footer
  where
    header = text "You cannot bind scoped type variable" <> plural sig_tvs
             <+> pprQuotedList (map fst sig_tvs)
    footer = text "in a pattern binding signature"
{- Note [Pattern signature binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See also Note [Type variables in the type environment] in TcRnTypes.
Consider
data T where
MkT :: forall a. a -> (a -> Int) -> T
f :: T -> ...
f (MkT x (f :: b -> c)) = <blah>
Here
* The pattern (MkT p1 p2) creates a *skolem* type variable 'a_sk',
It must be a skolem so that that it retains its identity, and
TcErrors.getSkolemInfo can thereby find the binding site for the skolem.
* The type signature pattern (f :: b -> c) makes freshs meta-tyvars
beta and gamma (TauTvs), and binds "b" :-> beta, "c" :-> gamma in the
environment
* Then unification makes beta := a_sk, gamma := Int
That's why we must make beta and gamma a MetaTv,
not a SkolemTv, so that it can unify to a_sk (or Int, respectively).
* Finally, in '<blah>' we have the envt "b" :-> beta, "c" :-> gamma,
so we return the pairs ("b" :-> beta, "c" :-> gamma) from tcHsPatSigType,
Another example (#13881):
fl :: forall (l :: [a]). Sing l -> Sing l
fl (SNil :: Sing (l :: [y])) = SNil
When we reach the pattern signature, 'l' is in scope from the
outer 'forall':
"a" :-> a_sk :: *
"l" :-> l_sk :: [a_sk]
We make up a fresh meta-TauTv, y_sig, for 'y', and kind-check
the pattern signature
Sing (l :: [y])
That unifies y_sig := a_sk. We return from tcHsPatSigType with
the pair ("y" :-> y_sig).
For RULE binders, though, things are a bit different (yuk).
RULE "foo" forall (x::a) (y::[a]). f x y = ...
Here this really is the binding site of the type variable so we'd like
to use a skolem, so that we get a complaint if we unify two of them
together. Hence the new_tv function in tcHsPatSigType.
************************************************************************
* *
Checking kinds
* *
************************************************************************
-}
-- | Check a list of types against a single fresh meta-kind variable, so
-- that all of them end up with the same kind; returns the kind-checked
-- types and that common kind.
unifyKinds :: [LHsType GhcRn] -> [(TcType, TcKind)] -> TcM ([TcType], TcKind)
unifyKinds rn_tys act_kinds
  = do { kind <- newMetaKindVar
       ; let check rn_ty (ty, act_kind)
               = checkExpectedKind (unLoc rn_ty) ty act_kind kind
       ; tys' <- zipWithM check rn_tys act_kinds
       ; return (tys', kind) }
{-
************************************************************************
* *
Sort checking kinds
* *
************************************************************************
tcLHsKindSig converts a user-written kind to an internal, sort-checked kind.
It does sort checking and desugaring at the same time, in one single pass.
-}
-- | Typecheck a user-written kind signature.
tcLHsKindSig :: UserTypeCtxt -> LHsKind GhcRn -> TcM Kind
tcLHsKindSig ctxt hs_kind
-- See Note [Recipe for checking a signature] in TcHsType
-- Result is zonked
  = do { kind <- solveLocalEqualities "tcLHsKindSig" $
                 tc_lhs_kind kindLevelMode hs_kind
       ; traceTc "tcLHsKindSig" (ppr hs_kind $$ ppr kind)
       -- No generalization:
       ; kindGeneralizeNone kind
       ; kind <- zonkTcType kind
         -- This zonk is very important in the case of higher rank kinds
         -- E.g. #13879 f :: forall (p :: forall z (y::z). <blah>).
         --                  <more blah>
         -- When instantiating p's kind at occurrences of p in <more blah>
         -- it's crucial that the kind we instantiate is fully zonked,
         -- else we may fail to substitute properly
       ; checkValidType ctxt kind
       ; traceTc "tcLHsKindSig2" (ppr kind)
       ; return kind }
-- | Kind-check a kind: run 'tc_lhs_type' at the kind level, expecting the
-- result to be of kind @liftedTypeKind@, with an error context naming the
-- offending kind.
tc_lhs_kind :: TcTyMode -> LHsKind GhcRn -> TcM Kind
tc_lhs_kind mode k
  = addErrCtxt (text "In the kind" <+> quotes (ppr k)) $
    tc_lhs_type (kindLevel mode) k liftedTypeKind
-- | Fail with an error explaining why the given name cannot be used in a
-- promoted position, with a reason tailored to the 'PromotionErr' kind.
promotionErr :: Name -> PromotionErr -> TcM a
promotionErr name err
  = failWithTc (hang (pprPECategory err <+> quotes (ppr name) <+> text "cannot be used here")
                   2 (parens reason))
  where
    reason = case err of
               ConstrainedDataConPE pred
                            -> text "it has an unpromotable context"
                               <+> quotes (ppr pred)
               FamDataConPE -> text "it comes from a data family instance"
               NoDataKindsTC -> text "perhaps you intended to use DataKinds"
               NoDataKindsDC -> text "perhaps you intended to use DataKinds"
               PatSynPE -> text "pattern synonyms cannot be promoted"
               _ -> text "it is defined and used in the same recursive group"
{-
************************************************************************
* *
Error messages and such
* *
************************************************************************
-}
-- | If the inner action emits constraints, report them as errors and fail;
-- otherwise, propagates the return value. Useful as a wrapper around
-- 'tcImplicitTKBndrs', which uses solveLocalEqualities, when there won't be
-- another chance to solve constraints
failIfEmitsConstraints :: TcM a -> TcM a
failIfEmitsConstraints thing_inside
  = checkNoErrs $ -- We say that we fail if there are constraints!
                  -- c.f same checkNoErrs in solveEqualities
    do { (res, lie) <- captureConstraints thing_inside
       ; reportAllUnsolved lie   -- any leftover constraints become errors
       ; return res
       }
-- | Make an appropriate message for an error in a function argument.
-- Used for both expressions and types.
funAppCtxt :: (Outputable fun, Outputable arg) => fun -> arg -> Int -> SDoc
funAppCtxt fun arg arg_no = hang the_call 2 (quotes (ppr arg))
  where
    the_call = hsep [ text "In the"
                    , speakNth arg_no
                    , ptext (sLit "argument of")
                    , quotes (ppr fun) <> text ", namely" ]
-- | Add a "In the data declaration for T" (or similar, depending on the
-- flavour) error context around a typechecking action.
addTyConFlavCtxt :: Name -> TyConFlavour -> TcM a -> TcM a
addTyConFlavCtxt name flav thing_inside
  = addErrCtxt ctxt_msg thing_inside
  where
    ctxt_msg = hsep [ text "In the", ppr flav
                    , text "declaration for", quotes (ppr name) ]
|
sdiehl/ghc
|
compiler/typecheck/TcHsType.hs
|
bsd-3-clause
| 149,974 | 638 | 37 | 40,778 | 16,300 | 9,611 | 6,689 | -1 | -1 |
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE ScopedTypeVariables #-}
#include "inline.hs"
-- |
-- Module : Streamly.Internal.Memory.ArrayStream
-- Copyright : (c) 2019 Composewell Technologies
--
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Combinators to efficiently manipulate streams of arrays.
--
module Streamly.Internal.Memory.ArrayStream
(
-- * Creation
arraysOf
-- * Flattening to elements
, concat
, concatRev
, interpose
, interposeSuffix
, intercalateSuffix
-- * Transformation
, splitOn
, splitOnSuffix
, compact -- compact
-- * Elimination
, toArray
)
where
import Control.Monad.IO.Class (MonadIO(..))
-- import Data.Functor.Identity (Identity)
import Data.Word (Word8)
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Ptr (minusPtr, plusPtr, castPtr)
import Foreign.Storable (Storable(..))
import Prelude hiding (length, null, last, map, (!!), read, concat)
import Streamly.Internal.Memory.Array.Types (Array(..), length)
import Streamly.Streams.Serial (SerialT)
import Streamly.Streams.StreamK.Type (IsStream)
import qualified Streamly.Internal.Memory.Array as A
import qualified Streamly.Internal.Memory.Array.Types as A
import qualified Streamly.Internal.Prelude as S
import qualified Streamly.Streams.StreamD as D
import qualified Streamly.Streams.Prelude as P
-- XXX efficiently compare two streams of arrays. Two streams can have chunks
-- of different sizes, we can handle that in the stream comparison abstraction.
-- This could be useful e.g. to fast compare whether two files differ.
-- | Convert a stream of arrays into a stream of their elements.
--
-- Same as the following but more efficient:
--
-- > concat = S.concatMap A.read
--
-- @since 0.7.0
{-# INLINE concat #-}
concat :: (IsStream t, MonadIO m, Storable a) => t m (Array a) -> t m a
concat = D.fromStreamD . D.concatMapU A.read . D.toStreamD
-- XXX should we have a reverseArrays API to reverse the stream of arrays
-- instead?
--
-- | Convert a stream of arrays into a stream of their elements, reversing
-- the contents of each array before flattening.
--
-- @since 0.7.0
{-# INLINE concatRev #-}
concatRev :: (IsStream t, MonadIO m, Storable a) => t m (Array a) -> t m a
concatRev = D.fromStreamD . A.flattenArraysRev . D.toStreamD
-- | Flatten a stream of arrays, inserting the given element between the
-- contents of consecutive arrays.
--
-- /Internal/
{-# INLINE interpose #-}
interpose :: (MonadIO m, IsStream t, Storable a) => a -> t m (Array a) -> t m a
interpose sep arrs = S.interpose sep A.read arrs
-- | Flatten a stream of arrays, appending the contents of the given
-- separator array after each array.
{-# INLINE intercalateSuffix #-}
intercalateSuffix :: (MonadIO m, IsStream t, Storable a)
    => Array a -> t m (Array a) -> t m a
intercalateSuffix sepArr arrs = S.intercalateSuffix sepArr A.read arrs
-- | Flatten a stream of arrays, appending the given element after the
-- contents of each array.
--
-- @since 0.7.0
{-# INLINE interposeSuffix #-}
interposeSuffix :: (MonadIO m, IsStream t, Storable a)
    => a -> t m (Array a) -> t m a
interposeSuffix suffix arrs = S.interposeSuffix suffix A.read arrs
-- | Split a stream of arrays on a given separator byte, dropping the
-- separator and coalescing all the arrays between two separators into a
-- single array.
--
-- @since 0.7.0
{-# INLINE splitOn #-}
splitOn
    :: (IsStream t, MonadIO m)
    => Word8
    -> t m (Array Word8)
    -> t m (Array Word8)
splitOn sep stream =
    D.fromStreamD (D.splitInnerBy (A.breakOn sep) A.spliceTwo inner)
  where
    inner = D.toStreamD stream
-- | Like 'splitOn', but treats the separator byte as a suffix terminator
-- rather than an infix separator.
{-# INLINE splitOnSuffix #-}
splitOnSuffix
    :: (IsStream t, MonadIO m)
    => Word8
    -> t m (Array Word8)
    -> t m (Array Word8)
splitOnSuffix sep stream =
    D.fromStreamD (D.splitInnerBySuffix (A.breakOn sep) A.spliceTwo inner)
  where
    inner = D.toStreamD stream
-- | Coalesce adjacent arrays in the incoming stream to form bigger arrays
-- of a maximum specified size in bytes.
--
-- @since 0.7.0
{-# INLINE compact #-}
compact :: (MonadIO m, Storable a)
    => Int -> SerialT m (Array a) -> SerialT m (Array a)
compact maxBytes = D.fromStreamD . A.packArraysChunksOf maxBytes . D.toStreamD
-- | @arraysOf n stream@ groups the elements in the input stream into
-- arrays of @n@ elements each.
--
-- Same as the following but more efficient:
--
-- > arraysOf n = S.chunksOf n (A.writeN n)
--
-- @since 0.7.0
{-# INLINE arraysOf #-}
arraysOf :: (IsStream t, MonadIO m, Storable a)
    => Int -> t m a -> t m (Array a)
arraysOf chunkSize = D.fromStreamD . A.fromStreamDArraysOf chunkSize . D.toStreamD
-- XXX Both of these implementations of splicing seem to perform equally well.
-- We need to perform benchmarks over a range of sizes though.
-- CAUTION! length must more than equal to lengths of all the arrays in the
-- stream.
{-# INLINE spliceArraysLenUnsafe #-}
-- Concatenate all arrays in the stream into a single array of capacity
-- 'len'. Unsafe: 'len' must be at least the total length of the arrays
-- (see the CAUTION note above); the copy loop does not bounds-check.
spliceArraysLenUnsafe :: (MonadIO m, Storable a)
    => Int -> SerialT m (Array a) -> m (Array a)
spliceArraysLenUnsafe len buffered = do
    -- Allocate the destination once, then memcpy each source array in turn.
    arr <- liftIO $ A.newArray len
    end <- S.foldlM' writeArr (aEnd arr) buffered
    return $ arr {aEnd = end}
    where
    -- Copy one source array to the destination pointer 'dst', returning
    -- the advanced destination pointer for the next iteration.
    writeArr dst Array{..} =
        liftIO $ withForeignPtr aStart $ \src -> do
            let count = aEnd `minusPtr` src
            A.memcpy (castPtr dst) (castPtr src) count
            return $ dst `plusPtr` count
{-# INLINE _spliceArraysBuffered #-}
-- | Splice a /finite/ stream of arrays into a single array by first
-- buffering the whole stream, measuring its total length, and then copying
-- into one array allocated at exactly that size.
_spliceArraysBuffered :: (MonadIO m, Storable a)
    => SerialT m (Array a) -> m (Array a)
_spliceArraysBuffered s = do
    buffered <- P.foldr S.cons S.nil s
    len <- S.sum (S.map length buffered)
    -- Splice from the buffered copy, not from 's': consuming 's' a second
    -- time would re-run its effects, and could yield arrays whose total
    -- length differs from the 'len' we just measured (unsafe copy).
    spliceArraysLenUnsafe len buffered
{-# INLINE spliceArraysRealloced #-}
-- Concatenate a stream of arrays into a single array, growing the
-- destination by doubling as needed and shrinking it to fit at the end.
spliceArraysRealloced :: forall m a. (MonadIO m, Storable a)
    => SerialT m (Array a) -> m (Array a)
spliceArraysRealloced s = do
    -- Start with a 4KB chunk, expressed as a count of elements of type 'a'.
    idst <- liftIO $ A.newArray (A.bytesToElemCount (undefined :: a)
                                (A.mkChunkSizeKB 4))
    arr <- S.foldlM' A.spliceWithDoubling idst s
    liftIO $ A.shrinkToFit arr
-- | Given a stream of arrays, splice them all together to generate a single
-- array. The stream must be /finite/.
--
-- @since 0.7.0
{-# INLINE toArray #-}
toArray :: (MonadIO m, Storable a) => SerialT m (Array a) -> m (Array a)
-- Uses the reallocating strategy; a buffered alternative exists above as
-- '_spliceArraysBuffered'.
toArray = spliceArraysRealloced
-- spliceArrays = _spliceArraysBuffered
-- exponentially increasing sizes of the chunks upto the max limit.
-- XXX this will be easier to implement with parsers/terminating folds
-- With this we should be able to reduce the number of chunks/allocations.
-- The reallocation/copy based toArray can also be implemented using this.
--
{-
{-# INLINE toArraysInRange #-}
toArraysInRange :: (IsStream t, MonadIO m, Storable a)
=> Int -> Int -> Fold m (Array a) b -> Fold m a b
toArraysInRange low high (Fold step initial extract) =
-}
{-
-- | Fold the input to a pure buffered stream (List) of arrays.
{-# INLINE _toArraysOf #-}
_toArraysOf :: (MonadIO m, Storable a)
=> Int -> Fold m a (SerialT Identity (Array a))
_toArraysOf n = FL.lchunksOf n (A.writeNF n) FL.toStream
-}
|
harendra-kumar/asyncly
|
src/Streamly/Internal/Memory/ArrayStream.hs
|
bsd-3-clause
| 7,397 | 0 | 14 | 1,525 | 1,501 | 832 | 669 | 105 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Prompt.Theme
-- Copyright : (C) 2007 Andrea Rossato
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : unportable
--
-- A prompt for changing the theme of the current workspace
-----------------------------------------------------------------------------
module XMonad.Prompt.Theme
( -- * Usage
-- $usage
themePrompt,
) where
import Control.Arrow ( (&&&) )
import qualified Data.Map as M
import Data.Maybe ( fromMaybe )
import XMonad
import XMonad.Prompt
import XMonad.Layout.Decoration
import XMonad.Util.Themes
-- $usage
-- You can use this module with the following in your
-- @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Prompt
-- > import XMonad.Prompt.Theme
--
-- in your keybindings add:
--
-- > , ((modm .|. controlMask, xK_t), themePrompt defaultXPConfig)
--
-- For detailed instruction on editing the key binding see
-- "XMonad.Doc.Extending#Editing_key_bindings".
-- | Unit type whose only purpose is to carry the 'XPrompt' instance for
-- the theme-selection prompt.
data ThemePrompt = ThemePrompt
instance XPrompt ThemePrompt where
    showXPrompt ThemePrompt = "Select a theme: "
    commandToComplete _ c = c
    nextCompletion _ = getNextCompletion
-- | Pop up a prompt listing the available themes; the chosen theme is
-- applied by sending a 'SetTheme' message to the layout (falling back to
-- 'defaultTheme' if the name is not found in 'mapOfThemes').
themePrompt :: XPConfig -> X ()
themePrompt c = mkXPrompt ThemePrompt c (mkComplFunFromList' . map ppThemeInfo $ listOfThemes) changeTheme
    where changeTheme t = sendMessage . SetTheme . fromMaybe defaultTheme $ M.lookup t mapOfThemes
-- | Lookup table from a theme's pretty-printed name to the theme itself.
mapOfThemes :: M.Map String Theme
mapOfThemes = M.fromList [ (ppThemeInfo ti, theme ti) | ti <- listOfThemes ]
|
MasseR/xmonadcontrib
|
XMonad/Prompt/Theme.hs
|
bsd-3-clause
| 1,630 | 0 | 9 | 277 | 255 | 150 | 105 | 20 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleContexts #-}
module Data.Store.Internal
( Store(..)
, StoreIndex
, Query(..)
, Selection(..)
) where
--------------------------------------------------------------------------------
import Control.Applicative
import Control.Monad.Reader
--------------------------------------------------------------------------------
import qualified Data.IntMap as IM
import qualified Data.Vector as V
import Data.Proxy
--------------------------------------------------------------------------------
import qualified Data.Store.Internal.Key as I
import qualified Data.Store.Internal.Index as I
--------------------------------------------------------------------------------
-- | One 'I.Index' per dimension of the key.
type StoreIndex = V.Vector I.Index
-- | The store: values keyed by an internal 'Int' ID, plus a per-dimension
-- index used to resolve selections.
data Store tag k v = Store
    { storeValues :: !(IM.IntMap (v, I.KeyInternal k)) -- ^ Map from ID to (value, key).
    , storeIndex :: !StoreIndex -- ^ Vector of maps from key to IDs. Each map represents a single dimension of the key.
    , storeNextID :: !Int -- ^ The next ID.
    }
-- | A read-only query over a 'Store', wrapping a 'Reader' on the store.
newtype Query tag k v a = Query
    { unQuery :: Reader (Store tag k v) a
    }
-- The instances below simply lift the corresponding 'Reader' instances
-- through the newtype.
instance Functor (Query tag k v) where
    fmap f = Query . fmap f . unQuery
instance Applicative (Query tag k v) where
    pure = Query . pure
    (Query f) <*> (Query r) = Query (f <*> r)
    (Query r1) *> (Query r2) = Query (r1 *> r2)
    (Query r1) <* (Query r2) = Query (r1 <* r2)
instance Monad (Query tag k v) where
    return = Query . return
    (Query r1) >>= f = Query (r1 >>= (unQuery . f))
    (Query r1) >> (Query r2) = Query (r1 >> r2)
-- | A selection predicate over keys of type @k@ (tagged by @tag@).
-- The comparison constructors target a single key dimension @n@, chosen by
-- the 'Proxy' argument, and carry a value of that dimension's type to
-- compare against; 'SelectOR' and 'SelectAND' combine selections;
-- 'SelectALL' and 'SelectNONE' are the two trivial selections (the
-- interpretation lives with the query evaluator, not here).
data Selection tag k where
    SelectGT :: (I.ToInt n, Ord (I.DimensionType k n))
             => Proxy (tag, n) -> I.DimensionType k n -> Selection tag k
    SelectLT :: (I.ToInt n, Ord (I.DimensionType k n))
             => Proxy (tag, n) -> I.DimensionType k n -> Selection tag k
    SelectGTE :: (I.ToInt n, Ord (I.DimensionType k n))
              => Proxy (tag, n) -> I.DimensionType k n -> Selection tag k
    SelectLTE :: (I.ToInt n, Ord (I.DimensionType k n))
              => Proxy (tag, n) -> I.DimensionType k n -> Selection tag k
    SelectEQ :: (I.ToInt n, Ord (I.DimensionType k n))
             => Proxy (tag, n) -> I.DimensionType k n -> Selection tag k
    SelectOR :: Selection tag k -> Selection tag k -> Selection tag k
    SelectAND :: Selection tag k -> Selection tag k -> Selection tag k
    SelectALL :: Selection tag k
    SelectNONE :: Selection tag k
|
ekmett/data-store
|
src/Data/Store/Internal.hs
|
bsd-3-clause
| 2,592 | 0 | 14 | 648 | 836 | 451 | 385 | 54 | 0 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : System.IO.UTF8
-- Copyright : (c) Eric Mertens 2007
-- License : BSD3-style (see LICENSE)
--
-- Maintainer: [email protected]
-- Stability : experimental
-- Portability : portable
--
-- String IO preserving UTF8 encoding.
--
module System.IO.UTF8 (
print
, putStr
, putStrLn
, getLine
, readLn
, openBinaryFile
, withBinaryFile
, readFile
, writeFile
, appendFile
, interact
, getContents
, hGetLine
, hGetContents
, hPutStr
, hPutStrLn
) where
import Control.Monad (liftM)
import Data.Word (Word8)
import Prelude (String, (=<<), (.), map, Enum(toEnum, fromEnum), Read,
Show(..))
import System.IO (Handle, IO, FilePath, IOMode(AppendMode, ReadMode, WriteMode))
import qualified System.IO as IO
import Control.Exception (bracket)
import Codec.Binary.UTF8.String (encode, decode)
-- | Encode a string in UTF8 form (each resulting 'Char' holds one byte).
encodeString :: String -> String
encodeString = bytesToString . encode
-- | Decode a string from UTF8 (each input 'Char' is treated as one byte).
decodeString :: String -> String
decodeString = decode . stringToBytes
-- | Convert a list of bytes to a String, one 'Char' per byte.
bytesToString :: [Word8] -> String
bytesToString = map byteToChar
  where byteToChar = toEnum . fromEnum
-- | Convert a String to a list of bytes, one 'Word8' per 'Char'.
stringToBytes :: String -> [Word8]
stringToBytes = map charToByte
  where charToByte = toEnum . fromEnum
-- | The 'print' function outputs a value of any printable type to the
-- standard output device. This function differs from System.IO.print in
-- that it preserves any UTF8 encoding of the shown value.
print :: Show a => a -> IO ()
print = putStrLn . show
-- | Write a UTF8 string to the standard output device
putStr :: String -> IO ()
putStr = IO.putStr . encodeString
-- | The same as 'putStr', but adds a newline character.
putStrLn :: String -> IO ()
putStrLn = IO.putStrLn . encodeString
-- | Read a UTF8 line from the standard input device
getLine :: IO String
getLine = decodeLine IO.getLine
  where decodeLine = liftM decodeString
-- | The 'readLn' function combines 'getLine' and 'readIO', preserving UTF8
readLn :: Read a => IO a
readLn = do
    line <- getLine
    IO.readIO line
-- | Like 'System.IO.openBinaryFile', but UTF8-encodes the file path first.
openBinaryFile :: FilePath -> IOMode -> IO Handle
openBinaryFile path = IO.openBinaryFile (encodeString path)
-- | Bracketed variant of 'openBinaryFile'; the handle is always closed.
withBinaryFile :: FilePath -> IOMode -> (Handle -> IO a) -> IO a
withBinaryFile path mode = bracket (openBinaryFile path mode) IO.hClose
-- | The 'readFile' function reads a file and
-- returns the contents of the file as a UTF8 string.
-- The file is read lazily, on demand, as with 'getContents'.
readFile :: FilePath -> IO String
readFile path = do
    handle <- openBinaryFile path ReadMode
    hGetContents handle
-- | The computation 'writeFile' @file str@ writes the UTF8 string @str@
-- to the file @file@.
writeFile :: FilePath -> String -> IO ()
writeFile path str = withBinaryFile path WriteMode writer
  where writer handle = hPutStr handle str
-- | The computation 'appendFile' @file str@ appends the UTF8 string @str@
-- to the file @file@.
appendFile :: FilePath -> String -> IO ()
appendFile path str = withBinaryFile path AppendMode writer
  where writer handle = hPutStr handle str
-- | Read a UTF8 line from a Handle
hGetLine :: Handle -> IO String
hGetLine = liftM decodeString . IO.hGetLine
-- | Lazily read a UTF8 string from a Handle
hGetContents :: Handle -> IO String
hGetContents = liftM decodeString . IO.hGetContents
-- | Write a UTF8 string to a Handle.
hPutStr :: Handle -> String -> IO ()
hPutStr handle = IO.hPutStr handle . encodeString
-- | Write a UTF8 string to a Handle, appending a newline.
hPutStrLn :: Handle -> String -> IO ()
hPutStrLn handle = IO.hPutStrLn handle . encodeString
-- | Lazily read stdin as a UTF8 string.
getContents :: IO String
getContents = decodeAll IO.getContents
  where decodeAll = liftM decodeString
-- | Like 'System.IO.interact': stdin is decoded from UTF8 and the
-- function's output is re-encoded to UTF8 on the way out.
interact :: (String -> String) -> IO ()
interact f = IO.interact transform
  where transform = encodeString . f . decodeString
|
ghc/packages-utf8-string
|
System/IO/UTF8.hs
|
bsd-3-clause
| 3,953 | 0 | 10 | 772 | 919 | 508 | 411 | 66 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Golden.IO where
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Default (def)
import qualified Data.Time.Calendar as Calendar
import Data.Time.Clock (UTCTime (..), secondsToDiffTime)
import Hakyll.Convert.Common (DistilledPost (..))
import Hakyll.Convert.IO (savePost)
import System.FilePath ((</>))
import System.IO.Temp (withSystemTempDirectory)
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.Golden (goldenVsString)
-- | All golden tests for 'savePost'.
goldenTests :: TestTree
goldenTests =
  testGroup
    "IO.savePost"
    [ writesUntitledPost,
      writesPostWithTitle
    ]
-- | 'savePost' on a post with no title must still produce a file at the
-- expected output path; its contents are compared against a golden file.
writesUntitledPost :: TestTree
writesUntitledPost =
  goldenVsString
    "Writes untitled post to a file"
    "test/golden/data/io-000/untitled-post.golden"
    ( withSystemTempDirectory "hakyll-convert" $ \tempDir -> do
        let output_format = "output"
        let file_extension = "html"
        let post =
              def
                { dpUri = "https://example.com/~joe/2011/01/02/just-testing.php",
                  dpDate = fromGregorian 2011 1 2 3 14 59,
                  dpCategories = ["Category 1", "Category 2"],
                  dpTags = ["Tagged", "with", "<3"],
                  dpBody = "<p>This tool is <em>awesome</em>!</p>"
                }
        -- Ignore the generated filename -- we'll just check if the file is at
        -- the expected place instead.
        _filename <- savePost tempDir output_format file_extension post
        let filename = tempDir </> "output.html"
        LBS.readFile filename
    )
-- | Like 'writesUntitledPost', but the post carries a (non-ASCII) title;
-- output is compared against its own golden file.
writesPostWithTitle :: TestTree
writesPostWithTitle =
  goldenVsString
    "Writes post with title to a file"
    "test/golden/data/io-000/post-with-title.golden"
    ( withSystemTempDirectory "hakyll-convert" $ \tempDir -> do
        let output_format = "output"
        let file_extension = "aspx"
        let post =
              def
                { dpUri = "https://example.com/~joe/a%20joke.php",
                  dpDate = fromGregorian 1999 12 31 23 59 1,
                  dpCategories = ["jokes"],
                  dpTags = ["non-funny", "unoriginal"],
                  dpTitle = Just "And now for something completely different…",
                  dpBody = "Wonder what it is?"
                }
        -- Ignore the generated filename -- we'll just check if the file is at
        -- the expected place instead.
        _filename <- savePost tempDir output_format file_extension post
        let filename = tempDir </> "output.aspx"
        LBS.readFile filename
    )
-- | Build a 'UTCTime' from a Gregorian calendar date plus an
-- hours\/minutes\/seconds time of day.
fromGregorian :: Integer -> Int -> Int -> Int -> Int -> Int -> UTCTime
fromGregorian year month day hours minutes seconds =
  UTCTime calendarDay timeOfDay
  where
    calendarDay = Calendar.fromGregorian year month day
    timeOfDay =
      secondsToDiffTime . fromIntegral $
        ((hours * 60) + minutes) * 60 + seconds
|
kowey/hakyll-convert
|
test/golden/Golden/IO.hs
|
bsd-3-clause
| 2,860 | 0 | 16 | 766 | 571 | 318 | 253 | 60 | 1 |
module CommandParsing (
parseCommand,
ParseFailure(..)
) where
import WorldDefinition
import Control.Applicative
import Data.Char (toLower)
-- | Ways in which turning player input into a 'Command' can fail.
data ParseFailure = EmptyCommand          -- ^ the input contained no words at all
                  | UnknownCommand String -- ^ the first word is not a known verb
                  | ParseFailure String   -- ^ a verb was recognised but its
                                          --   arguments could not be parsed
                  deriving (Show)
-- | Turn a raw input line into a 'Command': lowercase it, split into words,
-- drop filler words, then interpret the verb.
parseCommand :: String -> Either ParseFailure Command
parseCommand input =
  readCommand . stripFillerWords . words $ map toLower input
-- | Drop words that carry no meaning for command parsing ("the", "to").
stripFillerWords :: [String] -> [String]
stripFillerWords ws = [w | w <- ws, w /= "the", w /= "to"]
-- | Is the given (already lowercased) word a movement verb?
isMoveCommand :: String -> Bool
isMoveCommand w = w `elem` ["move", "go", "walk"]
-- | Is the given (already lowercased) word a looking verb?
isLookCommand :: String -> Bool
isLookCommand w = w `elem` ["look", "peer", "scan"]
-- | Interpret a pre-tokenised command line.
--
-- The first word selects the verb; movement verbs require a direction and
-- look verbs take an optional one (a bare look inspects the current room).
readCommand :: [String] -> Either ParseFailure Command
readCommand [] = Left EmptyCommand
readCommand w@(cmd:xs)
  | isMoveCommand cmd = Move <$> parseDirection xs
  | isLookCommand cmd = case xs of
      [] -> Right LookAtCurrentRoom
      _ -> Look <$> parseDirection xs
  -- 'unwords' keeps the original word boundaries in the error message;
  -- the previous 'mconcat' glued the words together ("go home" -> "gohome").
  | otherwise = Left $ UnknownCommand $ unwords w
-- | Parse the leading word of the argument list as a compass direction.
parseDirection :: [String] -> Either ParseFailure Direction
parseDirection [] = Left $ ParseFailure "no direction specified"
parseDirection (w:_) =
  case w of
    "west" -> Right West
    "east" -> Right East
    "north" -> Right North
    "south" -> Right South
    _ -> Left $ ParseFailure "unrecognised direction"
|
robashton/ghcmud
|
src/CommandParsing.hs
|
bsd-3-clause
| 1,516 | 0 | 10 | 316 | 463 | 241 | 222 | 39 | 2 |
module Light where
import Vector
-- | A light source, stored as its x, y and z position components.
data Light = Light Double Double Double
             deriving (Show)
-- | The light's location as a 'Vector'.
position :: Light -> Vector
position (Light px py pz) = vector px py pz
|
SaintDubious/HaskellTracer
|
src/HaskellTracer/Light.hs
|
bsd-3-clause
| 182 | 0 | 7 | 56 | 61 | 33 | 28 | 6 | 1 |
-- Copyright (c) 2015, Travis Bemann
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- o Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
--
-- o Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- o Neither the name of the copyright holder nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{-# LANGUAGE OverloadedStrings #-}
module Network.IRC.Client.Amphibian.ConnectionManagerServer
(ConnectionManagerServer,
ConnectionManagerServerStopResponse,
new,
start,
stop,
waitStop)
where
import qualified Network.IRC.Client.Amphibian.Connection as C
import qualified Network.IRC.Client.Amphibian.User as U
import qualified Network.IRC.Client.Amphibian.Interface as I
import Network.IRC.Client.Amphibian.Types
import Network.IRC.Client.Amphibian.Commands
import Network.IRC.Client.Amphibian.Utility
import Network.IRC.Client.Amphibian.Ctcp
import Network.IRC.Client.Amphibian.Monad
import Data.Functor ((<$>))
import Control.Monad (mapM,
                      mapM_,
                      join)
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent (threadDelay)
import Control.Concurrent.STM (STM,
                               TVar,
                               TChan,
                               TMVar,
                               atomically,
                               orElse,
                               newTVar,
                               writeTVar,
                               readTVar,
                               newBroadcastTChan,
                               writeTChan,
                               dupTChan,
                               newEmptyTMVar,
                               putTMVar,
                               readTMVar)
import Control.Concurrent.STM.TQueue (TQueue,
                                      newTQueue,
                                      readTQueue,
                                      writeTQueue)
import Control.Concurrent.Async (Async,
                                 async,
                                 cancel)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.UTF8 as BUTF8
import qualified Data.Text as T
-- | Create a new connection manager server.
--
-- The server starts out not running; 'start' flips 'cmseRunning' and begins
-- consuming 'cmseActions'.
new :: STM ConnectionManagerServer
new = do
  -- The original passed the *type* 'Bool' to newTVar instead of a value;
  -- a freshly created server must start out not running, hence False.
  running <- newTVar False
  actions <- newTQueue
  return $ ConnectionManagerServer { cmseRunning = running,
                                     cmseActions = actions }
-- | Start a manager server
--
-- NOTE(review): 'runAM (runServer server)' is not applied to an interface
-- argument, unlike the other call sites ('runAM (...) intf'), and
-- 'return . async $ ...' yields an IO action that the surrounding 'join'
-- cannot consume as AM — this block does not look compilable as written;
-- confirm the intended lifting against runAM's type.
start :: ConnectionManagerServer -> AM ()
start server = do
  intf <- getInterface
  join . liftIO . atomically $ do
    running <- readTVar $ cmseRunning server
    if not running
      then do
        writeTVar (cmseRunning server) True
        I.registerConnectionManagerServer intf server
        return . async $ runAM (runServer server)
      else return $ return ()
-- | Ask the connection manager server to stop.
--
-- Enqueues a stop action for 'runServer' and returns a handle the caller
-- can block on via 'waitStop'.
stop :: ConnectionManagerServer -> STM ConnectionManagerServerStopResponse
stop server = do
  response <- ConnectionManagerServerStopResponse <$> newEmptyTMVar
  -- cmseActions is a TQueue (see 'new'), so the action must be enqueued
  -- with writeTQueue; the original used writeTVar, which cannot type-check.
  -- NOTE(review): 'runServer' matches a payload-less PlsaStop — whichever
  -- form the action type declares in Types should win; confirm.
  writeTQueue (cmseActions server) (PlsaStop response)
  return response
-- | Block until the connection manager server acknowledges a stop request.
waitStop :: ConnectionManagerServerStopResponse -> STM (Either Error ())
waitStop (ConnectionManagerServerStopResponse r) = readTMVar r
-- | Run plugin server.
--
-- Waits for the next queued action: starting a connection manager spawns
-- its thread and answers the caller; a stop request unregisters the server
-- and ends the loop.
--
-- NOTE(review): in 'return $ do async ... return True' the 'async' call
-- (IO) is used inside an AM do-block without lifting; and 'PlsaStop' is
-- matched here without the response payload that 'stop' enqueues, so the
-- stop response TMVar is never fulfilled — confirm against the action
-- type's declaration in Types.
runServer :: ConnectionManagerServer -> AM ()
runServer server = do
  intf <- getInterface
  continue <- join . liftIO . atomically $ do
    action <- readTQueue $ cmseActions server
    case action of
      PlsaStartConnectionManager manager (ConnectionManagerStartResponse response) -> do
        active <- readTVar $ comaActive manager
        if not active
          then do
            writeTVar (comaActive manager) True
            I.registerConnectionManager intf manager
            putTMVar response $ Right ()
            return $ do
              async $ runAM (runConnectionManager manager) intf
              return True
          else do
            errorText <- I.lookupText intf "Connection manager is already started"
            putTMVar response . Left $ Error [errorText]
            return $ return True
      PlsaStop -> do
        I.unregisterConnectionManagerServer intf server
        return $ return False
  if continue
    then runServer server
    else return ()
-- | Connection manager thread body: repeatedly handle one queued action or
-- one connection event until a handler signals termination.
runConnectionManager :: ConnectionManager -> AM ()
runConnectionManager manager = do
  -- The original was missing 'do', referenced an unbound 'intf', and
  -- recursed via an undefined 'connectionManager'.  'handleAction' and
  -- 'handleEvent' take only the manager (see their signatures), so no
  -- interface value is needed here.
  continue <- join . liftIO . atomically $
    handleAction manager `orElse` handleEvent manager
  if continue
    then runConnectionManager manager
    else return ()
-- | Handle one queued action for a connection manager.
--
-- Returns, inside STM, the AM follow-up to run; that follow-up yields
-- True to keep the manager loop alive and False to terminate it.
handleAction :: ConnectionManager -> STM (AM Bool)
handleAction manager = do
  action <- readTQueue $ comaActions manager
  case action of
    ComaStop (ConnectionManagerStopResponse response) -> do
      I.unregisterConnectionManager (comaInterface manager) manager
      writeTVar (comaActive manager) False
      -- The original neither fulfilled the response (leaving waiters
      -- blocked forever) nor stopped the loop (it returned True).
      putTMVar response ()
      return $ return False
    ComaConnectNew setup (ConnectionManagerConnectResponse response) -> do
      writeTVar (comaSetup manager) $ Just setup
      return $ do
        result <- doConnect manager
        liftIO . atomically $ putTMVar response result
        return True
    -- NOTE(review): constructor is spelled 'ConnectManagerReconnectResponse'
    -- (no "ion") — confirm against its declaration in Types.
    ComaReconnect (ConnectManagerReconnectResponse response) ->
      return $ do
        result <- doReconnect manager
        liftIO . atomically $ putTMVar response result
        return True
    ComaDisconnect (ConnectionManagerDisconnectResponse response) ->
      return $ do
        result <- doDisconnect manager
        liftIO . atomically $ putTMVar response result
        return True
    ComaSend message (ConnectionManagerSendResponse response) ->
      return $ do
        result <- doSend manager message
        liftIO . atomically $ putTMVar response result
        return True
-- | Handle event for connection manager.
--
-- Pulls one event from the head subscription, republishes it as a manager
-- event, and updates connection bookkeeping on lookup/connect failures and
-- disconnects.
--
-- NOTE(review): the case over the subscription list has no [] alternative
-- (non-exhaustive); 'connection' is bound but never used; 'comaHost' is
-- written with writeTChan in the ConnFoundHostname branch but with
-- writeTVar in the ConnDisconnected branch — one of the two must be wrong;
-- 'None' is not a constructor 'convertEvent' knows; and 'async' in the
-- ConnConnected branch is not lifted into AM.
handleEvent :: ConnectionManager -> STM (AM Bool)
handleEvent manager = do
  subscription <- readTVar $ comaSubscription manager
  connection <- readTVar $ comaConnection manager
  case subscription of
    (subscription : rest) -> do
      event <- C.recv subscription
      case event of
        ConnLookupAddress _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          return $ return True
        ConnFoundAddress _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          return $ return True
        ConnLookupAddressFailed _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          writeTVar (comaConnection manager) Nothing
          writeTVar (comaSubscription manager) rest
          return $ return True
        ConnFoundHostname hostName -> do
          writeTChan (comaHost manager) (Just hostName)
          writeTChan (comaEvents manager) (convertEvent event)
          return $ return True
        ConnReverseLookupFailed _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          return $ return True
        ConnConnecting _ _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          return $ return True
        ConnConnected _ _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          return $ do
            intf <- getInterface
            async $ runAM (register manager) intf
            return True
        ConnConnectFailed _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          writeTVar (comaConnection manager) Nothing
          writeTVar (comaSubscription manager) rest
          return $ return True
        ConnDisconnected _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          writeTVar (comaConnection manager) Nothing
          writeTVar (comaRegistered manager) False
          writeTVar (comaHost manager) Nothing
          writeTVar (comaSubscription manager) rest
          return $ return True
        ConnMessage message -> do
          writeTChan (comaEvents manager) (convertEvent event)
          handleMessage manager message
        ConnMalformed _ -> do
          writeTChan (comaEvents manager) (convertEvent event)
          return $ return True
        None -> return $ return True
-- | Dispatch a received IRC message to the handler for its command; any
-- other command is ignored and the manager keeps running.
handleMessage :: ConnectionManager -> IRCMessage -> STM (AM Bool)
handleMessage manager message
  | cmd == cmd_NICK = handleNick manager message
  | cmd == cmd_PRIVMSG = handlePrivMsg manager message
  | cmd == cmd_NOTICE = handleNotice manager message
  | cmd == cmd_PING = handlePing manager message
  | otherwise = return $ return True
  where cmd = ircmCommand message
-- | Handle a NICK message: when our own nick changed, record the new one
-- and publish a self-nick event; otherwise publish a plain nick event.
handleNick :: ConnectionManager -> IRCMessage -> STM (AM Bool)
handleNick manager message = do
  ourNick <- getNick manager
  case (extractNick $ ircmPrefix message, ircmParameters message) of
    (Just prev, [next])
      | prev == ourNick -> do
          setNick manager next
          emit $ ComaRecvSelfNick prev next
      | otherwise -> emit $ ComaRecvNick prev next
    _ -> return ()
  return $ return True
  where emit = writeTChan (comaEvents manager)
-- | Handle PRIVMSG message.
--
-- CTCP-encoded bodies are republished as CTCP requests; a plain message
-- addressed to our nick spawns a user session when the sender is unknown.
--
-- NOTE(review): 'prefix' is unbound in both 'ircmPrefix prefix' uses
-- (presumably 'message' was meant); '_ -> return false' uses a lowercase,
-- undefined 'false'; and 'U.new'/'U.start' run inside 'atomically', which
-- only works if they are STM actions — confirm their types.
handlePrivMsg :: ConnectionManager -> IRCMessage -> STM (AM Bool)
handlePrivMsg manager message = do
  currentNick <- getNick manager
  isCtcp <- case (extractNick $ ircmPrefix prefix, ircmParameters message, ircmComment message) of
    (Just fromNick, [dest], Just comment) ->
      case checkCtcp comment of
        Just comment -> do
          writeTChan (comaEvents manager) $ ComaRecvCtcpRequest fromNick (parseChannelNameOrNick dest) comment
          return True
        Nothing -> return False
    _ -> return false
  if not isCtcp
    then
      case (extractNick $ ircmPrefix prefix, ircmParameters message) of
        (Just fromNick, [nick]) | nick == currentNick -> do
          return $ do
            intf <- getInterface
            liftIO . atomically $ do
              users <- I.getUsers intf
              nicks <- mapM U.getNick users
              if fromNick `notElem` nicks
                then do
                  user <- U.new intf manager fromNick
                  U.inject user $ ComaMessage message
                  U.start user
                else return ()
            return True
        _ -> return $ return True
    else return $ return True
-- | Handle NOTICE message.
--
-- CTCP-encoded bodies are republished as CTCP replies; a plain notice
-- addressed to our nick from an unknown sender is republished as a
-- manager-level notice event.
--
-- NOTE(review): 'prefix' is unbound in the first 'ircmPrefix prefix' use
-- (presumably 'message'); in the inner 'if' the then-arm is followed by a
-- stray 'return True' with no 'do' and there is no else-arm, so this does
-- not parse as written.
handleNotice :: ConnectionManager -> IRCMessage -> STM (AM Bool)
handleNotice manager message = do
  currentNick <- getNick manager
  isCtcp <- case (extractNick $ ircmPrefix prefix, ircmParameters message, ircmComment message) of
    (Just fromNick, [dest], Just comment) ->
      case checkCtcp comment of
        Just comment -> do
          writeTChan (comaEvents manager) $ ComaRecvCtcpReply fromNick (parseChannelNameOrNick dest) comment
          return True
        Nothing -> return False
    _ -> return False
  if not isCtcp
    then
      case (extractNick $ ircmPrefix message, ircmParameters message, ircmComment message) of
        (Just fromNick, [nick], Just comment) | nick == currentNick -> do
          return $ do
            intf <- getInterface
            liftIO . atomically $ do
              users <- I.getUsers intf
              nicks <- mapM U.getNick users
              if fromNick `notElem` nicks
                then writeTChan (comaEvents manager) (ComaRecvNotice fromNick comment)
                return True
        _ -> return $ return True
    else return $ return True
-- | Handle a PING message by replying with a PONG that echoes the ping's
-- comment, as the IRC protocol requires; pings without a comment, or pings
-- arriving while disconnected, are ignored.
handlePing :: ConnectionManager -> IRCMessage -> STM (AM Bool)
handlePing manager message = do
  case ircmComment message of
    Just comment -> do
      -- The original referenced an unbound 'connection'; fetch the current
      -- one from the manager and reply only while actually connected.
      connection <- readTVar $ comaConnection manager
      case connection of
        Just connection -> do
          _ <- C.send connection $ IRCMessage { ircmPrefix = Nothing,
                                                ircmCommand = cmd_PONG,
                                                ircmParameters = [comment],
                                                ircmComment = Nothing }
          return $ return True
        Nothing -> return $ return True
    _ -> return $ return True
-- | Register with a server.
--
-- NOTE(review): this function is visibly incomplete: 'continue <-' has no
-- right-hand side (a call to 'registerPassword manager' appears to have
-- been lost); 'fromJust' is partial and will crash when no connection is
-- set; and the '<$>' / '.' chain on the first line does not parse with
-- standard fixities.
register :: ConnectionManager -> AM ()
register manager = do
  connection <- fromJust <$> liftIO . atomically . readTVar $ comaConnection manager
  continue <-
  case continue of
    True -> do
      continue <- registerNick manager
      case continue of
        True -> registerUser manager
        False -> return ()
-- | How long to wait, in microseconds, after sending PASS before checking
-- the connection for a server reply (one second).
passwordRegistrationDelay :: Int
passwordRegistrationDelay = 1000 * 1000
-- | Register a password with the server.
--
-- Sends PASS when the setup carries a password, waits briefly, then peeks
-- at the next event for a rejection before letting registration continue.
--
-- NOTE(review): missing 'do' after the '='; 'passwordDelay' is undefined
-- (the constant above is 'passwordRegistrationDelay'); the guards after
-- 'Just (ConnMessage ...) ->' follow an already-complete '->' arm, which
-- does not parse; and 'comaSetup' holds a Maybe elsewhere ('doConnect')
-- but is used directly here.
registerPassword :: ConnectionManager -> AM Bool
registerPassword manager =
  connection <- liftIO . atomically . readTVar $ comaConnection manager
  case connection of
    Just connection -> do
      setup <- liftIO . atomically . readTVar $ comaSetup manager
      case comaPassword setup of
        Just password -> do
          subscription <- liftIO . atomically $ C.subscribe connection
          liftIO . atomically . C.send connection $
            IRCMessage { ircmPrefix = Nothing,
                         ircmCommand = cmd_PASS,
                         ircmParameters = [password],
                         ircmComment = Nothing }
          liftIO $ threadDelay passwordDelay
          event <- liftIO . atomically $ C.peek subscription
          case event of
            Just (ConnMessage (IRCCommand { ircmCommand = command })) ->
              | command == err_PASSWDMISMATCH -> do
                liftIO . atomically $ writeTChan (comaEvents manager) (ComaPasswordMismatch password)
                doDisconnect manager
                return False
              | command == err_YOUREBANNEDCREEP -> do
                liftIO . atomically $ writeTChan (comaEvents manager) ComaBannedFromServer
                doDisconnect manager
                return False
            _ -> return True
          Nothing -> return True
        Nothing -> return False
-- | How long to wait, in microseconds, after sending NICK before checking
-- the connection for a server reply (one second).
nickRegistrationDelay :: Int
nickRegistrationDelay = 1000 * 1000
-- | Register a nick with the server.
--
-- Builds the candidate list from the currently tracked nick (if any)
-- followed by the configured nicks, then tries them in order.
--
-- NOTE(review): 'unique' is not defined anywhere visible (Data.List.nub
-- was likely intended); the '<$>' / '.' chain does not parse with standard
-- fixities; and 'comaSetup' holds a Maybe elsewhere ('doConnect') but is
-- consumed directly here.
registerNick :: ConnectionManager -> AM Bool
registerNick manager = do
  storedNicks <- comaAllNicks <$> liftIO . atomically . readTVar $ comaSetup manager
  nick <- liftIO . atomically . readTVar $ comaNick manager
  let nicks = unique $ case nick of
        Just nick -> nick : storedNicks
        Nothing -> storedNicks
  registerCurrentNick manager nicks
-- | Register a particular nick with the server.
--
-- Announces the attempt, sends NICK, waits briefly, and reacts to the next
-- event: in-use/collision/unavailable errors retry with the next candidate,
-- a ban disconnects, anything else records the nick as accepted.
--
-- NOTE(review): the head pattern 'nicks@(nick : _)' has no [] case
-- (non-exhaustive once candidates run out); the guards follow the '->' of
-- the 'Just (ConnMessage ...)' alternative, which does not parse; the
-- guards test bare constants ('| err_NICKNAMEINUSE ->' — 'command =='
-- is missing); 'C.peek subscription' is not lifted out of STM into AM;
-- and the single-nick ERR_ERRONEUSNICKNAME branch yields () where Bool is
-- required.
registerCurrentNick :: ConnectionManager -> [Nick] -> AM Bool
registerCurrentNick manager nicks@(nick : _) = do
  liftIO . atomically $ writeTChan (comaEvents manager) (ComaAttemptingNick nick)
  connection <- liftIO . atomically . readTVar $ comaConnection manager
  case connection of
    Just connection -> do
      subscription <- liftIO . atomically $ C.subscribe connection
      liftIO . atomically . C.send connection $
        IRCMessage { ircmPrefix = Nothing,
                     ircmCommand = cmd_NICK,
                     ircmParameters = [nick],
                     ircmComment = Nothing }
      liftIO $ threadDelay nickRegistrationDelay
      event <- C.peek subscription
      case event of
        Just (ConnMessage (IRCCommand { ircmCommand = command, ircmComment = comment })) ->
          | err_NICKNAMEINUSE -> registerCurrentNick manager (nextNick nicks)
          | err_ERRONEUSNICKNAME ->
            case nicks of
              [nick] -> do
                liftIO . atomically $ writeTChan (comaEvents manager) (ComaMalformedNick nick)
              (_ : next) -> registerCurrentNick manager next
          | err_NICKCOLLISION -> registerCurrentNick manager (nextNick nicks)
          | err_UNAVAILRESOURCE -> registerCurrentNick manager (nextNick nicks)
          | err_RESTRICTED -> do
            setNick manager nick
            return True
          | err_YOUREBANNEDCREEP -> do
            liftIO . atomically $ writeTChan (comaEvents manager) (ComaBannedFromServer comment)
            doDisconnect manager
            return False
          _ -> do
            liftIO . atomically $ setNick manager nick
            return True
        _ -> return False
-- | Get the next candidate nick to try.
--
-- The last remaining candidate is retried with an underscore appended;
-- otherwise the head candidate is dropped.  The original was partial (no
-- case for the empty list), so exhausting all candidates crashed; an empty
-- input now yields an empty candidate list.
nextNick :: [Nick] -> [Nick]
nextNick [] = []
nextNick [nick] = [B.append nick (BC.singleton '_')]
nextNick (_ : next) = next
-- | How long to wait, in microseconds, after sending USER before checking
-- the connection for a server reply (one second).
userRegistrationDelay :: Int
userRegistrationDelay = 1000 * 1000
-- | Register a user with the server.
--
-- Sends USER built from the stored setup, waits briefly, then processes
-- the next event: ban disconnects, welcome marks us registered, and the
-- MOTD replies are accumulated and published as one event at ENDOFMOTD.
--
-- NOTE(review): missing 'do' after 'Just connection ->'; the IRCMessage
-- record uses 'ircCommand'/'ircParameters'/'ircComment' where every other
-- construction in this module uses the 'ircm*' field names; the guards
-- follow the '->' of the 'ConnMessage' alternative, which does not parse;
-- 'writeChan' should be 'writeTChan'; 'doDisconnect' is missing its
-- manager argument; and the final 'Nothing ->' alternative matches a
-- Maybe pattern against a ConnectionEvent scrutinee.
registerUser :: ConnectionManager -> AM ()
registerUser manager = do
  setup <- liftIO . atomically . readTVar $ comaSetup manager
  connection <- liftIO . atomically . readTVar $ comaConnection manager
  case connection of
    Just connection ->
      subscription <- liftIO . atomically $ C.subscribe connection
      liftIO . atomically . C.send connection $
        IRCMessage { ircmPrefix = Nothing,
                     ircCommand = cmd_USER,
                     ircParameters = [comaUserName setup,
                                      convertUserMode $ comaMode setup,
                                      BC.singleton '*'],
                     ircComment = Just $ comaName setup }
      liftIO $ threadDelay userRegistrationDelay
      event <- liftIO . atomically $ C.recv subscription
      case event of
        ConnMessage message@(IRCMessage { ircmCommand = command }) ->
          | command == err_YOUREBANNEDCREEP -> do
            liftIO . atomically $ writeChan (comaEvents manager) ComaBannedFromServer
            doDisconnect
          | command == rpl_WELCOME ->
            liftIO . atomically $ do
              writeTVar (comaRegistered manager) True
              writeTChan (comaEvents manager) (ComaWelcome $ ircmComment message)
          | command == rpl_MOTDSTART ->
            case ircmComment message of
              Just comment -> liftIO . atomically $ writeTVar (comaMotd manager) [comment]
              Nothing -> liftIO . atomically $ writeTVar (comaMotd manager) []
          | command == rpl_MOTD ->
            case ircmComment message of
              Just comment -> liftIO . atomically $ do
                motd <- readTVar $ comaMotd manager
                writeTVar (comaMotd manager) (comment : motd)
              Nothing -> return ()
          | command == rpl_ENDOFMOTD ->
            case ircmComment message of
              Just comment -> liftIO . atomically $ do
                motd <- readTVar $ comaMotd manager
                writeTVar (comaMotd manager) []
                writeTChan (comaEvents manager) (ComaMotd . reverse $ comment : motd)
              Nothing -> liftIO . atomically $ do
                motd <- readTVar $ comaMotd manager
                writeTVar (comaMotd manager) []
                writeTChan (comaEvents manager) (ComaMotd $ reverse motd)
          _ -> liftIO . atomically $ writeTChan (comaEvents manager) ComaRegistrationFailed
        Nothing -> return ()
-- | Encode the initial user modes as the numeric USER-command parameter:
-- bit value 4 for wallops ('w') and 8 for invisible ('i'), rendered as a
-- decimal string.
convertUserMode :: [UserMode] -> MessageParameter
convertUserMode mode = BC.pack . show $ wallopsBit + invisibleBit
  where
    wallopsBit = if BC.singleton 'w' `elem` mode then 4 else 0
    invisibleBit = if BC.singleton 'i' `elem` mode then 8 else 0
-- | Actually connect to a connection.
--
-- Requires a setup to have been stored and no live connection: creates the
-- connection, records it and a fresh subscription on the manager, then
-- initiates the connect to the configured host/port and waits for the
-- result.  Fails with an error when already connected or never configured.
doConnect :: ConnectionManager -> AM (Either Error ())
doConnect manager = do
  setup <- liftIO . atomically . readTVar $ comaSetup manager
  case setup of
    Just setup -> do
      connection <- liftIO . atomically . readTVar $ comaConnection manager
      case connection of
        Nothing -> do
          connection <- liftIO . atomically $ do
            connection <- C.new
            writeTVar (comaConnection manager) (Just connection)
            -- The new subscription is appended so existing subscriptions
            -- keep draining first.
            subscription <- C.subscribe connection
            oldSubscription <- readTVar $ comaSubscription manager
            writeTVar (comaSubscription manager) (oldSubscription ++ [subscription])
            return connection
          response <- liftIO $ C.connect connection (comaOriginalHost setup) (comaPort setup)
          liftIO . atomically $ C.waitConnect response
        Just _ -> do
          errorText <- lookupText $ T.pack "Already connected to server"
          return . Left $ Error [errorText]
    Nothing -> do
      errorText <- lookupText $ T.pack "Never connected to server"
      return . Left $ Error [errorText]
-- | Actually reconnect to a connection: disconnect first when a connection
-- exists, then connect again; a failed disconnect aborts the reconnect and
-- is returned to the caller.
doReconnect :: ConnectionManager -> AM (Either Error ())
doReconnect manager = do
  connection <- liftIO . atomically . readTVar $ comaConnection manager
  result <- case connection of
    Just _ -> doDisconnect manager
    Nothing -> return $ Right ()
  -- The original had a stray '->' after 'case result of' (syntax error)
  -- and pattern-bound the name 'error', shadowing Prelude.error.
  case result of
    Right () -> doConnect manager
    Left err -> return $ Left err
-- | Actually disconnect from a connection: issue the disconnect, wait for
-- its result, and clear the stored connection; fails with an error when
-- there is no connection to drop.
doDisconnect :: ConnectionManager -> AM (Either Error ())
doDisconnect manager = do
  connection <- liftIO . atomically . readTVar $ comaConnection manager
  case connection of
    -- The original was missing the 'do' introducing this multi-statement
    -- branch (and wrapped waitDisconnect in a redundant inner 'do').
    Just connection -> do
      response <- liftIO . atomically $ C.disconnect connection
      result <- liftIO . atomically $ C.waitDisconnect response
      liftIO . atomically $ writeTVar (comaConnection manager) Nothing
      return result
    Nothing -> do
      errorText <- lookupText $ T.pack "not connected to server"
      return . Left $ Error [errorText]
-- | Actually send a message to a connection.
--
-- Sends the message, waits for the send result, and — for PRIVMSG/NOTICE —
-- republishes it as a self-message/self-notice (or self-CTCP) event so the
-- UI can echo what we sent.
--
-- NOTE(review): the final '_ ->' arm of the inner case has an empty
-- right-hand side and 'return response'' is dedented below it, which does
-- not parse; the second '_' guard group references 'dest' and 'comment',
-- which are only bound in the '([dest], Just comment)' alternative; and
-- 'liftIO.' is missing a space in two places (legal, but inconsistent).
doSend :: ConnectionManager -> IRCMessage -> AM (Either Error ())
doSend manager message = do
  connection <- liftIO . atomically . readTVar $ comaConnection manager
  case connection of
    Just connection -> do
      response <- liftIO . atomically $ C.send connection message
      response' <- liftIO . atomically $ do C.waitSend response
      nick <- liftIO . atomically . readTVar $ comaNick manager
      case (ircmParameters message, ircmComment message) of
        ([dest], Just comment) ->
          case checkCtcp comment of
            Just comment
              | ircmCommand message == cmd_PRIVMSG ->
                  liftIO. atomically . writeTChan (comaEvents manager) $
                    ComaSelfCtcpRequest nick (parseChannelNameOrNick dest) comment
              | ircmCommand message == cmd_NOTICE ->
                  liftIO . atomically . writeTChan (comaEvents manager) $
                    ComaSelfCtcpReply nick (parseChannelNameOrNick dest) comment
              | otherwise -> return ()
            _
              | ircmCommand message == cmd_PRIVMSG ->
                  liftIO. atomically . writeTChan (comaEvents manager) $
                    ComaSelfMessage nick (parseChannelNameOrNick dest) comment
              | ircmCommand message == cmd_NOTICE ->
                  liftIO . atomically . writeTChan (comaEvents manager) $
                    ComaSelfNotice nick (parseChannelNameOrNick dest) comment
              | otherwise -> return ()
        _ ->
      return response'
    Nothing -> do
      errorText <- lookupText $ T.pack "not connected to server"
      return . Left $ Error [errorText]
-- | Translate a low-level connection event into the corresponding
-- connection manager event, carrying its payload through unchanged.
convertEvent :: ConnectionEvent -> ConnectionManagerEvent
convertEvent event =
  case event of
    ConnLookupAddress hostname -> ComaLookupAddress hostname
    ConnFoundAddress addr -> ComaFoundAddress addr
    ConnLookupAddressFailed err -> ComaLookupAddressFailed err
    ConnFoundHostname hostname -> ComaFoundHostname hostname
    ConnReverseLookupFailed err -> ComaReverseLookupFailed err
    ConnConnecting hostname port -> ComaConnecting hostname port
    ConnConnected hostname port -> ComaConnected hostname port
    ConnConnectFailed err -> ComaConnectFailed err
    ConnDisconnected err -> ComaDisconnected err
    ConnMessage message -> ComaMessage message
    ConnMalformed bytes -> ComaMalformed bytes
-- | Handle close for connection manager.
--
-- Waits for a stop request, acknowledges it, and returns a follow-up AM
-- action that closes any open connection handle and clears it from the
-- state.  The original also bound 'readTVar $ comaHandle manager', which
-- contradicted comaHandle's use as a plain field of the state and was
-- never used; that dead binding is dropped.
handleClose :: ConnectionManager -> ConnectionManagerState -> STM (AM (ConnectionManagerState, Bool))
handleClose manager state = do
  ConnectionManagerStopResponse response <- readTMVar $ comaStop manager
  putTMVar response ()
  return $ do
    case comaHandle state of
      Just handle -> closeConnectionHandle handle
      Nothing -> return ()
    -- The original applied the record update to the accessor function
    -- itself ('comaHandle { comaHandle = Nothing }'), which is not valid;
    -- the record being updated is 'state'.
    -- NOTE(review): the True in the result is kept from the original —
    -- confirm whether close should signal loop termination instead.
    return (state { comaHandle = Nothing }, True)
|
tabemann/amphibian
|
src_old/Network/IRC/Client/Amphibian/ConnectionManagerServer.hs
|
bsd-3-clause
| 25,411 | 209 | 28 | 7,050 | 5,730 | 2,888 | 2,842 | -1 | -1 |
module Main where
import Control.Monad
import System.Directory
import System.Environment
import System.Exit
import System.FilePath ((</>))
import System.Posix.User (getUserEntryForName, getEffectiveUserName, UserEntry(..))
import Data.List
import CabalIndex
-- | Make sure the database directory and index database exist, (re)building
-- the index when forced or when the database file is missing.
ensureIndex :: FilePath -- ^ directory that holds the database
            -> String   -- ^ path of the database file
            -> Bool     -- ^ rebuild even when the database already exists
            -> IO ()
ensureIndex dir name force =
  do -- 'createDirectoryIfMissing' avoids the check-then-create race of the
     -- original doesDirectoryExist/createDirectory pair; the existence
     -- check is kept only to preserve the progress message.
     dirExists <- doesDirectoryExist dir
     unless dirExists $ putStrLn $ "Creating: " ++ dir
     createDirectoryIfMissing True dir
     -- Check db file
     dbExists <- doesFileExist name
     when (force || not dbExists) $
       do putStrLn $ "Building database: " ++ name
          buildDatabase dir name
-- | Entry point: with a single argument either rebuild the package index
-- ("--rebuild") or query it for the given search term; anything else
-- prints usage and exits non-zero.
main :: IO ()
main = do
  args <- getArgs
  -- Resolve the database location under the current user's home directory.
  homeDir <- homeDirectory `fmap` (getUserEntryForName =<< getEffectiveUserName)
  let dbDir = homeDir </> ".cabalsearch"
      dbPath = dbDir </> "packages.sqlite3"
      -- 'head args' is only forced after the length check below has
      -- already exited on empty input, so it is lazily safe — but fragile.
      arg = head args
  -- show info
  when (length args /= 1 || "--help" `elem` args) $
    do name <- getProgName
       putStrLn $ "usage: " ++ name ++ " searchTerm"
       putStrLn $ " or: " ++ name ++ " --rebuild"
       exitFailure
  -- work
  -- NOTE(review): 'arg `isPrefixOf` "--rebuild"' treats ANY prefix of
  -- "--rebuild" ("-", "--", "--r", ...) as a rebuild request; confirm this
  -- abbreviation behaviour is intended rather than 'arg == "--rebuild"'.
  go (arg `isPrefixOf` "--rebuild") arg dbDir dbPath
  where
    go rebuild term dbDir dbPath =
      do -- rebuild index
         when rebuild $
           do ensureIndex dbDir dbPath True
              exitSuccess
         -- run query
         ensureIndex dbDir dbPath False
         -- NOTE(review): dbPath is already an absolute path rooted at
         -- dbDir, so 'dbDir </> dbPath' relies on (</>) discarding its
         -- left operand for absolute right operands — redundant on POSIX.
         res <- queryPackages (dbDir </> dbPath) term
         forM_ res $ \(name, meta) -> putStrLn $ concat
           ["* ", name, "\n", meta, "\n"]
|
brinchj/CabalSearch
|
Main.hs
|
bsd-3-clause
| 1,599 | 0 | 13 | 452 | 465 | 236 | 229 | 42 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- |
module Network.Wai.Middleware.StaticEmbedded (static) where
import Crypto.Hash
import Data.ByteArray.Encoding
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Maybe
import qualified Data.Text as T
import Network.HTTP.Types (status200, status304)
import Network.Mime (defaultMimeLookup)
import Network.Wai
-- | Serve the given @(path, content)@ pairs as static files.
--
-- Every file's ETag (hex SHA1 of its content) is computed once, up front,
-- and shared across requests.  A request whose @If-None-Match@ header
-- equals the ETag gets a 304 with an empty body; otherwise the content is
-- served with a MIME type guessed from the file name.  Paths not in the
-- list fall through to the wrapped application.
static :: [(FilePath, B.ByteString)] -> Middleware
static files = \app req callback ->
  case lookup (requestedPath req) hashed of
    Nothing -> app req callback
    Just (body, etag) ->
      let mime = defaultMimeLookup (T.pack (requestedPath req))
          hdrs =
            [ ("Cache-Control", "no-transform,public,max-age=300,s-maxage=900")
            , ("ETag", etag)
            , ("Vary", "Accept-Encoding")
            ]
      in callback $
           if lookup "If-None-Match" (requestHeaders req) == Just etag
             then responseLBS status304 hdrs BL.empty
             else responseLBS status200 (("Content-Type", mime) : hdrs) body
  where
    requestedPath = T.unpack . T.intercalate "/" . pathInfo
    -- Pair each file's (lazified) content with the hex SHA1 of that
    -- content; bound in 'where' so it is evaluated once per 'static files'.
    hashed =
      [ (fp, (lazy, convertToBase Base16 (hashlazy lazy :: Digest SHA1)))
      | (fp, strict) <- files
      , let lazy = BL.fromStrict strict
      ]
|
adamse/wai-middleware-static-embedded
|
src/Network/Wai/Middleware/StaticEmbedded.hs
|
bsd-3-clause
| 1,448 | 0 | 17 | 354 | 425 | 235 | 190 | 34 | 2 |
module Signal.Wavelet.Repa.LibraryBench where
import Data.Array.Repa as R
{-# INLINE benchComputeS #-}
benchComputeS :: Array D DIM1 Double -> Array U DIM1 Double
benchComputeS xs = computeS xs
{-# INLINE benchComputeP #-}
benchComputeP :: Array D DIM1 Double -> IO (Array U DIM1 Double)
benchComputeP xs = computeP xs
dataCompute :: ([Double], [Double]) -> Array D DIM1 Double
dataCompute = delay . f . snd
{-# INLINE benchCopyS #-}
benchCopyS :: Array U DIM1 Double -> Array U DIM1 Double
benchCopyS xs = copyS xs
{-# INLINE benchCopyP #-}
benchCopyP :: Array U DIM1 Double -> IO (Array U DIM1 Double)
benchCopyP xs = copyP xs
dataCopy :: ([Double], [Double]) -> Array U DIM1 Double
dataCopy = f . snd
{-# INLINE benchExtractS #-}
benchExtractS :: (DIM1, DIM1, Array U DIM1 Double) -> Array U DIM1 Double
benchExtractS (start, count, xs) = computeS . extract start count $ xs
{-# INLINE benchExtractP #-}
benchExtractP :: (DIM1, DIM1, Array U DIM1 Double) -> IO (Array U DIM1 Double)
benchExtractP (start, count, xs) = computeP . extract start count $ xs
dataExtract :: ([Double], [Double]) -> (DIM1, DIM1, Array U DIM1 Double)
dataExtract (ls, sig) = (Z :. 0, Z :. chunkSize, f sig)
where chunkSize = (length ls) `quot` 2
{-# INLINE benchAppendS #-}
benchAppendS :: (Array U DIM1 Double, Array U DIM1 Double)
-> Array U DIM1 Double
benchAppendS (xs, ys) = computeS . append xs $ ys
{-# INLINE benchAppendP #-}
benchAppendP :: (Array U DIM1 Double, Array U DIM1 Double)
-> IO (Array U DIM1 Double)
benchAppendP (xs, ys) = computeP . append xs $ ys
dataAppend :: ([Double], [Double]) -> (Array U DIM1 Double, Array U DIM1 Double)
dataAppend (_, sig) = (xs, ys)
where half = (length sig) `quot` 2
xs = fromListUnboxed (Z :. half) (take half sig)
ys = fromListUnboxed (Z :. half) (drop half sig)
{-# INLINE benchBckpermS #-}
benchBckpermS :: (DIM1, DIM1 -> DIM1, Array U DIM1 Double)
-> Array U DIM1 Double
benchBckpermS (sh, ext, xs) = computeS . backpermute sh ext $ xs
{-# INLINE benchBckpermP #-}
benchBckpermP :: (DIM1, DIM1 -> DIM1, Array U DIM1 Double)
-> IO (Array U DIM1 Double)
benchBckpermP (sh, ext, xs) = computeP . backpermute sh ext $ xs
dataBckperm :: ([Double], [Double])
-> (DIM1, DIM1 -> DIM1, Array U DIM1 Double)
dataBckperm (_, sig) = (Z :. len, id, arr)
where arr = f sig
len = size . extent $ arr
{-# INLINE benchMapS #-}
benchMapS :: (Double -> Double, Array U DIM1 Double)
-> Array U DIM1 Double
benchMapS (g, xs) = computeS . R.map g $ xs
{-# INLINE benchMapP #-}
benchMapP :: (Double -> Double, Array U DIM1 Double)
-> IO (Array U DIM1 Double)
benchMapP (g, xs) = computeP . R.map g $ xs
dataMap :: ([Double], [Double]) -> (Double -> Double, Array U DIM1 Double)
dataMap xs = (id, f . snd $ xs)
{-# INLINE benchTraverseS #-}
benchTraverseS :: (Array U DIM1 Double, DIM1 -> DIM1,
(DIM1 -> Double) -> DIM1 -> Double)
-> Array U DIM1 Double
benchTraverseS (arr, ext, g) = computeS . traverse arr ext $ g
{-# INLINE benchTraverseP #-}
benchTraverseP :: (Array U DIM1 Double, DIM1 -> DIM1,
(DIM1 -> Double) -> DIM1 -> Double)
-> IO (Array U DIM1 Double)
benchTraverseP (arr, ext, g) = computeP . traverse arr ext $ g
dataTraverse :: ([Double], [Double])
-> (Array U DIM1 Double, DIM1 -> DIM1,
(DIM1 -> Double) -> DIM1 -> Double)
dataTraverse xs = (f . snd $ xs, id, ($))
f :: [Double] -> Array U DIM1 Double
f xs = fromListUnboxed (Z :. (length xs)) xs
|
jstolarek/lattice-structure-hs
|
bench/Signal/Wavelet/Repa/LibraryBench.hs
|
bsd-3-clause
| 3,650 | 0 | 9 | 882 | 1,445 | 782 | 663 | 79 | 1 |
module HGraph.Query where
import Control.Applicative
import Control.Monad.State
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Set as S
import HGraph.Edge
import HGraph.Graph
import HGraph.Label
import HGraph.Node
import HGraph.Types
findNodes :: (Node -> Bool) -> GS [Node]
findNodes f = do g <- get
return $ filter f $ M.elems $ nodes g
findLabelNodes :: Label -> (Node -> Bool) -> GS [Node]
findLabelNodes l f = do g <- get
li <- getNodeLabelIndex l
let nlins = maybe Nothing (\val -> M.lookup val $ nodeLabelInstances g) li
let ns = nodes g
let aux nid = fromJust $ M.lookup nid ns
return $ maybe [] (filter f . map aux . S.elems) nlins
findNode :: (Node -> Bool) -> GS (Maybe Node)
findNode f = aux <$> findNodes f
where
aux ns = if null ns then Nothing else Just $ head ns
findLabelNode :: Label -> (Node -> Bool) -> GS (Maybe Node)
findLabelNode l f = aux <$> findLabelNodes l f
where
aux ns = if null ns then Nothing else Just $ head ns
findNodeById :: Id -> GS (Maybe Node)
findNodeById = getNodeById
findLabelNodeById :: Label -> Id -> GS (Maybe Node)
findLabelNodeById l i = do g <- findNodeById i
li <- getNodeLabelIndex l
return $ maybe Nothing (\n -> maybe Nothing (\val -> if hasNodeLabelIndexSN val n then Just n else Nothing) li) g
findEdgeById :: Id -> GS (Maybe Edge)
findEdgeById = getEdgeById
findLabelEdgeById :: Label -> Id -> GS (Maybe Edge)
findLabelEdgeById l i = do g <- findEdgeById i
li <- getEdgeLabelIndex l
return $ maybe Nothing (\n -> maybe Nothing (\val -> if hasEdgeLabelIndexSE val n then Just n else Nothing) li) g
findNodesByProperty :: Key -> Value -> GS [Node]
findNodesByProperty k v = findNodes $ isNodePropertyEqualSN k v
findLabelNodesByProperty :: Label -> Key -> Value -> GS [Node]
findLabelNodesByProperty l k v = findLabelNodes l $ isNodePropertyEqualSN k v
findNodeByProperty :: Key -> Value -> GS (Maybe Node)
findNodeByProperty k v = findNode $ isNodePropertyEqualSN k v
findLabelNodeByProperty :: Label -> Key -> Value -> GS (Maybe Node)
findLabelNodeByProperty l k v = findLabelNode l $ isNodePropertyEqualSN k v
-- | Pair each edge produced by @gs@ with a node derived from it by @f@
-- (e.g. its start or end node).  The graph is read once up front and the
-- projection is applied against that snapshot via 'unpackStateValue'.
getNodesByEdgesE :: (Edge -> GS Node) -> GS [Edge] -> GS [(Edge, Node)]
getNodesByEdgesE f gs = do g <- get
                           es <- gs
                           return $ map (\v -> (v, unpackStateValue f g v)) es
-- | As 'getNodesByEdgesE', but edges are given by id and resolved with
-- 'getEdgeByIdUnsafe' (presumably partial for unknown ids — TODO confirm).
getNodesByEdges :: (Edge -> GS Node) -> GS [Id] -> GS [(Edge, Node)]
getNodesByEdges f gs = do g <- get
                          is <- gs
                          let es = map (unpackStateValue getEdgeByIdUnsafe g) is
                          return $ map (\v -> (v, unpackStateValue f g v)) es
-- Neighbour lookups.  The @...N@ variants take a 'Node' directly; the
-- others take an 'Id' and resolve it with 'getNodeByIdUnsafe' first.
-- "Out" pairs each outgoing edge with its end node, "In" pairs each
-- incoming edge with its start node.

getLabelOutNodesN :: Label -> Node -> GS [(Edge, Node)]
getLabelOutNodesN l n = getNodesByEdgesE getEndNodeE (getLabelOutEdgesN l n)

getLabelOutNodes :: Label -> Id -> GS [(Edge, Node)]
getLabelOutNodes l i = getLabelOutNodesN l =<< getNodeByIdUnsafe i

getLabelInNodesN :: Label -> Node -> GS [(Edge, Node)]
getLabelInNodesN l n = getNodesByEdgesE getStartNodeN (getLabelInEdgesN l n)

getLabelInNodes :: Label -> Id -> GS [(Edge, Node)]
getLabelInNodes l i = getLabelInNodesN l =<< getNodeByIdUnsafe i

getAllOutNodesN :: Node -> GS [(Edge, Node)]
getAllOutNodesN n = getNodesByEdgesE getEndNodeE (getAllOutEdgesN n)

getAllOutNodes :: Id -> GS [(Edge, Node)]
getAllOutNodes i = getAllOutNodesN =<< getNodeByIdUnsafe i

getAllInNodesN :: Node -> GS [(Edge, Node)]
getAllInNodesN n = getNodesByEdgesE getStartNodeN (getAllInEdgesN n)

getAllInNodes :: Id -> GS [(Edge, Node)]
getAllInNodes i = getAllInNodesN =<< getNodeByIdUnsafe i

getAllNodesN :: Node -> GS [(Edge, Node)]
getAllNodesN n = getNodesByEdgesE getStartNodeN (getAllEdgesN n)

getAllNodes :: Id -> GS [(Edge, Node)]
getAllNodes i = getAllNodesN =<< getNodeByIdUnsafe i

getOutNodesN :: (Label -> Bool) -> Node -> GS [(Edge, Node)]
getOutNodesN f n = getNodesByEdgesE getEndNodeE (getOutEdgesN f n)

getOutNodes :: (Label -> Bool) -> Id -> GS [(Edge, Node)]
getOutNodes f i = getOutNodesN f =<< getNodeByIdUnsafe i

getInNodesN :: (Label -> Bool) -> Node -> GS [(Edge, Node)]
getInNodesN f n = getNodesByEdgesE getStartNodeN (getInEdgesN f n)

getInNodes :: (Label -> Bool) -> Id -> GS [(Edge, Node)]
getInNodes f i = getInNodesN f =<< getNodeByIdUnsafe i

getNodesN :: (Label -> Bool) -> Node -> GS [(Edge, Node)]
getNodesN f n = getNodesByEdgesE getStartNodeN (getEdgesN f n)

getNodes :: (Label -> Bool) -> Id -> GS [(Edge, Node)]
getNodes f i = getNodesN f =<< getNodeByIdUnsafe i

getFilteredOutNodesN :: (Edge -> Bool) -> (Label -> Bool) -> Node -> GS [(Edge, Node)]
getFilteredOutNodesN ef lf n = getNodesByEdgesE getEndNodeE (getFilteredOutEdgesN ef lf n)

getFilteredOutNodes :: (Edge -> Bool) -> (Label -> Bool) -> Id -> GS [(Edge, Node)]
getFilteredOutNodes ef lf i = getFilteredOutNodesN ef lf =<< getNodeByIdUnsafe i

getFilteredInNodesN :: (Edge -> Bool) -> (Label -> Bool) -> Node -> GS [(Edge, Node)]
getFilteredInNodesN ef lf n = getNodesByEdgesE getStartNodeN (getFilteredInEdgesN ef lf n)

getFilteredInNodes :: (Edge -> Bool) -> (Label -> Bool) -> Id -> GS [(Edge, Node)]
getFilteredInNodes ef lf i = getFilteredInNodesN ef lf =<< getNodeByIdUnsafe i

getFilteredNodesN :: (Edge -> Bool) -> (Label -> Bool) -> Node -> GS [(Edge, Node)]
getFilteredNodesN ef lf n = getNodesByEdgesE getEndNodeE (getFilteredEdgesN ef lf n)

getFilteredNodes :: (Edge -> Bool) -> (Label -> Bool) -> Id -> GS [(Edge, Node)]
getFilteredNodes ef lf i = getFilteredNodesN ef lf =<< getNodeByIdUnsafe i
-- | True iff edge @e@ carries every label in @ls@.
-- Rewritten with 'all': same result as the original @foldl (&&) True@
-- fold, but short-circuits at the first missing label instead of
-- building the whole conjunction as a lazy thunk chain.
hasEdgeLabels :: [Label] -> Edge -> GS Bool
hasEdgeLabels ls e = do g <- get
                        return $ all (\l -> evalState (hasEdgeLabelE l e) g) ls
-- | True iff node @n@ carries every label in @ls@.
hasNodeLabels :: [Label] -> Node -> GS Bool
hasNodeLabels ls n = do g <- get
                        return $ all (\l -> evalState (hasNodeLabelN l n) g) ls
|
gpahal/hgraph
|
src/HGraph/Query.hs
|
bsd-3-clause
| 6,163 | 0 | 15 | 1,513 | 2,315 | 1,181 | 1,134 | 106 | 2 |
----------------------------------------------------------------------------
-- |
-- Module : ModuleWithQualifiedReexport
-- Copyright : (c) Sergey Vinokurov 2015
-- License : BSD3-style (see LICENSE)
-- Maintainer : [email protected]
----------------------------------------------------------------------------
module ModuleWithQualifiedReexport
(foo2, Quux.bar2)
where
import ModuleWithMultilineExportList as Quux
|
sergv/tags-server
|
test-data/0002export_lists/ModuleWithQualifiedReexport.hs
|
bsd-3-clause
| 437 | 0 | 5 | 54 | 26 | 20 | 6 | 3 | 0 |
module Gifter.Logging (
logTime,
logTimeWhen
) where
import Control.Monad
import Control.Monad.IO.Class
import System.Locale
import Text.Printf
import Data.Time.Clock
import Data.Time.Format
import Data.Time.LocalTime
-- | Print @msg@ prefixed with the current local time
-- (format @[YYYY-MM-DD HH:MM:SS] msg@), followed by a newline.
logTime :: (MonadIO m) => String -> m ()
logTime msg = liftIO $ do
    now <- getCurrentTime
    zone <- getTimeZone now
    let stamp = formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S" (utcToLocalTime zone now)
    void $ printf "[%s] %s" stamp msg
    putStrLn ""
-- | Conditional 'logTime': log the message only when the flag holds.
logTimeWhen :: (MonadIO m) => Bool -> String -> m ()
logTimeWhen cond = when cond . logTime
|
arjantop/gifter
|
src/Gifter/Logging.hs
|
bsd-3-clause
| 661 | 0 | 10 | 129 | 214 | 109 | 105 | 20 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{- |
Module : Verifier.SAW.Position
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (language extensions)
-}
module Verifier.SAW.Position
( Pos(..)
, ppPos
, incLine
, incCol
, Positioned(..)
, PosPair(..)
) where
import System.FilePath (makeRelative)
-- | A source position: file path plus line and column, together with a
-- base directory used only for pretty printing (see 'ppPos').
data Pos = Pos { -- | Base directory to use for pretty printing purposes
                 posBase :: !FilePath
               , posPath :: !FilePath -- ^ Path of the source file.
               , posLine :: !Int -- ^ Line number.
               , posCol :: !Int -- ^ Column number.
               }
  deriving (Show)
-- | Comparison key for a position: line and column first (cheap 'Int'
-- comparisons), file path last.  'posBase' is deliberately excluded.
posTuple :: Pos -> (Int,Int,FilePath)
posTuple (Pos _ path line col) = (line, col, path)

-- | Equality via 'posTuple': ignores 'posBase' and compares line/column
-- before touching the path strings.
instance Eq Pos where
  a == b = posTuple a == posTuple b

-- | Ordering via 'posTuple'; same rationale as the 'Eq' instance.
instance Ord Pos where
  compare a b = posTuple a `compare` posTuple b
-- | Render a position as @path:line:col:@ with the path shown relative
-- to 'posBase'.
ppPos :: Pos -> String
ppPos p = concat [relPath, ":", show (posLine p), ":", show (posCol p), ":"]
  where relPath = makeRelative (posBase p) (posPath p)
-- | Advance to the next line.  The column is reset to 0, so the first
-- 'incCol' afterwards yields column 1 (columns look 1-based in 'ppPos'
-- output — TODO confirm intended).
incLine :: Pos -> Pos
incLine p = p { posLine = 1 + posLine p, posCol = 0 }
-- | Advance one column on the current line.
incCol :: Pos -> Pos
incCol p = p { posCol = 1 + posCol p }
-- | Values that carry a source position.
class Positioned v where
  pos :: v -> Pos
-- | A value annotated with the position it was found at.
data PosPair v = PosPair { _pos :: !Pos, val :: !v }
  deriving (Eq, Ord, Functor, Show)
instance Positioned (PosPair v) where
  pos (PosPair p _) = p
|
iblumenfeld/saw-core
|
src/Verifier/SAW/Position.hs
|
bsd-3-clause
| 1,521 | 0 | 11 | 392 | 446 | 242 | 204 | 46 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-|
Module : $Header$
Copyright : (c) 2015 Swinburne Software Innovation Lab
License : BSD3
Maintainer : Rhys Adams <[email protected]>
Stability : unstable
Portability : portable
Functions for altering 'AppState' with appropriate side effects in a
'TransitionaryState' state monad.
-}
module Eclogues.State.Monad (
-- * 'TransitionaryState'
TransitionaryState (..)
, HasTransitionaryState (..)
-- ** In a 'MonadState'
, TS
, runStateTS
, runState
-- * View
, getJob
, getDependents
-- * Mutate
, insertJob
, deleteJob
, setJobStage
, setJobSatis
, addRevDep
, removeRevDep
, schedule
-- * Load
, loadJobs
) where
import qualified Eclogues.Persist as Persist
import Eclogues.Scheduling.Command (ScheduleCommand (..))
import Eclogues.State.Types (AppState (..), jobs, revDeps)
import qualified Eclogues.State.Types as EST
import qualified Eclogues.Job as Job
import Control.Lens ((.~), (?=), (%=), (^.), (<>=), at, ix, sans, use, non)
import Control.Lens.TH (makeClassy)
import Control.Monad.State (MonadState, StateT, runStateT)
import Data.Default.Generics (Default)
import Data.Functor.Identity (Identity, runIdentity)
import qualified Data.HashMap.Lazy as HashMap
import qualified Data.List as List
import GHC.Generics (Generic)
-- | A new AppState and the side effects associated with transitioning to it.
-- '_scheduleCommands' collects commands most-recent-first (each is consed
-- on in 'schedule'); '_persist' accumulates an optional database action
-- via the @Maybe@/@Action@ 'Semigroup' ('<>=').
data TransitionaryState = TransitionaryState { _appState :: AppState
                                             , _scheduleCommands :: [ScheduleCommand]
                                             , _persist :: Maybe (Persist.Action ()) }
                          deriving (Generic)
instance Default TransitionaryState
-- Generates the 'HasTransitionaryState' class and the lenses used below.
$(makeClassy ''TransitionaryState)
-- Route 'EST.HasAppState' through the generated 'appState' lens.
instance EST.HasAppState TransitionaryState where appState = appState
-- | Convenience constraint synonym for monads carrying a 'TransitionaryState'.
type TS m = (MonadState TransitionaryState m)

-- | Run an action against a fresh 'TransitionaryState' built from the
-- given 'AppState': no schedule commands, no persistence action.
runStateTS :: (Monad m) => AppState -> StateT TransitionaryState m a -> m (a, TransitionaryState)
runStateTS initial action = runStateT action (TransitionaryState initial [] Nothing)

-- | Pure specialisation of 'runStateTS'.
runState :: AppState -> StateT TransitionaryState Identity a -> (a, TransitionaryState)
runState st action = runIdentity (runStateTS st action)
-- | Record a scheduler command and the matching persistence intent.
schedule :: (TS m) => ScheduleCommand -> m ()
schedule cmd = do
    scheduleCommands %= (cmd :)
    persist <>= Just (Persist.scheduleIntent cmd)
-- | Insert (or overwrite) a job, keyed by its name, and persist it.
insertJob :: (TS m) => Job.Status -> m ()
insertJob st = do
    jobs . at (st ^. Job.name) ?= st
    persist <>= Just (Persist.insert st)
-- | Remove a job by name and persist the deletion.
deleteJob :: (TS m) => Job.Name -> m ()
deleteJob name = do
    jobs %= sans name
    persist <>= Just (Persist.delete name)
-- | Look up a job by name.
getJob :: (TS m) => Job.Name -> m (Maybe Job.Status)
getJob name = use $ jobs . at name
-- | Set a job's stage and persist the change.  No-op if the job is absent
-- ('ix' targets nothing), but the persistence action is still recorded.
setJobStage :: (TS m) => Job.Name -> Job.Stage -> m ()
setJobStage name st = do
    jobs . ix name %= (Job.stage .~ st)
    persist <>= Just (Persist.updateStage name st)
-- | Set a job's satisfiability and persist the change (same absent-job
-- caveat as 'setJobStage').
setJobSatis :: (TS m) => Job.Name -> Job.Satisfiability -> m ()
setJobSatis name st = do
    jobs . ix name %= (Job.satis .~ st)
    persist <>= Just (Persist.updateSatis name st)
-- | Add a reverse dependency; second name depends on first.
addRevDep :: (TS m) => Job.Name -> Job.Name -> m ()
addRevDep on by = revDeps %= HashMap.insertWith (++) on [by]
-- | Remove a reverse dependency; the second name no longer depends on the first.
-- Deletes the whole map entry when the dependent list becomes empty.
removeRevDep :: (TS m) => Job.Name -> Job.Name -> m ()
removeRevDep on by = revDeps . at on %= (>>= removed) where
    removed lst = case List.delete by lst of
        [] -> Nothing
        a  -> Just a
-- | All jobs depending on the given one ([] when none are recorded).
getDependents :: (TS m) => Job.Name -> m [Job.Name]
getDependents name = use $ revDeps . at name . non []
-- | Load persisted jobs into the state, rebuilding the reverse-dependency
-- map from each job's dependsOn list.  Does not touch persistence.
loadJobs :: (TS m) => [Job.Status] -> m ()
loadJobs = mapM_ $ \j -> do
    let name = j ^. Job.name
    jobs . at name ?= j
    mapM_ (`addRevDep` name) $ j ^. Job.dependsOn
|
futufeld/eclogues
|
eclogues-mock/src/Eclogues/State/Monad.hs
|
bsd-3-clause
| 4,041 | 0 | 13 | 887 | 1,185 | 651 | 534 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
module Main where
import Criterion.Types
import Criterion.Main
import Control.DeepSeq.Generics
import Data.Monoid((<>))
import Data.List (foldl')
import System.Random
import qualified Data.Foldable as Fold
import qualified Data.FingerTree as F
import qualified Data.Interval as I
import qualified Data.FingerTree.IntervalMap as I
-- | Criterion configuration: force a GC between runs for stable timings,
-- and allow a generous per-benchmark time budget.
-- (Added the missing top-level type signature; 'Config' comes from
-- "Criterion.Types", which is already imported.)
myConfig :: Config
myConfig = defaultConfig {
      -- Always GC between runs.
      forceGC = True
    , timeLimit = 30.0
  }
-- | Benchmark a function both to normal form (@nf-@ prefix) and to weak
-- head normal form (@lazy-@ prefix).
benchLS :: NFData b => String -> (a -> b) -> a -> [Benchmark]
benchLS lbl f arg =
    [ bench ("nf-" ++ lbl) (nf f arg)
    , bench ("lazy-" ++ lbl) (whnf f arg)
    ]

-- | Apply a benchmark builder to an input derived from @n@, tagging the
-- label with @n@.  Type left inferred, roughly:
-- Show n => (String -> v -> r) -> String -> v -> (n -> a) -> n -> r
benchN f lbl v mkInput n = f taggedName v (mkInput n)
  where
    taggedName = lbl ++ "(" ++ show n ++ ")"

-- | Benchmark to normal form only.
benchS :: NFData b => String -> (a -> b) -> a -> [Benchmark]
benchS lbl f arg = [bench lbl (nf f arg)]

-- | Benchmark to weak head normal form only.
benchL :: NFData b => String -> (a -> b) -> a -> [Benchmark]
benchL lbl f arg = [bench lbl (whnf f arg)]
-- | Build the random fixtures (forced to NF so construction cost stays out
-- of the timings), then run the interval-map benchmark groups.
main = do
  putStr "Creating test structures . . . "
  let g = mkStdGen 22
  -- 10k random Ints in [0, n]; fixed seed for reproducibility.
  let rlist n = map (`mod` (n+1)) (take 10000 (randoms g)) :: [Int]
      r10 = rlist 10
      r100 = rlist 100
      r1000 = rlist 1000
  rnf [r10, r100, r1000] `seq` return ()
  putStr "random lists ready . . . "
  -- Random intervals with endpoints bounded by n, plus three maps of
  -- differing density and the query-interval lists.
  let tops n = map (abs . (`mod` (n+1))) (take 22000 (randoms (mkStdGen 22))) :: [Int]
      bottoms = take 22000 (randoms (mkStdGen 222))
      randintervals n = zipWith (\t b -> I.Interval (b `mod` (t + 1)) t) (tops n) bottoms
      i0 = I.fromFoldable . flip zip [1..] $ take 1000 $ randintervals 1000 :: I.IntervalMap Int Int
      idense = I.fromFoldable . flip zip [1..] $ take 1000 $ randintervals 100
      isparse = I.fromFoldable . flip zip [1..] $ take 100 $ randintervals 10000
      ri100 = getRandIntervals $ rlist 100 :: [I.Interval Int]
      ri1000 = getRandIntervals $ rlist 1000
      ri10000 = getRandIntervals $ rlist 10000
  rnf [i0, idense, isparse] `seq` return ()
  rnf [ri100, ri1000, ri10000] `seq` return ()
  putStr "random intervals ready . . . "
  putStrLn "done!"
  defaultMainWith myConfig [
      bgroup "insertion" (
         benchN benchLS "fromFoldable" I.fromFoldable listchar 100
         <> benchN benchLS "fromFoldable" I.fromFoldable listchar 1000
      )
    , bgroup "intersections, 1k q" [
        bench "100 in [0, 10000]" $
        nf (map (I.intersections isparse)) (take 1000 ri10000)
      , bench "1000 in [0, 1000]" $
        nf (map (I.intersections i0)) (take 1000 ri1000)
      , bench "1000 in [0, 100]" $
        nf (map (I.intersections idense)) (take 1000 ri100)
      ]
    , bgroup "dominators, 1k q" [
        bench "100 in [0, 10000]" $
        nf (map (I.dominators isparse)) (take 1000 ri10000)
      , bench "1000 in [0, 1000]" $
        nf (map (I.dominators i0)) (take 1000 ri1000)
      , bench "1000 in [0, 100]" $
        nf (map (I.dominators idense)) (take 1000 ri100)
      ]
    , bgroup "contents, 1k q" [
        bench "100 in [0, 10000]" $
        nf (map (I.contents isparse)) (take 1000 ri10000)
      , bench "1000 in [0, 1000]" $
        nf (map (I.contents i0)) (take 1000 ri1000)
      , bench "1000 in [0, 100]" $
        nf (map (I.contents idense)) (take 1000 ri100)
      ]
    ]
  where
    -- Pair up consecutive list elements as (lo, hi) intervals.
    getRandIntervals :: Ord a => [a] -> [I.Interval a]
    getRandIntervals lst = go [] lst
      where
        go acc (x:y:xs)
          | x > y = go (I.Interval y x : acc) xs
          | otherwise = go (I.Interval x y : acc) xs
        go acc _ = acc
    -- All intervals [x, y] with 1 <= x <= n and x <= y <= 2n.
    intervalsUpTo n = do
      x <- [1..n]
      y <- [x..2 * n]
      return $ I.Interval x y
    -- Insertion fixture: intervals tagged with cycling letters, pre-forced.
    listchar :: Int -> [(I.Interval Int, Char)]
    listchar n = force $ zip (intervalsUpTo n) (cycle ['a'..'z'])
|
peddie/fingertrees
|
bench/intervaltrees.hs
|
bsd-3-clause
| 3,835 | 0 | 17 | 1,083 | 1,501 | 768 | 733 | 84 | 2 |
{-# LANGUAGE BangPatterns #-}
module SLM.Gobble.GobbleMultiClassM where
import qualified Data.Vector.Unboxed.Mutable as VM
import qualified Data.Vector.Unboxed as V
import qualified Data.Foldable as F
import Control.Monad.Primitive
import SLM.Gobble.GobbleM
import SLM.Gobble.GobbleDataTypes
import SLM.Gobble.GobbleArgs
import SLM.DataTypes
-- PUBLIC INTERFACE
-- | Entry point: hash each observation's predictors once per available
-- alternative, then run the multi-class trainer.  Returns the learned
-- weight vector.
gobbleMultiClass :: GobbleArgs -> [[Predictor]] -> [Alt] -> [[Alt]] -> [Double] -> IO (V.Vector Double)
gobbleMultiClass args xs chosenAlts availableAlts ws = do
  gobbleGobbleMultiClass args availGobbleXs chosenAlts availableAlts ws
  where hashFunc = murmurHash (bits args)
        availGobbleXs = zipWith (expandedGobbleXs hashFunc) xs availableAlts -- type [[[GobbleX]]], list by obs x alt x var
-- | Expand one observation's predictors into one hashed feature list per
-- available alternative.  (Eta-reduced the redundant @\alt -> ... alt@
-- lambda; behaviour is unchanged.)
expandedGobbleXs :: (String -> Int) -> [Predictor] -> [Alt] -> [[GobbleX]]
expandedGobbleXs hashFunc predictors = map (makeAltSpecificGobbleXs hashFunc predictors)
-- | Train multi-class weights over hashed features: allocate the weight
-- vector and a companion vector, fold every observation through
-- 'processMultiClassObs', then freeze and return the weights.
gobbleGobbleMultiClass :: GobbleArgs -> [[[GobbleX]]] -> [Alt] -> [[Alt]] -> [Double] -> IO (V.Vector Double)
gobbleGobbleMultiClass args xs chosenAlts availableAlts ws = do
  wtsVec <- VM.replicate size 0
  -- gsVec starts at 1; presumably per-weight learning-rate state used by
  -- the gradient step — TODO confirm against processObs_.
  gsVec <- VM.replicate size 1
  mapM_ (\(choiceSet,chosen) -> processMultiClassObs wtsVec gsVec gradFunc predFunc choiceSet chosen) observations
  V.unsafeFreeze wtsVec
  where size = vecSize (bits args)
        gradFunc = gobbleGradientLogistic
        predFunc = predictMultiClassLogit
        choiceSets = zipWith MultiClassChoiceSet xs availableAlts
        observations = zipWith pair choiceSets chosenAlts
-- | Mutable vector of Doubles in IO.
type MVecD = VM.MVector RealWorld Double
-- | One observation: per-alternative hashed feature lists plus the
-- alternatives themselves (parallel lists).
data MultiClassChoiceSet = MultiClassChoiceSet {
    choiceSetXs :: [[GobbleX]]
  , choiceSetAlts :: [Alt]
  }
-- | Per-alternative training triple: features, predicted probability,
-- and the 0/1 indicator of whether this alternative was chosen.
data AltData = AltData {
    xs :: [GobbleX]
  , prediction :: Double
  , actual :: Double
  }
-- | One training step for a single observation: read current weights for
-- every feature index, predict class probabilities, then run one gradient
-- update per alternative.  NOTE: the where-bound @xs@ shadows the 'xs'
-- record accessor, and @pred@ shadows Prelude's 'pred'.
processMultiClassObs :: MVecD -> MVecD -> GradientFunc -> MultiClassPredictionFunc -> MultiClassChoiceSet -> Alt -> IO ()
processMultiClassObs wtsVec gsVec gradFunc predFunc (MultiClassChoiceSet !availXs !availAlts) !chosenAlt = do
  ws <- mapM (mapM (VM.read wtsVec)) indicies
  let probabilities = predFunc ws xs
  let altDatas = (zipWith3 AltData availXs probabilities actuals)
  mapM_ f altDatas
  where indicies = map (map xIndex) availXs
        xs = map (map xValue) availXs
        actuals = map (\av -> if av == chosenAlt then 1.0 else 0.0) availAlts
        f (AltData xs pred actual) = processObs_ wtsVec gsVec gradFunc pred (xs,actual)
-- | Given per-alternative weights and feature values, return one
-- probability per alternative.
type MultiClassPredictionFunc = [[Double]] -> [[Double]] -> [Double]
-- | Predict class probabilities for a choice set against frozen weights.
-- (The @alts@ binding is currently unused.)
predictMCLogitWithChoiceSet :: V.Vector Double -> MultiClassChoiceSet -> [Double]
predictMCLogitWithChoiceSet wtsVec (MultiClassChoiceSet altXs alts) = predictMultiClassLogit ws xs
  where indicies = map (map xIndex) altXs
        ws = map (map (wtsVec V.!)) indicies
        xs = map (map xValue) altXs
-- | Softmax over the per-alternative linear scores (exp then normalise).
predictMultiClassLogit :: [[Double]] -> [[Double]] -> [Double]
predictMultiClassLogit altWs altXs = allocateInProportionTo $ map exp zs
  where zs = zipWith sumproduct altWs altXs
-- | Convenience wrapper: hash raw predictors per alternative, then predict.
predictMultiClassLogitWithPredictors :: Int -> V.Vector Double -> [Predictor] -> [Alt] -> [Double]
predictMultiClassLogitWithPredictors bits wtsVec predictors alts = predictMCLogitWithChoiceSet wtsVec choiceSet
  where hashFunc = murmurHash bits
        gobbleXs = expandedGobbleXs hashFunc predictors alts
        choiceSet = MultiClassChoiceSet gobbleXs alts
-- | Normalise a list so its elements sum to 1, preserving proportions.
-- The total is forced with 'seq' before mapping so no thunk of the sum
-- is retained.  (Division by zero yields NaN/Infinity, as before; callers
-- feed positive 'exp' outputs.)
allocateInProportionTo :: [Double] -> [Double]
allocateInProportionTo !weights = total `seq` map (/ total) weights
  where total = F.foldl' (+) 0 weights
|
timveitch/Gobble
|
src/SLM/Gobble/GobbleMultiClassM.hs
|
bsd-3-clause
| 3,604 | 0 | 13 | 661 | 1,094 | 587 | 507 | 63 | 2 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE TemplateHaskell #-}
module Language.Epilog.Epilog
( Byte
, Epilog
, EpilogConfig (..)
, EpilogState (..)
, Strings
, Types
, err
, get
, gets
, initialState
, mipsConfig
, modify
, runEpilog
, runEpilog'
-- State Lenses
, symbols, strings, pendProcs, types, position, input
, prevChar, bytes, scanCode, commentDepth, current, curfields
, curkind, forVars, caseTypes, offset, curProcType, structSize
, structAlign, parameters, procedures, parseOK, entryKind, curStackSize
) where
--------------------------------------------------------------------------------
import Language.Epilog.AST.Expression (VarKind (..))
import Language.Epilog.AST.Instruction
import Language.Epilog.AST.Procedure
import Language.Epilog.At
import Language.Epilog.Common
import Language.Epilog.SymbolTable hiding (empty)
import Language.Epilog.Type
--------------------------------------------------------------------------------
import Control.Lens (makeLenses, (.=))
import Control.Monad.Trans.RWS.Strict (RWST, get, gets, modify,
runRWST)
import Data.Semigroup (Max (..))
import System.IO (hPrint, stderr)
--------------------------------------------------------------------------------
-- Synonyms ----------------------------
type Strings = Map String Int32 -- ^ Interned string literals -> id.
type Pending = Map Name (At Type) -- ^ Procedures referenced before definition.
type Byte = Word8
-- | The configuration of the compiler monad.
data EpilogConfig = EpilogConfig
  { basicTypes :: Map Name (Type, Position) -- ^ Built-in types by name.
  , predefinedProcs :: Map String Procedure -- ^ Built-in procedures.
  , pointerSize :: Int -- ^ Target pointer size in bytes.
  , pointerAlign :: Int } -- ^ Target pointer alignment in bytes.
makeLenses ''EpilogConfig
-- | Configuration for the MIPS target: 4-byte, 4-aligned scalars and
-- pointers, plus the built-in conversion procedures.
mipsConfig :: EpilogConfig
mipsConfig = EpilogConfig
  { basicTypes = mipsTypes
  , predefinedProcs = mipsProcs
  , pointerSize = mipsPointerSize
  , pointerAlign = mipsPointerAlign }
  where
    -- (name, (type with size/align, declaring position))
    mipsTypes =
      [ ("boolean"  , ( Basic EpBoolean   4 4, Epilog ))
      , ("character", ( Basic EpCharacter 4 4, Epilog ))
      , ("float"    , ( Basic EpFloat     4 4, Epilog ))
      , ("integer"  , ( Basic EpInteger   4 4, Epilog ))
      , ("string"   , ( EpStr 0 1,             Epilog ))
      , ("void"     , ( Basic EpVoid      0 0, Epilog )) ]
    -- Conversion procedures: each takes any of the other scalar types
    -- by value and returns the target type.
    mipsProcs =
      [ ("toBoolean"   , EpiProc "toBoolean"
          ([(ValMode, OneOf [ charT, floatT, intT ])] :-> boolT ))
      , ("toCharacter" , EpiProc "toCharacter"
          ([(ValMode, OneOf [ boolT, floatT, intT ])] :-> charT ))
      , ("toFloat"     , EpiProc "toFloat"
          ([(ValMode, OneOf [ boolT, charT, intT  ])] :-> floatT))
      , ("toInteger"   , EpiProc "toInteger"
          ([(ValMode, OneOf [ boolT, charT, floatT ])] :-> intT )) ]
    mipsPointerSize = 4
    mipsPointerAlign = 4
-- | The state of the compiler monad. Includes the Lexer and Parser states.
data EpilogState = EpilogState
  { _symbols :: SymbolTable -- ^ Current symbol table.
  , _entryKind :: VarKind -- ^ Kind assigned to newly declared entries.
  , _parseOK :: Bool -- ^ False once any error has been reported.
  , _strings :: Strings -- ^ Interned string literals.
  , _pendProcs :: Pending -- ^ Procedures used before their definition.
  , _types :: Types -- ^ Declared types.
  , _current :: Maybe (At Name) -- ^ Struct/proc currently being declared.
  , _curfields :: Seq (At Name, Type, Int) -- ^ Fields of the struct under construction.
  , _curkind :: Maybe StructKind -- ^ Kind of the struct under construction.
  , _structSize :: Int -- ^ Running size of the struct under construction.
  , _structAlign :: Int -- ^ Running alignment of same.
  , _forVars :: [(At Name, Type)] -- ^ Stack of active for-loop variables.
  , _caseTypes :: [At Type] -- ^ Stack of active case-scrutinee types.
  , _offset :: [Int] -- ^ Stack of current variable offsets.
  , _curProcType :: Type -- ^ Type of the procedure being processed.
  , _curStackSize :: Max Word32 -- ^ High-water mark of the stack frame.
  , _procedures :: Map String Procedure -- ^ Declared procedures.
  , _parameters :: Params -- ^ Parameters of the current procedure.
  , _position :: Position -- ^ Lexer: current position.
  , _input :: String -- ^ Lexer: remaining input.
  , _prevChar :: Char -- ^ Lexer: previously read character.
  , _bytes :: [Byte] -- ^ Lexer: pending bytes of the current char.
  , _scanCode :: Int -- ^ Lexer: current start code.
  , _commentDepth :: Int } -- ^ Lexer: nesting depth of block comments.
makeLenses ''EpilogState
-- | Initial compiler state for the given input string: empty tables,
-- position 1:1, and a previous character of newline so the lexer treats
-- the first line as line-initial.
initialState :: String -> EpilogState
initialState inp = EpilogState
  { _symbols = emptyP Epilog
  , _entryKind = Global
  , _parseOK = True
  , _strings = []
  , _pendProcs = []
  , _types = []
  , _current = Nothing
  , _curfields = []
  , _curkind = Nothing
  , _structSize = 0
  , _structAlign = 0
  , _forVars = []
  , _caseTypes = []
  , _offset = [0]
  , _curProcType = None
  , _curStackSize = Max 0
  , _procedures = []
  , _parameters = []
  , _position = Position 1 1
  , _input = inp
  , _prevChar = '\n'
  , _bytes = []
  , _scanCode = 0
  , _commentDepth = 0 }
-- The Monad ---------------------------
-- | The compiler monad: reads an 'EpilogConfig', threads 'EpilogState',
-- no writer output, over IO.
type Epilog = RWST EpilogConfig () EpilogState IO
-- | Run with an explicit config and state.
runEpilog' :: Epilog a -> EpilogConfig -> EpilogState -> IO (a, EpilogState, ())
runEpilog' = runRWST
-- | Run with the MIPS config and a fresh state built from the input.
runEpilog :: Epilog a
          -> String
          -> IO (a, EpilogState, ())
runEpilog x inp = runEpilog' x mipsConfig (initialState inp)
-- | Report an error on stderr and mark the parse as failed.
err :: Show a => a -> Epilog ()
err msg = do
  parseOK .= False
  liftIO . hPrint stderr $ msg
|
adgalad/Epilog
|
src/Haskell/Language/Epilog/Epilog.hs
|
bsd-3-clause
| 5,020 | 0 | 15 | 1,475 | 1,291 | 784 | 507 | 131 | 1 |
-- | Reads OFX file on standard input. Parses it and pretty prints
-- the result to standard output.
module Main where
import Data.OFX
-- | Read an OFX document from stdin, parse it, and write the
-- pretty-printed rendering to stdout.
main :: IO ()
main = interact prettyRenderOfxFile
|
massysett/ofx
|
exe/renderOfx.hs
|
bsd-3-clause
| 189 | 0 | 6 | 36 | 29 | 17 | 12 | 4 | 1 |
{-|
Module : Game.GoreAndAsh.Actor.Collection
Description : Handling dynamic collections of actors
Copyright : (c) Anton Gushcha, 2015-2016
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
-}
module Game.GoreAndAsh.Actor.Collection(
dynCollection
, dDynCollection
, DynCollection(..)
, module ReExport
) where
import Control.Monad
import Control.Wire
import Control.Wire.Unsafe.Event
import Data.Filterable
import Prelude hiding ((.), id)
import qualified Data.Foldable as F
import Game.GoreAndAsh
import Game.GoreAndAsh.Actor.API
import Game.GoreAndAsh.Actor.Indexed
import Game.GoreAndAsh.Actor.Collection.Data as ReExport
-- | Eliminate an 'Event': run the monadic continuation on the event's
-- payload, or return the fallback when no event occurred.
--
-- Note: the function is isomorphic to @Data.Maybe.maybe@.
onEvent :: Monad m => b -> Event a -> (a -> m b) -> m b
onEvent fallback NoEvent _ = return fallback
onEvent _ (Event a) k = k a
-- | Makes dynamic collection of wires.
--
-- * First input of wire is input for each inner wire.
--
-- * Second input is event for adding several wires to collection.
--
-- * Third input is event for removing several wires from collection.
--
-- * Wire returns list of outputs of inner wires.
--
-- Note: if one of the wires inhibits, it is removed from output result during its inhibition
dynCollection :: forall m i a b c c2 . (ActorMonad m, Eq i, DynCollection c, FilterConstraint c (GameWireIndexed m i a b), FilterConstraint c (Either () b), F.Foldable c2) =>
     (c (GameActor m i a b)) -- ^ Initial set of wires
  -> GameWire m (a, Event (c (GameActor m i a b)), Event (c2 i)) (c b)
dynCollection initialActors = mkGen $ \ds input -> do
  arrs <- sequence initialActors
  go arrs ds input
  where
    go :: c (GameWireIndexed m i a b)
      -> GameTime
      -> (a, Event (c (GameActor m i a b)), Event (c2 i))
      -> GameMonadT m (Either () (c b), GameWire m (a, Event (c (GameActor m i a b)), Event (c2 i)) (c b))
    go currentWires ds (a, addEvent, removeEvent) = do
      -- Adding new wires (effective this very step)
      newAddedWires <- onEvent currentWires addEvent $ \newActors -> do
        addWires <- sequence newActors
        return $ currentWires `concatDynColl` addWires
      -- Removing wires by id (also effective this step)
      newRemovedWires <- onEvent newAddedWires removeEvent $ \ids ->
        return $ F.foldl' (\acc i -> fFilter ((/= i) . indexedId) acc) newAddedWires ids
      -- Calculating outputs; inhibiting wires yield Left and are dropped below
      (bs, newWiresCntrls) <- liftM unzipDynColl $ mapM (\w -> stepWire w ds (Right a)) $ indexedWire <$> newRemovedWires
      let newWires = uncurry updateIndexedWire <$> (fmap const newWiresCntrls `zipDynColl` newRemovedWires)
      -- length ... `seq` forces the spine to avoid accumulating thunks
      return $ length newWires `seq` (Right (rightsDynColl bs), mkGen $ go newWires)
-- | Makes dynamic collection of wires.
--
-- * First input of wire is input for each inner wire.
--
-- * Second input is event for adding several wires to collection.
--
-- * Third input is event for removing several wires from collection.
--
-- * Wire returns list of outputs of inner wires.
--
-- Note: it is delayed version of dynCollection, removing and adding of agents performs on next step after current.
--
-- Note: if one of the wires inhibits, it is removed from output result while it inhibits.
dDynCollection :: forall m i a b c c2 . (ActorMonad m, Eq i, DynCollection c, FilterConstraint c (GameWireIndexed m i a b), FilterConstraint c (Either () b), F.Foldable c2) =>
     (c (GameActor m i a b)) -- ^ Initial set of wires
  -> GameWire m (a, Event (c (GameActor m i a b)), Event (c2 i)) (c b)
dDynCollection initialActors = mkGen $ \ds input -> do
  arrs <- sequence initialActors
  go arrs ds input
  where
    go :: c (GameWireIndexed m i a b)
      -> GameTime
      -> (a, Event (c (GameActor m i a b)), Event (c2 i))
      -> GameMonadT m (Either () (c b), GameWire m (a, Event (c (GameActor m i a b)), Event (c2 i)) (c b))
    go currentWires ds (a, addEvent, removeEvent) = do
      -- Calculating outputs first: add/remove only affect the NEXT step
      (bs, newWiresCntrls) <- liftM unzipDynColl $ mapM (\w -> stepWire w ds (Right a)) $ indexedWire <$> currentWires
      let newWires = uncurry updateIndexedWire <$> (fmap const newWiresCntrls `zipDynColl` currentWires)
      -- Adding new wires
      newAddedWires <- onEvent newWires addEvent $ \newActors -> do
        addWires <- sequence newActors
        return $ newWires `concatDynColl` addWires
      -- Removing wires
      newRemovedWires <- onEvent newAddedWires removeEvent $ \ids ->
        return $ F.foldl' (\acc i -> fFilter ((/= i) . indexedId) acc) newAddedWires ids
      return $ length newRemovedWires `seq` (Right (rightsDynColl bs), mkGen $ go newRemovedWires)
|
Teaspot-Studio/gore-and-ash-actor
|
src/Game/GoreAndAsh/Actor/Collection.hs
|
bsd-3-clause
| 4,650 | 0 | 19 | 960 | 1,391 | 740 | 651 | -1 | -1 |
module Env
( module Env
, module Args
)
where
import Control.Monad
import Control.Exception as E
import Control.Exception.Extra
import Args
import System.Err
import Paths_clean_home
-- | Everything the application needs at runtime: the parsed CLI
-- arguments plus the package and program knowledge bases loaded from
-- the bundled data files.
data Env = Env
  { appArgs :: Args
  , pkgList :: PkgList
  , prgList :: PrgList
  }
-- Each entry pairs a program/package name with a list of strings
-- (presumably the config paths it may leave behind — TODO confirm
-- against the search code).
type PrgList = [ (String, [ String ]) ]
type PkgList = [ (String, [ String ]) ]
type ConfigSearchCmd = Env -> FilePath -> String -> IO (Either String Bool)
-- | Load the program list from @data/PrgList@, but only when the
-- by-program mode is enabled; otherwise succeed with an empty list.
readPrgList :: Args -> IO (Maybe PrgList)
readPrgList args =
    if byProgram args
      then readListFP "data/PrgList" "program list"
      else return (Just [])

-- | Load the package list from @data/PkgList@, but only when the
-- by-package mode is enabled; otherwise succeed with an empty list.
readPkgList :: Args -> IO (Maybe PkgList)
readPkgList args =
    if byPackage args
      then readListFP "data/PkgList" "package list"
      else return (Just [])
-- | Read a list of values from a bundled data file.  @n@ is a human
-- readable description used in the error message.  Returns Nothing
-- (after reporting on stderr) when the file cannot be read or parsed.
-- NOTE(review): 'readFile' is lazy, so a read error could surface after
-- 'E.handle' returns if 'reads' does not force enough input — confirm.
readListFP :: Read a => FilePath -> String -> IO (Maybe [a])
readListFP fn n = do
  fp <- getDataFileName fn
  list <- E.handle (anyway (return [])) $
            liftM reads $ readFile fp
  case list of
    [] -> do putErrLn $ "Cannot read or parse " ++ n
             return Nothing
    ((x,_):_) -> return (Just x)
-- FIXME: MaybeT?
-- | Assemble the runtime 'Env'.  Fails (with a stderr message) when
-- neither search mode is enabled, or when a required data file does not
-- load.
buildEnv :: Args -> IO (Maybe Env)
buildEnv appArgs
  | not (pkg || prg)
  = do putErrLn "You must enable --by-package or --by-program"
       return Nothing
  | otherwise
  = do pkgList <- readPkgList appArgs
       prgList <- readPrgList appArgs
       case (pkgList, prgList) of
         (Just pkgs, Just prgs) -> return $ Just $ Env appArgs pkgs prgs
         _ -> return Nothing
  where pkg = byPackage appArgs
        prg = byProgram appArgs
|
ivanperez-keera/clean-home
|
src/Env.hs
|
bsd-3-clause
| 1,737 | 0 | 15 | 447 | 564 | 286 | 278 | 46 | 2 |
module Text.Highlighter.Lexers.PostScript (lexer) where
import Text.Regex.PCRE.Light
import Text.Highlighter.Types
-- | Highlighter definition for PostScript source files.
lexer :: Lexer
lexer = Lexer
  { lName = "PostScript"
  , lAliases = ["postscript"]
  , lExtensions = [".ps", ".eps"]
  , lMimetypes = ["application/postscript"]
  , lStart = root'
  , lFlags = [multiline]
  }
-- | Top-level scanning state: comments, string/number/name literals,
-- reserved words and built-in operators.  Rules are tried in order; the
-- trailing lookaheads require a delimiter after literals and names.
root' :: TokenMatcher
root' =
  [ tok "^%!.+\\n" (Arbitrary "Comment" :. Arbitrary "Preproc")
  , tok "%%.*\\n" (Arbitrary "Comment" :. Arbitrary "Special")
  , tok "(^%.*\\n){2,}" (Arbitrary "Comment" :. Arbitrary "Multiline")
  , tok "%.*\\n" (Arbitrary "Comment" :. Arbitrary "Single")
  -- An opening paren enters the string state below.
  , tokNext "\\(" (Arbitrary "Literal" :. Arbitrary "String") (GoTo stringliteral')
  , tok "[\\{\\}(\\<\\<)(\\>\\>)\\[\\]]" (Arbitrary "Punctuation")
  , tok "<[0-9A-Fa-f]+>(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Hex")
  , tok "[0-9]+\\#(\\-|\\+)?([0-9]+\\.?|[0-9]*\\.[0-9]+|[0-9]+\\.[0-9]*)((e|E)[0-9]+)?(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Oct")
  , tok "(\\-|\\+)?([0-9]+\\.?|[0-9]*\\.[0-9]+|[0-9]+\\.[0-9]*)((e|E)[0-9]+)?(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float")
  , tok "(\\-|\\+)?[0-9]+(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Integer")
  , tok "\\/[^\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s]+(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Name" :. Arbitrary "Variable")
  , tok "[^\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s]+(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Name" :. Arbitrary "Function")
  , tok "(false|true)(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Keyword" :. Arbitrary "Constant")
  , tok "(eq|ne|ge|gt|le|lt|and|or|not|if|ifelse|for|forall)(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Keyword" :. Arbitrary "Reserved")
  , tok "(abs|add|aload|arc|arcn|array|atan|begin|bind|ceiling|charpath|clip|closepath|concat|concatmatrix|copy|cos|currentlinewidth|currentmatrix|currentpoint|curveto|cvi|cvs|def|defaultmatrix|dict|dictstackoverflow|div|dtransform|dup|end|exch|exec|exit|exp|fill|findfont|floor|get|getinterval|grestore|gsave|gt|identmatrix|idiv|idtransform|index|invertmatrix|itransform|length|lineto|ln|load|log|loop|matrix|mod|moveto|mul|neg|newpath|pathforall|pathbbox|pop|print|pstack|put|quit|rand|rangecheck|rcurveto|repeat|restore|rlineto|rmoveto|roll|rotate|round|run|save|scale|scalefont|setdash|setfont|setgray|setlinecap|setlinejoin|setlinewidth|setmatrix|setrgbcolor|shfill|show|showpage|sin|sqrt|stack|stringwidth|stroke|strokepath|sub|syntaxerror|transform|translate|truncate|typecheck|undefined|undefinedfilename|undefinedresult)(?=[\\(\\)\\<\\>\\[\\]\\{\\}\\/\\%\\s])" (Arbitrary "Name" :. Arbitrary "Builtin")
  , tok "\\s+" (Arbitrary "Text")
  ]
-- | Inside a parenthesised string literal; parens nest (Push/Pop) and a
-- backslash enters the escape state.
stringliteral' :: TokenMatcher
stringliteral' =
  [ tok "[^\\(\\)\\\\]+" (Arbitrary "Literal" :. Arbitrary "String")
  , tokNext "\\\\" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Escape") (GoTo escape')
  , tokNext "\\(" (Arbitrary "Literal" :. Arbitrary "String") Push
  , tokNext "\\)" (Arbitrary "Literal" :. Arbitrary "String") Pop
  ]
-- | A single escape after a backslash: octal triple, named escape, or
-- escaped paren/backslash; always returns to the string state.
escape' :: TokenMatcher
escape' =
  [ tokNext "([0-8]{3}|n|r|t|b|f|\\\\|\\(|\\)|)" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Escape") Pop
  ]
|
chemist/highlighter
|
src/Text/Highlighter/Lexers/PostScript.hs
|
bsd-3-clause
| 3,445 | 0 | 10 | 375 | 610 | 310 | 300 | 38 | 1 |
-- | Double a value; the Num constraint makes this work for any numeric type.
double :: Num a => a -> a
double n = n + n
-- | Quadruple a value by applying 'double' twice.
quad :: Num a => a -> a
quad n = double (double n)
-- | Factorial as a fold over the enumeration @[1..n]@.
-- For n < 1 the enumeration is empty, so the result is 1.
factorial :: Integer -> Integer
factorial n = foldr (*) 1 [1 .. n]
-- | Integer average of a list (truncating division; errors on an empty list).
average :: [Int] -> Int
average xs = sum xs `div` length xs
{-
Why [Int] -> Int rather than Num a => [a] -> Int: `length` always yields an
Int, and `div` (Integral a => a -> a -> a) forces both of its arguments to
share one type. Since the sum of the elements must be divided by that Int,
the element type is pinned to Int, which is both a Num and an Integral.
-}
-- | The last element, obtained by reversing and taking the head.
-- Partial: errors on the empty list, just like Prelude.last.
newLast :: [a] -> a
newLast xs = head (reverse xs)
-- | The last element via explicit recursion.
-- Partial: there is deliberately no case for the empty list.
anotherLast :: [a] -> a
anotherLast (y:ys)
  | null ys   = y
  | otherwise = anotherLast ys
-- | All but the last element: pairing each element with its successor
-- naturally drops the final one, and the empty list stays empty.
newInit :: [a] -> [a]
newInit xs = zipWith const xs (drop 1 xs)
-- And yet another way because we cannot get enough of this.
-- Fixed: the base case now matches any singleton ([_] -> []), so a
-- one-element list no longer crashes with a pattern-match failure; the
-- original base case [x,y] = [x] skipped length-1 inputs entirely.
-- Behaviour now mirrors Prelude.init (still partial on [], like init).
anotherInit :: [a] -> [a]
anotherInit [_]    = []
anotherInit (x:xs) = x : anotherInit xs
|
decomputed/haskellLaboratory
|
programmingInHaskell/chapter02.hs
|
mit
| 1,446 | 2 | 8 | 303 | 320 | 161 | 159 | 18 | 1 |
{-# LANGUAGE CPP #-}
-- Copyright (c) 2010, Diego Souza
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
-- * Neither the name of the <ORGANIZATION> nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Test.Yql.Core.LocalFunctions.Tree where
#define eq assertEqual (__FILE__ ++":"++ show __LINE__)
#define ok assertBool (__FILE__ ++":"++ show __LINE__)
import Yql.Data.PPrint
import Yql.Core.Types
import Yql.Core.LocalFunctions.Tree
import Yql.Core.LocalFunction
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit (assertBool, assertEqual)
-- Rows whose fields are plain text (no XML attributes) should render as a
-- two-level tree under the "Results" root.
test0 = testCase ".tree() for `simple' results (i.e. rows without attributes)" $ do
    output <- execTransformM [] (function render) xml
    eq tree output
  where
    tree = init . unlines $ [ "Results"
                            , "├─ row: "
                            , "│  ├─ foo: foo"
                            , "│  ├─ bar: bar"
                            , "├─ row: "
                            , "│  ├─ foo: foo"
                            , "│  ├─ bar: bar"
                            ]
    xml = unlines [ "<?xml version=\"1.0\"?>"
                  , "<results>"
                  , "<row>"
                  , "<foo>foo</foo>"
                  , "<bar>bar</bar>"
                  , "</row>"
                  , "<row>"
                  , "<foo>foo</foo>"
                  , "<bar>bar</bar>"
                  , "</row>"
                  , "</results>"
                  ]
-- Nested elements and XML attributes should appear as deeper tree levels,
-- with attributes rendered as "@name" children of their element.
test1 = testCase ".tree() for `complex' results (i.e. rows with attributes)" $ do
    output <- execTransformM [] (function render) xml
    eq tree output
  where
    tree = init . unlines $ [ "Results"
                            , "├─ row: "
                            , "│  ├─ foo: "
                            , "│  │  ├─ field_a: field_a"
                            , "│  │  │  ├─ @attr: attr#0"
                            , "│  │  ├─ field_b: field_b"
                            , "│  ├─ bar: bar"
                            , "├─ row: "
                            , "│  ├─ foo: foo"
                            , "│  ├─ bar: "
                            , "│  │  ├─ field_a: field_a"
                            , "│  │  ├─ field_b: field_b"
                            , "│  │  │  ├─ @attr: attr#1"
                            , "│  │  ├─ field_c: field_c"
                            ]
    xml = unlines [ "<?xml version=\"1.0\"?>"
                  , "<results>"
                  , "<row>"
                  , "<foo><field_a attr=\"attr#0\">field_a</field_a><field_b>field_b</field_b></foo>"
                  , "<bar>bar</bar>"
                  , "</row>"
                  , "<row>"
                  , "<foo>foo</foo>"
                  , "<bar><field_a>field_a</field_a><field_b attr=\"attr#1\">field_b</field_b><field_c>field_c</field_c></bar>"
                  , "</row>"
                  , "</results>"
                  ]
-- Input that is not XML at all must pass through unchanged.
test2 = testCase ".tree() returns raw input when input is not a valid xml" $ do
    output <- execTransformM [] (function render) "foobar"
    eq "foobar" output
-- Well-formed XML without a <results> root must also pass through unchanged.
test3 = testCase ".tree() returns raw input when input is a valid xml but there is no results element" $ do
    output <- execTransformM [] (function render) xml
    eq xml output
  where
    xml = unlines [ "<?xml version=\"1.0\"?>"
                  , "<foobar />"
                  ]
-- | All test cases of this module, grouped for the test runner.
-- Fixed: the group was labeled "Tables.hs" (copy-paste from a sibling
-- module) although this file is Tree.hs.
suite :: [Test]
suite = [ testGroup "Tree.hs" [ test0
                              , test1
                              , test2
                              , test3
                              ]
        ]
|
dgvncsz0f/iyql
|
src/test/haskell/Test/Yql/Core/LocalFunctions/Tree.hs
|
gpl-3.0
| 5,407 | 0 | 11 | 2,142 | 491 | 295 | 196 | 71 | 1 |
module Response (module X) where
import Response.About as X
import Response.Calendar as X
import Response.Draw as X
import Response.Graph as X
import Response.Grid as X
import Response.Image as X
import Response.Loading as X
import Response.NotFound as X
import Response.Post as X
import Response.Privacy as X
import Response.Search as X
import Response.Export as X
|
hermish/courseography
|
app/Response.hs
|
gpl-3.0
| 367 | 0 | 4 | 54 | 94 | 67 | 27 | 13 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.AttachNetworkInterface
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Attaches a network interface to an instance.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-AttachNetworkInterface.html>
module Network.AWS.EC2.AttachNetworkInterface
(
-- * Request
AttachNetworkInterface
-- ** Request constructor
, attachNetworkInterface
-- ** Request lenses
, aniDeviceIndex
, aniDryRun
, aniInstanceId
, aniNetworkInterfaceId
-- * Response
, AttachNetworkInterfaceResponse
-- ** Response constructor
, attachNetworkInterfaceResponse
-- ** Response lenses
, anirAttachmentId
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request parameters for the EC2 AttachNetworkInterface call.
-- Field prefixes (@_ani@) are stripped by the lens accessors below.
data AttachNetworkInterface = AttachNetworkInterface
    { _aniDeviceIndex        :: Int
    , _aniDryRun             :: Maybe Bool
    , _aniInstanceId         :: Text
    , _aniNetworkInterfaceId :: Text
    } deriving (Eq, Ord, Read, Show)
-- | 'AttachNetworkInterface' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'aniDeviceIndex' @::@ 'Int'
--
-- * 'aniDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'aniInstanceId' @::@ 'Text'
--
-- * 'aniNetworkInterfaceId' @::@ 'Text'
--
attachNetworkInterface :: Text -- ^ 'aniNetworkInterfaceId'
                       -> Text -- ^ 'aniInstanceId'
                       -> Int -- ^ 'aniDeviceIndex'
                       -> AttachNetworkInterface
attachNetworkInterface p1 p2 p3 = AttachNetworkInterface
    { _aniNetworkInterfaceId = p1
    , _aniInstanceId         = p2
    , _aniDeviceIndex        = p3
    , _aniDryRun             = Nothing
    }
-- | The index of the device for the network interface attachment.
aniDeviceIndex :: Lens' AttachNetworkInterface Int
aniDeviceIndex = lens _aniDeviceIndex (\s a -> s { _aniDeviceIndex = a })
-- NOTE(review): presumably the standard EC2 dry-run flag (validate
-- permissions without performing the action) — confirm against the API docs.
aniDryRun :: Lens' AttachNetworkInterface (Maybe Bool)
aniDryRun = lens _aniDryRun (\s a -> s { _aniDryRun = a })
-- | The ID of the instance.
aniInstanceId :: Lens' AttachNetworkInterface Text
aniInstanceId = lens _aniInstanceId (\s a -> s { _aniInstanceId = a })
-- | The ID of the network interface.
aniNetworkInterfaceId :: Lens' AttachNetworkInterface Text
aniNetworkInterfaceId =
    lens _aniNetworkInterfaceId (\s a -> s { _aniNetworkInterfaceId = a })
-- | Response payload: just the attachment identifier, when present.
newtype AttachNetworkInterfaceResponse = AttachNetworkInterfaceResponse
    { _anirAttachmentId :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | 'AttachNetworkInterfaceResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'anirAttachmentId' @::@ 'Maybe' 'Text'
--
attachNetworkInterfaceResponse :: AttachNetworkInterfaceResponse
attachNetworkInterfaceResponse = AttachNetworkInterfaceResponse
    { _anirAttachmentId = Nothing
    }
-- | The ID of the network interface attachment.
anirAttachmentId :: Lens' AttachNetworkInterfaceResponse (Maybe Text)
anirAttachmentId = lens _anirAttachmentId (\s a -> s { _anirAttachmentId = a })
-- Wire-format instances: the EC2 query API posts everything to "/" and
-- encodes the request fields as query parameters.
instance ToPath AttachNetworkInterface where
    toPath = const "/"
instance ToQuery AttachNetworkInterface where
    toQuery AttachNetworkInterface{..} = mconcat
        [ "DeviceIndex"        =? _aniDeviceIndex
        , "DryRun"             =? _aniDryRun
        , "InstanceId"         =? _aniInstanceId
        , "NetworkInterfaceId" =? _aniNetworkInterfaceId
        ]
instance ToHeaders AttachNetworkInterface
instance AWSRequest AttachNetworkInterface where
    type Sv AttachNetworkInterface = EC2
    type Rs AttachNetworkInterface = AttachNetworkInterfaceResponse
    request  = post "AttachNetworkInterface"
    response = xmlResponse
-- Pull the optional attachmentId element out of the XML response body.
instance FromXML AttachNetworkInterfaceResponse where
    parseXML x = AttachNetworkInterfaceResponse
        <$> x .@? "attachmentId"
|
kim/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/AttachNetworkInterface.hs
|
mpl-2.0
| 4,825 | 0 | 9 | 1,077 | 626 | 378 | 248 | 74 | 1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE Safe #-}
{-# LANGUAGE PatternGuards #-}
module Cryptol.Utils.PP
( PP(..)
, pp
, pretty
, optParens
, ppInfix
, Assoc(..)
, Infix(..)
, module Text.PrettyPrint
, ordinal
, ordSuffix
, commaSep
) where
import Text.PrettyPrint
-- | Things that can be pretty-printed at a given precedence level.
class PP a where
  ppPrec :: Int -> a -> Doc
-- | Pretty-print at the lowest precedence (no surrounding parens needed).
pp :: PP a => a -> Doc
pp = ppPrec 0
-- | Render a pretty-printable value to a String.
pretty :: PP a => a -> String
pretty  = show . pp
-- | Wrap a document in parentheses only when the flag is set.
optParens :: Bool -> Doc -> Doc
optParens b body = if b then parens body else body
-- | Information about associativity.
data Assoc = LeftAssoc | RightAssoc | NonAssoc
              deriving (Show,Eq)
-- | Information about an infix expression of some sort.
data Infix op thing = Infix
  { ieOp    :: op       -- ^ operator
  , ieLeft  :: thing    -- ^ left argument
  , ieRight :: thing    -- ^ right argument
  , iePrec  :: Int      -- ^ operator precedence
  , ieAssoc :: Assoc    -- ^ operator associativity
  }
-- | Lay out documents separated by commas, filling lines as needed.
commaSep :: [Doc] -> Doc
commaSep ds = fsep (punctuate comma ds)
-- | Pretty print an infix expression of some sort.
-- A sub-expression is parenthesised when its operator binds more loosely
-- than ours, or binds equally tightly but on the wrong side for the
-- operator's associativity.
ppInfix :: (PP thing, PP op)
        => Int            -- ^ Non-infix leaves are printed with this precedence
        -> (thing -> Maybe (Infix op thing))
                          -- ^ pattern to check if sub-thing is also infix
        -> Infix op thing -- ^ Pretty print this infix expression
        -> Doc
ppInfix lp isInfix expr =
  sep [ ppSub (wrapSub LeftAssoc ) (ieLeft expr) <+> pp (ieOp expr)
      , ppSub (wrapSub RightAssoc) (ieRight expr) ]
  where
  wrapSub dir p = p < iePrec expr || p == iePrec expr && ieAssoc expr /= dir
  -- The pattern-guard clause handles infix sub-expressions; anything else
  -- falls through to the second clause and is printed as a leaf.
  ppSub w e
    | Just e1 <- isInfix e = optParens (w (iePrec e1)) (ppInfix lp isInfix e1)
  ppSub _ e = ppPrec lp e
-- | Display a numeric value as an ordinal (e.g., 2nd)
ordinal :: (Integral a, Show a, Eq a) => a -> Doc
ordinal x = text (show x) <> text (ordSuffix x)
-- | The suffix to use when displaying a number as an ordinal
-- (1st, 2nd, 3rd, 4th, ... 11th, 12th, 13th, ... 21st).
-- The sign is ignored; the teens always take \"th\".
ordSuffix :: (Integral a, Eq a) => a -> String
ordSuffix n0
  | teen      = "th"
  | otherwise = case n `mod` 10 of
                  1 -> "st"
                  2 -> "nd"
                  3 -> "rd"
                  _ -> "th"
  where
  n    = abs n0
  teen = let m = n `mod` 100 in m >= 11 && m <= 19
|
TomMD/cryptol
|
src/Cryptol/Utils/PP.hs
|
bsd-3-clause
| 2,402 | 0 | 12 | 714 | 699 | 376 | 323 | 58 | 4 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
module Stackage.InstallInfo
( getInstallInfo
, bpPackageList
) where
import Control.Monad (forM_, unless)
import Data.List (foldl')
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Time (getCurrentTime, formatTime)
import Data.Version (showVersion)
import qualified Distribution.Text
import Distribution.Version (simplifyVersionRange, withinRange)
import Stackage.GhcPkg
import Stackage.LoadDatabase
import Stackage.NarrowDatabase
import Stackage.ServerFiles
import Stackage.Types
import Stackage.Util
import System.Directory (createDirectoryIfMissing)
import System.FilePath ((</>))
import qualified System.IO as IO
import qualified System.IO.UTF8
import System.Locale (defaultTimeLocale)
import System.Exit (exitFailure)
-- | Remove every package listed in the settings' exclusion set from the map.
dropExcluded :: SelectSettings
             -> Map PackageName (VersionRange, Maintainer)
             -> Map PackageName (VersionRange, Maintainer)
dropExcluded bs m0 =
    Map.filterWithKey (\name _ -> name `Set.notMember` excludedPackages bs) m0
-- | Build the complete install plan: load the core/global packages, narrow
-- the package database to the stable selection, write the build-plan logs,
-- abort on disallowed packages or version conflicts, and emit the
-- inclusive/exclusive Stackage server files. Fails (exit/error) rather than
-- returning on an inconsistent plan.
getInstallInfo :: SelectSettings -> IO InstallInfo
getInstallInfo settings = do
    core <- do
        putStrLn "Loading core packages from global database"
        getGlobalPackages $ selectGhcVersion settings
    underlay <- getDBPackages (selectUnderlayPackageDirs settings) (selectGhcVersion settings)
    let underlaySet = Set.map pkgName underlay
        coreMap = Map.unions
                $ map (\(PackageIdentifier k v) -> Map.singleton k v)
                $ Set.toList core
        allPackages' =
            stablePackages settings $ requireHaskellPlatform settings
        allPackages = dropExcluded settings allPackages'
        totalCore
            | ignoreUpgradeableCore settings =
                Map.fromList $ map (\n -> (PackageName n, Nothing)) $ words "base containers template-haskell"
            | otherwise =
                Map.fromList (map (\(PackageIdentifier p v) -> (p, Just v)) (Set.toList core))
                `Map.union` Map.fromList (map (, Nothing) (Set.toList $ extraCore settings))
    putStrLn "Loading package database"
    pdb <- loadPackageDB settings coreMap (Map.keysSet totalCore) allPackages underlaySet
    putStrLn "Narrowing package database"
    (final, errs) <- narrowPackageDB settings (Map.keysSet totalCore) pdb $ Set.fromList $ Map.toList $ Map.map snd $ allPackages
    putStrLn "Printing build plan to build-plan.log"
    System.IO.UTF8.writeFile "build-plan.log" $ unlines $ map showDep $ Map.toList final
    System.IO.UTF8.writeFile "hackage-map.txt" $ unlines $ map showHackageMap $ Map.toList final
    -- Any package pulled in despite being disallowed is fatal.
    unless (Set.null errs) $ do
        putStrLn "Build plan requires some disallowed packages"
        mapM_ putStrLn $ Set.toList errs
        exitFailure
    putStrLn "Checking for bad versions"
    case checkBadVersions settings coreMap pdb final of
        badVersions
            | Map.null badVersions -> return ()
            | otherwise -> do
                forM_ (Map.toList badVersions) $ \(user, badDeps) -> do
                    putStrLn $ user ++ " cannot use: "
                    forM_ (Map.toList badDeps) $ \(name, (version, range)) -> do
                        putStrLn $ concat
                            [ "- "
                            , packageVersionString (name, version)
                            , " -- "
                            , Distribution.Text.display $ simplifyVersionRange range
                            ]
                    putStrLn ""
                error "Conflicting build plan, exiting"
    let ii = InstallInfo
            { iiCore = totalCore
            , iiPackages = Map.map biToSPI final
            , iiOptionalCore = Map.empty
            , iiPackageDB = pdb
            }
    -- Emit server files twice: once for the exclusive and once for the
    -- inclusive snapshot flavour.
    forM_ [False, True] $ \isInc -> do
        let incexc = if isInc then "inclusive" else "exclusive"
        now <- getCurrentTime
        let ghcVer =
                let GhcMajorVersion x y = selectGhcVersion settings
                in show x ++ "." ++ show y
            date = formatTime defaultTimeLocale "%Y-%m-%d" now
        createDirectoryIfMissing True incexc
        putStrLn $ "Inclusive/exclusive: " ++ incexc
        putStrLn "Creating hackage file (for publishing to Stackage server)"
        let isHP = requireHaskellPlatform settings
        IO.withBinaryFile (incexc </> "hackage") IO.WriteMode $ \hackageH ->
            IO.withBinaryFile (incexc </> "create-snapshot.sh") IO.WriteMode
                (createHackageFile isInc isHP ii ghcVer date hackageH)
        putStrLn "Creating desc file (for publishing to Stackage server)"
        System.IO.UTF8.writeFile (incexc </> "desc") $ concat
            [ "Stackage build for GHC "
            , ghcVer
            , if requireHaskellPlatform settings
                then " + Haskell Platform"
                else ""
            , ", "
            , date
            , ", "
            , incexc
            , "\nGenerated on "
            , show now
            ]
        System.IO.UTF8.writeFile (incexc </> "slug") $ concat
            [ date
            , "-ghc"
            , ghcVer
            , if requireHaskellPlatform settings then "hp" else ""
            , if isInc then "-inc" else "-exc"
            ]
    return ii
-- | Project the relevant fields of a 'BuildInfo' onto a 'SelectedPackageInfo'.
biToSPI :: BuildInfo -> SelectedPackageInfo
biToSPI bi = SelectedPackageInfo
    { spiVersion    = biVersion bi
    , spiMaintainer = biMaintainer bi
    , spiGithubUser = biGithubUser bi
    , spiHasTests   = biHasTests bi
    }
-- | Render one build-plan entry for build-plan.log:
-- @name-version (maintainer \@github) : users...@.
showDep :: (PackageName, BuildInfo) -> String
showDep (PackageName name, BuildInfo {..}) =
    concat
        [ name
        , "-"
        , showVersion biVersion
        , " ("
        , unMaintainer biMaintainer
        , " " ++ githubMentions biGithubUser
        , ")"
        , ": "
        , unwords $ map unP biUsers
        ]
  where
    unP (PackageName p) = p
-- | Convert to format used by Hackage for displaying distribution versions.
-- For more info, see https://github.com/fpco/stackage/issues/38.
showHackageMap :: (PackageName, BuildInfo) -> String
showHackageMap (PackageName name, BuildInfo {..}) =
    show (name, showVersion biVersion, Nothing :: Maybe String)
-- | Render every selected package of a build plan as a name-version string.
bpPackageList :: BuildPlan -> [String]
bpPackageList bp =
    map packageVersionString (Map.toList (Map.map spiVersion (bpPackages bp)))
-- | Check for internal mismatches in required and actual package versions.
-- Returns, per offending user package, the dependencies whose selected (or
-- core) version falls outside the declared range. Packages listed in
-- 'expectedFailures' are skipped entirely.
checkBadVersions :: SelectSettings
                 -> Map PackageName Version -- ^ core
                 -> PackageDB
                 -> Map PackageName BuildInfo
                 -> Map String (Map PackageName (Version, VersionRange))
checkBadVersions settings core (PackageDB pdb) buildPlan =
    Map.unions $ map getBadVersions $ Map.toList $ Map.filterWithKey unexpectedFailure buildPlan
  where
    unexpectedFailure name _ = name `Set.notMember` expectedFailures settings
    getBadVersions :: (PackageName, BuildInfo) -> Map String (Map PackageName (Version, VersionRange))
    getBadVersions (name, bi)
        | Map.null badVersions = Map.empty
        | otherwise = Map.singleton display badVersions
      where
        badVersions = Map.unions $ map (uncurry checkPackage) $ Map.toList $ biDeps bi
        -- Human-readable key: "pkg-version (maintainer @github)".
        display = concat
            [ packageVersionString (name, biVersion bi)
            , " ("
            , unMaintainer $ biMaintainer bi
            , case Map.lookup name pdb of
                Just PackageInfo { piGithubUser = gus } -> " " ++ githubMentions gus
                _ -> ""
            , ")"
            ]
    checkPackage :: PackageName -> VersionRange -> Map PackageName (Version, VersionRange)
    checkPackage name vr =
        case Map.lookup name buildPlan of
            Nothing ->
                case Map.lookup name core of
                    -- Might be part of extra-core
                    Nothing -> Map.empty
                    Just version
                        | version `withinRange` vr -> Map.empty
                        | otherwise -> Map.singleton name (version, vr)
            Just bi
                | biVersion bi `withinRange` vr -> Map.empty
                | otherwise -> Map.singleton name (biVersion bi, vr)
|
k-bx/stackage
|
Stackage/InstallInfo.hs
|
mit
| 8,479 | 0 | 25 | 2,772 | 2,046 | 1,048 | 998 | 171 | 5 |
{-# LANGUAGE CPP #-}
{-
Copyright (C) 2010-2016 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.UTF8
Copyright : Copyright (C) 2010-2016 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
UTF-8 aware string IO functions that will work with GHC 6.10, 6.12, or 7.
-}
module Text.Pandoc.UTF8 ( readFile
, writeFile
, getContents
, putStr
, putStrLn
, hPutStr
, hPutStrLn
, hGetContents
, toString
, fromString
, toStringLazy
, fromStringLazy
, encodePath
, decodeArg
)
where
import System.IO hiding (readFile, writeFile, getContents,
putStr, putStrLn, hPutStr, hPutStrLn, hGetContents)
import Prelude hiding (readFile, writeFile, getContents, putStr, putStrLn)
import qualified System.IO as IO
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Encoding as T
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL
-- | Read a file strictly as UTF-8 (BOM and CRs stripped via 'hGetContents').
readFile :: FilePath -> IO String
readFile f = do
  h <- openFile (encodePath f) ReadMode
  hGetContents h
-- | Write a file with UTF-8 encoding (handle encoding set by 'hPutStr').
writeFile :: FilePath -> String -> IO ()
writeFile f s = withFile (encodePath f) WriteMode $ \h -> hPutStr h s
-- | UTF-8 aware replacement for 'Prelude.getContents'.
getContents :: IO String
getContents = hGetContents stdin
-- | UTF-8 aware replacement for 'Prelude.putStr'.
putStr :: String -> IO ()
putStr s = hPutStr stdout s
-- | UTF-8 aware replacement for 'Prelude.putStrLn'.
putStrLn :: String -> IO ()
putStrLn s = hPutStrLn stdout s
-- Forces the handle to UTF-8 before writing.
hPutStr :: Handle -> String -> IO ()
hPutStr h s = hSetEncoding h utf8 >> IO.hPutStr h s
hPutStrLn :: Handle -> String -> IO ()
hPutStrLn h s = hSetEncoding h utf8 >> IO.hPutStrLn h s
-- Reads raw bytes strictly and decodes via 'toString' (drops BOM, CRs).
hGetContents :: Handle -> IO String
hGetContents = fmap toString . B.hGetContents
-- hGetContents h = hSetEncoding h utf8_bom
--               >> hSetNewlineMode h universalNewlineMode
--               >> IO.hGetContents h
-- | Strip a byte order marker from the front of a string, if present.
-- Data.Text decodes a UTF-8 BOM to code point U+FEFF (zero-width no-break
-- space), so that is the character we look for.
dropBOM :: String -> String
dropBOM s =
  case s of
    '\xFEFF' : rest -> rest
    _               -> s
-- | Normalise line endings: CRLF collapses to a single LF and a lone CR
-- also becomes LF.
filterCRs :: String -> String
filterCRs s =
  case s of
    []              -> []
    '\r' : '\n' : t -> '\n' : filterCRs t
    '\r' : t        -> '\n' : filterCRs t
    c : t           -> c : filterCRs t
-- | Convert UTF8-encoded strict ByteString to String, also
-- removing BOM and '\r' characters.
toString :: B.ByteString -> String
toString = filterCRs . dropBOM . T.unpack . T.decodeUtf8
-- | Encode a String as a UTF8 strict ByteString.
fromString :: String -> B.ByteString
fromString = T.encodeUtf8 . T.pack
-- | Convert UTF8-encoded lazy ByteString to String, also
-- removing BOM and '\r' characters.
toStringLazy :: BL.ByteString -> String
toStringLazy = filterCRs . dropBOM . TL.unpack . TL.decodeUtf8
-- | Encode a String as a UTF8 lazy ByteString.
fromStringLazy :: String -> BL.ByteString
fromStringLazy = TL.encodeUtf8 . TL.pack
-- On base >= 4.4 GHC handles filepath/argv encoding itself, so these are
-- identity; on older bases we round-trip through UTF-8 bytes manually.
encodePath :: FilePath -> FilePath
decodeArg :: String -> String
#if MIN_VERSION_base(4,4,0)
encodePath = id
decodeArg = id
#else
encodePath = B.unpack . fromString
decodeArg = toString . B.pack
#endif
|
janschulz/pandoc
|
src/Text/Pandoc/UTF8.hs
|
gpl-2.0
| 4,115 | 0 | 10 | 1,043 | 722 | 405 | 317 | 63 | 1 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Foreign.Marshal (module M) where
import "base" Foreign.Marshal as M
|
Ye-Yong-Chi/codeworld
|
codeworld-base/src/Foreign/Marshal.hs
|
apache-2.0
| 745 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
yes = \x -> a x where -- HLint fixture: expected to trigger the lambda/eta-reduction hint (yes = a)
|
mpickering/hlint-refactor
|
tests/examples/Lambda26.hs
|
bsd-3-clause
| 21 | 1 | 6 | 6 | 19 | 8 | 11 | 1 | 1 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module UnitTests.Distribution.Compat.Graph
( tests
, arbitraryGraph
) where
import Distribution.Compat.Graph
import qualified Prelude
import Prelude hiding (null)
import Test.Tasty
import Test.Tasty.QuickCheck
import qualified Data.Set as Set
import Control.Monad
import qualified Data.Graph as G
import Data.Array ((!))
import Data.Maybe
import Data.List (sort)
-- | All Graph properties, instantiated at a concrete node type so the
-- test framework can run them.
tests :: [TestTree]
tests =
    [ testProperty "arbitrary unbroken" (prop_arbitrary_unbroken :: Graph (Node Int ()) -> Bool)
    , testProperty "nodes consistent" (prop_nodes_consistent :: Graph (Node Int ()) -> Bool)
    , testProperty "edges consistent" (prop_edges_consistent :: Graph (Node Int ()) -> Property)
    , testProperty "closure consistent" (prop_closure_consistent :: Graph (Node Int ()) -> Property)
    ]
-- Our arbitrary instance does not generate broken graphs
-- (i.e. 'broken' finds no node with a dangling neighbor key).
prop_arbitrary_unbroken :: Graph a -> Bool
prop_arbitrary_unbroken g = Prelude.null (broken g)
-- Every node from 'toList' maps to a vertex which
-- is present in the constructed graph, and maps back
-- to a node correctly.
prop_nodes_consistent :: (Eq a, IsNode a) => Graph a -> Bool
prop_nodes_consistent g = all p (toList g)
  where
    -- vtn: vertex -> node, ktv: key -> Maybe vertex (from Data.Graph view)
    (_, vtn, ktv) = toGraph g
    p n = case ktv (nodeKey n) of
            Just v -> vtn v == n
            Nothing -> False
-- A non-broken graph has the 'nodeNeighbors' of each node
-- equal the recorded adjacent edges in the node graph.
prop_edges_consistent :: IsNode a => Graph a -> Property
prop_edges_consistent g = Prelude.null (broken g) ==> all p (toList g)
  where
    (gr, vtn, ktv) = toGraph g
    -- Compare as sorted lists since adjacency order is unspecified.
    p n = sort (nodeNeighbors n)
       == sort (map (nodeKey . vtn) (gr ! fromJust (ktv (nodeKey n))))
-- Closure is consistent with reachable
-- (Graph.closure agrees with Data.Graph.reachable on key sets).
prop_closure_consistent :: (Show a, IsNode a) => Graph a -> Property
prop_closure_consistent g =
    not (null g) ==>
    forAll (elements (toList g)) $ \n ->
    Set.fromList (map nodeKey (fromJust (closure g [nodeKey n])))
        == Set.fromList (map (nodeKey . vtn) (G.reachable gr (fromJust (ktv (nodeKey n)))))
  where
    (gr, vtn, ktv) = toGraph g
-- | True iff the list contains no duplicate elements; short-circuits on
-- the first repeat, tracking what has been seen in a Set.
hasNoDups :: Ord a => [a] -> Bool
hasNoDups = go Set.empty
  where
    go _ [] = True
    go seen (y:ys)
        | y `Set.member` seen = False
        | otherwise           = go (Set.insert y seen) ys
-- | Produces a graph of size @len@. We sample with 'suchThat'; if we
-- dropped duplicate entries our size could be smaller.
arbitraryGraph :: (Ord k, Arbitrary k, Arbitrary a) => Int -> Gen (Graph (Node k a))
arbitraryGraph len = do
    -- Careful! Assume k is much larger than size.
    ks <- vectorOf len arbitrary `suchThat` hasNoDups
    ns <- forM ks $ \k -> do
        a <- arbitrary
        -- Neighbor keys are drawn only from the generated key pool, so the
        -- resulting graph is never broken.
        ns <- listOf (elements ks)
        -- Allow duplicates!
        return (N a k ns)
    return (fromList ns)
-- Sized generator: graphs of up to QuickCheck's current size parameter.
instance (Ord k, Arbitrary k, Arbitrary a) => Arbitrary (Graph (Node k a)) where
    arbitrary = sized $ \n -> do
        len <- choose (0, n)
        arbitraryGraph len
|
sopvop/cabal
|
Cabal/tests/UnitTests/Distribution/Compat/Graph.hs
|
bsd-3-clause
| 3,153 | 0 | 18 | 743 | 1,027 | 533 | 494 | 62 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hr-HR">
<title>OAST Support Add-on</title>
<maps>
<homeID>oast</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/oast/src/main/javahelp/org/zaproxy/addon/oast/resources/help_hr_HR/helpset_hr_HR.hs
|
apache-2.0
| 965 | 98 | 29 | 157 | 401 | 212 | 189 | -1 | -1 |
module Tests.Bundle ( tests ) where
import Boilerplater
import Utilities
import qualified Data.Vector.Fusion.Bundle as S
import Test.QuickCheck
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Text.Show.Functions ()
import Data.List (foldl', foldl1', unfoldr, find, findIndex)
import System.Random (Random)
#define COMMON_CONTEXT(a) \
VANILLA_CONTEXT(a)
#define VANILLA_CONTEXT(a) \
Eq a, Show a, Arbitrary a, CoArbitrary a, TestData a, Model a ~ a, EqTest a ~ Property
-- | Round-trip sanity checks: Bundle <-> list conversions are inverses.
testSanity :: forall v a. (COMMON_CONTEXT(a)) => S.Bundle v a -> [Test]
testSanity _ = [
        testProperty "fromList.toList == id" prop_fromList_toList,
        testProperty "toList.fromList == id" prop_toList_fromList
    ]
  where
    prop_fromList_toList :: P (S.Bundle v a -> S.Bundle v a)
        = (S.fromList . S.toList) `eq` id
    prop_toList_fromList :: P ([a] -> [a])
        = (S.toList . (S.fromList :: [a] -> S.Bundle v a)) `eq` id
-- | One property per Bundle combinator, each checking the Bundle version
-- against its list-model counterpart via the `eq` correspondence; the TH
-- splice collects the names below into Test values.
testPolymorphicFunctions :: forall v a. (COMMON_CONTEXT(a)) => S.Bundle v a -> [Test]
testPolymorphicFunctions _ = $(testProperties [
        'prop_eq,
        'prop_length, 'prop_null,
        'prop_empty, 'prop_singleton, 'prop_replicate,
        'prop_cons, 'prop_snoc, 'prop_append,
        'prop_head, 'prop_last, 'prop_index,
        'prop_extract, 'prop_init, 'prop_tail, 'prop_take, 'prop_drop,
        'prop_map, 'prop_zipWith, 'prop_zipWith3,
        'prop_filter, 'prop_takeWhile, 'prop_dropWhile,
        'prop_elem, 'prop_notElem,
        'prop_find, 'prop_findIndex,
        'prop_foldl, 'prop_foldl1, 'prop_foldl', 'prop_foldl1',
        'prop_foldr, 'prop_foldr1,
        'prop_prescanl, 'prop_prescanl',
        'prop_postscanl, 'prop_postscanl',
        'prop_scanl, 'prop_scanl', 'prop_scanl1, 'prop_scanl1',
        'prop_concatMap,
        'prop_unfoldr
    ])
  where
    -- Prelude
    prop_eq :: P (S.Bundle v a -> S.Bundle v a -> Bool) = (==) `eq` (==)
    prop_length :: P (S.Bundle v a -> Int) = S.length `eq` length
    prop_null :: P (S.Bundle v a -> Bool) = S.null `eq` null
    prop_empty :: P (S.Bundle v a) = S.empty `eq` []
    prop_singleton :: P (a -> S.Bundle v a) = S.singleton `eq` singleton
    prop_replicate :: P (Int -> a -> S.Bundle v a)
        = (\n _ -> n < 1000) ===> S.replicate `eq` replicate
    prop_cons :: P (a -> S.Bundle v a -> S.Bundle v a) = S.cons `eq` (:)
    prop_snoc :: P (S.Bundle v a -> a -> S.Bundle v a) = S.snoc `eq` snoc
    prop_append :: P (S.Bundle v a -> S.Bundle v a -> S.Bundle v a) = (S.++) `eq` (++)
    prop_head :: P (S.Bundle v a -> a) = not . S.null ===> S.head `eq` head
    prop_last :: P (S.Bundle v a -> a) = not . S.null ===> S.last `eq` last
    prop_index = \xs ->
        not (S.null xs) ==>
        forAll (choose (0, S.length xs-1)) $ \i ->
        unP prop xs i
      where
        prop :: P (S.Bundle v a -> Int -> a) = (S.!!) `eq` (!!)
    prop_extract = \xs ->
        forAll (choose (0, S.length xs)) $ \i ->
        forAll (choose (0, S.length xs - i)) $ \n ->
        unP prop i n xs
      where
        prop :: P (Int -> Int -> S.Bundle v a -> S.Bundle v a) = S.slice `eq` slice
    prop_tail :: P (S.Bundle v a -> S.Bundle v a) = not . S.null ===> S.tail `eq` tail
    prop_init :: P (S.Bundle v a -> S.Bundle v a) = not . S.null ===> S.init `eq` init
    prop_take :: P (Int -> S.Bundle v a -> S.Bundle v a) = S.take `eq` take
    prop_drop :: P (Int -> S.Bundle v a -> S.Bundle v a) = S.drop `eq` drop
    prop_map :: P ((a -> a) -> S.Bundle v a -> S.Bundle v a) = S.map `eq` map
    prop_zipWith :: P ((a -> a -> a) -> S.Bundle v a -> S.Bundle v a -> S.Bundle v a) = S.zipWith `eq` zipWith
    prop_zipWith3 :: P ((a -> a -> a -> a) -> S.Bundle v a -> S.Bundle v a -> S.Bundle v a -> S.Bundle v a)
        = S.zipWith3 `eq` zipWith3
    prop_filter :: P ((a -> Bool) -> S.Bundle v a -> S.Bundle v a) = S.filter `eq` filter
    prop_takeWhile :: P ((a -> Bool) -> S.Bundle v a -> S.Bundle v a) = S.takeWhile `eq` takeWhile
    prop_dropWhile :: P ((a -> Bool) -> S.Bundle v a -> S.Bundle v a) = S.dropWhile `eq` dropWhile
    prop_elem :: P (a -> S.Bundle v a -> Bool) = S.elem `eq` elem
    prop_notElem :: P (a -> S.Bundle v a -> Bool) = S.notElem `eq` notElem
    prop_find :: P ((a -> Bool) -> S.Bundle v a -> Maybe a) = S.find `eq` find
    prop_findIndex :: P ((a -> Bool) -> S.Bundle v a -> Maybe Int)
        = S.findIndex `eq` findIndex
    prop_foldl :: P ((a -> a -> a) -> a -> S.Bundle v a -> a) = S.foldl `eq` foldl
    prop_foldl1 :: P ((a -> a -> a) -> S.Bundle v a -> a) = notNullS2 ===>
        S.foldl1 `eq` foldl1
    prop_foldl' :: P ((a -> a -> a) -> a -> S.Bundle v a -> a) = S.foldl' `eq` foldl'
    prop_foldl1' :: P ((a -> a -> a) -> S.Bundle v a -> a) = notNullS2 ===>
        S.foldl1' `eq` foldl1'
    prop_foldr :: P ((a -> a -> a) -> a -> S.Bundle v a -> a) = S.foldr `eq` foldr
    prop_foldr1 :: P ((a -> a -> a) -> S.Bundle v a -> a) = notNullS2 ===>
        S.foldr1 `eq` foldr1
    prop_prescanl :: P ((a -> a -> a) -> a -> S.Bundle v a -> S.Bundle v a)
        = S.prescanl `eq` prescanl
    prop_prescanl' :: P ((a -> a -> a) -> a -> S.Bundle v a -> S.Bundle v a)
        = S.prescanl' `eq` prescanl
    prop_postscanl :: P ((a -> a -> a) -> a -> S.Bundle v a -> S.Bundle v a)
        = S.postscanl `eq` postscanl
    prop_postscanl' :: P ((a -> a -> a) -> a -> S.Bundle v a -> S.Bundle v a)
        = S.postscanl' `eq` postscanl
    prop_scanl :: P ((a -> a -> a) -> a -> S.Bundle v a -> S.Bundle v a)
        = S.scanl `eq` scanl
    prop_scanl' :: P ((a -> a -> a) -> a -> S.Bundle v a -> S.Bundle v a)
        = S.scanl' `eq` scanl
    prop_scanl1 :: P ((a -> a -> a) -> S.Bundle v a -> S.Bundle v a) = notNullS2 ===>
        S.scanl1 `eq` scanl1
    prop_scanl1' :: P ((a -> a -> a) -> S.Bundle v a -> S.Bundle v a) = notNullS2 ===>
        S.scanl1' `eq` scanl1
    prop_concatMap = forAll arbitrary $ \xs ->
        forAll (sized (\n -> resize (n `div` S.length xs) arbitrary)) $ \f -> unP prop f xs
      where
        prop :: P ((a -> S.Bundle v a) -> S.Bundle v a -> S.Bundle v a) = S.concatMap `eq` concatMap
    -- Bound the number of unfold steps so generated unfolds terminate.
    limitUnfolds f (theirs, ours) | ours >= 0
                                  , Just (out, theirs') <- f theirs = Just (out, (theirs', ours - 1))
                                  | otherwise = Nothing
    prop_unfoldr :: P (Int -> (Int -> Maybe (a,Int)) -> Int -> S.Bundle v a)
        = (\n f a -> S.unfoldr (limitUnfolds f) (a, n))
        `eq` (\n f a -> unfoldr (limitUnfolds f) (a, n))
-- | Boolean reductions checked against their list-model counterparts.
testBoolFunctions :: forall v. S.Bundle v Bool -> [Test]
testBoolFunctions _ = $(testProperties ['prop_and, 'prop_or ])
  where
    prop_and :: P (S.Bundle v Bool -> Bool) = S.and `eq` and
    prop_or :: P (S.Bundle v Bool -> Bool) = S.or `eq` or
-- All Bundle property groups, instantiated at Int/Bool element types.
testBundleFunctions = testSanity (undefined :: S.Bundle v Int)
                   ++ testPolymorphicFunctions (undefined :: S.Bundle v Int)
                   ++ testBoolFunctions (undefined :: S.Bundle v Bool)
tests = [ testGroup "Data.Vector.Fusion.Bundle" testBundleFunctions ]
|
dolio/vector
|
tests/Tests/Bundle.hs
|
bsd-3-clause
| 7,455 | 0 | 19 | 2,300 | 3,280 | 1,745 | 1,535 | -1 | -1 |
{-# LANGUAGE MagicHash, NoImplicitPrelude, UnboxedTuples, UnliftedFFITypes, Trustworthy #-}
module GHC.Debug ( debugLn, debugErrLn ) where
import GHC.Prim
import GHC.Types
import GHC.Tuple ()
-- | Print a line to stdout via the RTS C helper @debugLn@. The string is
-- first copied into a NUL-terminated 'MutableByteArray#' by 'mkMBA',
-- threading the state token explicitly to fix evaluation order.
debugLn :: [Char] -> IO ()
debugLn xs = IO (\s0 ->
    case mkMBA s0 xs of
    (# s1, mba #) ->
        case c_debugLn mba of
        IO f -> f s1)
-- | Like 'debugLn' but routed through the C helper @debugErrLn@
-- (stderr-side variant); same explicit state-token threading.
debugErrLn :: [Char] -> IO ()
debugErrLn xs = IO (\s0 ->
    case mkMBA s0 xs of
    (# s1, mba #) ->
        case c_debugErrLn mba of
        IO f -> f s1)
foreign import ccall unsafe "debugLn"
c_debugLn :: MutableByteArray# RealWorld -> IO ()
foreign import ccall unsafe "debugErrLn"
c_debugErrLn :: MutableByteArray# RealWorld -> IO ()
-- | Copy a Haskell 'String' into a freshly allocated, NUL-terminated
-- 'MutableByteArray#' so it can be handed to C. The length accumulator
-- starts at 1# to reserve room for the trailing @'\\0'@ that 'write'
-- appends after the last character.
mkMBA :: State# RealWorld -> [Char] ->
         (# State# RealWorld, MutableByteArray# RealWorld #)
mkMBA s0 xs = -- Start with 1 so that we have space to put in a \0 at
              -- the end
              case len 1# xs of
              l ->
                  case newByteArray# l s0 of
                  (# s1, mba #) ->
                      case write mba 0# xs s1 of
                      s2 -> (# s2, mba #)
    where len l [] = l
          len l (_ : xs') = len (l +# 1#) xs'
          -- On the empty list we are past the last char: write the NUL.
          write mba offset [] s = writeCharArray# mba offset '\0'# s
          write mba offset (C# x : xs') s
              = case writeCharArray# mba offset x s of
                s' ->
                    write mba (offset +# 1#) xs' s'
|
frantisekfarka/ghc-dsi
|
libraries/ghc-prim/GHC/Debug.hs
|
bsd-3-clause
| 1,538 | 0 | 14 | 622 | 463 | 235 | 228 | 37 | 3 |
{-# OPTIONS -XRebindableSyntax #-}
-- Haskell98!
-- Tests of the do-notation for the parameterized monads
-- We demonstrate a variable-type state `monadic' transformer
-- and its phantom-type-state relative to enforce the locking protocol
-- (a lock can be released only if it is being held, and acquired only
-- if it is not being held)
-- The tests are based on the code
-- http://okmij.org/ftp/Computation/monads.html#param-monad
-- Please search for DO-NOT-YET
module DoParamM where
import Prelude (const, String, ($), (.), Maybe(..),
Int, fromInteger, succ, pred, fromEnum, toEnum,
(+), Char, (==), Bool(..),
IO, getLine, putStrLn, read, show)
import qualified Prelude
import qualified Control.Monad.State as State
import qualified Control.Monad.Identity as IdM
-- A parameterized `monad'
-- | The parameterized "monad": the extra indices @p@ and @q@ track a
-- type-level state before/after the computation; '>>=' must thread them
-- (p -> q, then q -> r), which is what enforces protocols statically.
class Monadish m where
    return :: a -> m p p a
    fail :: String -> m p p a
    (>>=) :: m p q a -> (a -> m q r b) -> m p r b
    m1 >> m2 = m1 >>= (const m2)
-- | Any ordinary 'Prelude.Monad' is a parameterized monad whose state
-- index never changes (both indices are phantom here).
newtype RegularM m p q a = RegularM{unRM :: m a}
instance Prelude.Monad m => Monadish (RegularM m) where
    return = RegularM . Prelude.return
    fail = RegularM . Prelude.fail
    m >>= f = RegularM ((Prelude.>>=) (unRM m) (unRM . f))
-- As a warm-up, we write the regular State computation, with the same
-- type of state throughout. We thus inject Monad.State into the
-- parameterized monad
-- Warm-up: an ordinary State computation injected into the parameterized
-- monad via 'RegularM'; the state type is Int throughout.
test1 = State.runState (unRM c) (0::Int) where
  c = gget >>= (\v -> gput (succ v) >> return v)
  gget :: (State.MonadState s m) => RegularM m s s s
  gget = RegularM State.get
  gput :: (State.MonadState s m) => s -> RegularM m s s ()
  gput = RegularM . State.put
-- (0,1)
-- The same computation written with (rebindable) do-notation.
test1_do = State.runState (unRM c) (0::Int) where
  c = do
      v <- gget
      gput (succ v)
      return v
  gget :: (State.MonadState s m) => RegularM m s s s
  gget = RegularM State.get
  gput :: (State.MonadState s m) => s -> RegularM m s s ()
  gput = RegularM . State.put
-- (0,1)
-- Introduce the variable-type state (transformer)
-- | Variable-type state transformer: the input state has type @si@ and
-- the output state type @so@ — they may differ, unlike ordinary StateT.
newtype VST m si so v = VST{runVST:: si -> m (so,v)}
instance Prelude.Monad m => Monadish (VST m) where
    return x = VST (\si -> Prelude.return (si,x))
    fail x = VST (\si -> Prelude.fail x)
    m >>= f = VST (\si -> (Prelude.>>=) (runVST m si)
                          (\ (sm,x) -> runVST (f x) sm))
vsget :: Prelude.Monad m => VST m si si si
vsget = VST (\si -> Prelude.return (si,si))
-- | 'vsput' may replace the state with a value of a *different* type.
vsput :: Prelude.Monad m => so -> VST m si so ()
vsput x = VST (\si -> Prelude.return (x,()))
-- Repeat test1 via VST: the state type stays the same here.
vsm1 () = vsget >>= (\v -> vsput (succ v) >> return v)
-- The same with the do-notation
vsm1_do () = do
             v <- vsget
             vsput (succ v)
             return v
{-
*DoParamM> :t vsm1
vsm1 :: (Monadish (VST m), IdM.Monad m, Prelude.Enum si) =>
() -> VST m si si si
-}
-- Run vsm1 over the Identity monad; both forms yield (1,0).
test2 = IdM.runIdentity (runVST (vsm1 ()) (0::Int))
-- (1,0)
test2_do = IdM.runIdentity (runVST (vsm1_do ()) (0::Int))
-- (1,0)
-- Now, we vary the type of the state, from Int to a Char
-- State changes type mid-computation: Int in, Char out.
vsm2 () = vsget >>= (\v -> vsput ((toEnum (65+v))::Char) >>
                    vsget >>= \v' -> return (v,v'))
{-
*DoParamM> :t vsm2
vsm2 :: (Monadish (VST m), IdM.Monad m) => () -> VST m Int Char (Int, Char)
-}
-- The same with the do-notation
-- the following does not yet work
-- The do-notation version of vsm2 (Int state replaced by a Char).
vsm2_do () = do
             v <- vsget
             vsput ((toEnum (65+v))::Char)
             v' <- vsget
             return (v,v')
test3 = IdM.runIdentity (runVST (vsm2 ()) (0::Int))
-- ('A',(0,'A'))
test3_do = IdM.runIdentity (runVST (vsm2_do ()) (0::Int))
-- ('A',(0,'A'))
{- The following is a deliberate error:
DoParamM.hs:147:55:
Couldn't match expected type `Int' against inferred type `Char'
In the second argument of `(==)', namely `v''
In the first argument of `return', namely `(v == v')'
In the expression: return (v == v')
vsm3 () = vsget >>= (\v -> vsput ((toEnum (65+v))::Char) >>
vsget >>= \v' -> return (v==v'))
-}
-- The following too must report a type error -- the expression
-- return (v == v') must be flagged, rather than something else
-- DELIBERATELY ill-typed (do not "fix"): v :: Int and v' :: Char, so
-- the checker must reject @v == v'@, mirroring the commented-out vsm3
-- above. The test harness expects the error to point at this return.
vsm3_do () = do
             v <- vsget
             vsput ((toEnum (65+v))::Char)
             v' <- vsget
             return (v==v')
-- Try polymorphic recursion, over the state.
-- crec1 invokes itself, and changes the type of the state from
-- some si to Bool.
-- Polymorphic recursion over the state type: the recursive call runs
-- with a Bool state, after which the original @si@ state is restored.
crec1 :: (Prelude.Enum si, Prelude.Monad m) => VST m si si Int
crec1 = vsget >>= (\s1 -> case fromEnum s1 of
                  0 -> return 0
                  1 -> vsput (pred s1) >> return 1
                  _ -> vsput True >>
                       crec1 >>= (\v ->
                       (vsput s1 >> -- restore state type to si
                        return (v + 10))))
-- The same in the do-notation
crec1_do :: (Prelude.Enum si, Prelude.Monad m) => VST m si si Int
crec1_do = do
  s1 <- vsget
  case fromEnum s1 of
    0 -> return 0
    1 -> do {vsput (pred s1); return 1}
    _ -> do
      vsput True
      v <- crec1_do
      vsput s1 -- restore state type to si
      return (v + 10)
test4 = IdM.runIdentity (runVST crec1 'a')
-- ('a',11)
test4_do = IdM.runIdentity (runVST crec1_do 'a')
-- ('a',11)
-- Another example, to illustrate locking and static reasoning about
-- the locking state
-- Phantom lock states: a computation's type records whether the lock is
-- held before and after it runs.
data Locked = Locked; data Unlocked = Unlocked
newtype LIO p q a = LIO{unLIO::IO a}
instance Monadish LIO where
    return = LIO . Prelude.return
    m >>= f = LIO ((Prelude.>>=) (unLIO m) (unLIO . f))
-- Plain I/O: lock state unchanged (polymorphic in @p@).
lput :: String -> LIO p p ()
lput = LIO . putStrLn
lget :: LIO p p String
lget = LIO getLine
-- In the real program, the following will execute actions to acquire
-- or release the lock. Here, we just print out our intentions.
lock :: LIO Unlocked Locked ()
lock = LIO (putStrLn "Lock")
unlock :: LIO Locked Unlocked ()
unlock = LIO (putStrLn "UnLock")
-- A whole program must begin and end unlocked.
runLIO :: LIO Unlocked Unlocked a -> IO a
runLIO = unLIO
-- User code
-- User code that never touches the lock; inferred type stays @LIO p p ()@.
tlock1 = lget >>= (\l ->
         return (read l) >>= (\x ->
         lput (show (x+1))))
tlock1r = runLIO tlock1
-- the same in the do-notation
tlock1_do = do
  l <- lget
  let x = read l
  lput (show (x+1))
{-
*VarStateM> :t tlock1
tlock1 :: LIO p p ()
Inferred type has the same input and output states and is polymorphic:
tlock1 does not affect the state of the lock.
-}
-- Acquires the lock but does not release it; the inferred type
-- @LIO Unlocked Locked ()@ records that fact.
tlock2 = lget >>= (\l ->
         lock >> (
         return (read l) >>= (\x ->
         lput (show (x+1)))))
tlock2_do = do
  l <- lget
  lock
  let x = read l
  lput (show (x+1))
{-
*VarStateM> :t tlock2
tlock2 :: LIO Unlocked Locked ()
The inferred type says that the computation does the locking.
-}
-- Balanced lock/unlock, so tlock3 :: LIO Unlocked Unlocked () and it
-- is runnable with runLIO.
tlock3 = tlock2 >> unlock
tlock3r = runLIO tlock3
{-
*DoParamM> :t tlock3
tlock3 :: LIO Unlocked Unlocked ()
-}
{-
*DoParamM> tlock3r
-- user input: 123
Lock
124
UnLock
-}
-- do-notation version of tlock3: lock acquired by tlock2_do, released here.
tlock3_do = do {tlock2_do; unlock}
tlock3r_do = runLIO tlock3_do
-- An attempt to execute the following
-- tlock4 = tlock2 >> tlock2
{-
gives a type error:
Couldn't match expected type `Locked'
against inferred type `Unlocked'
Expected type: LIO Locked r b
Inferred type: LIO Unlocked Locked ()
In the expression: tlock2
In a lambda abstraction: \ _ -> tlock2
The error message correctly points out an error of acquiring an already
held lock.
-}
-- The following too must be an error: with the SAME error message as above
tlock4_do = do {tlock2_do; tlock2_do}
-- Similarly, the following gives a type error because of an attempt
-- to release a lock twice
-- tlock4' = tlock2 >> unlock >> unlock
{-
DoParamM.hs:298:30:
Couldn't match expected type `Unlocked'
against inferred type `Locked'
Expected type: LIO Unlocked r b
Inferred type: LIO Locked Unlocked ()
In the second argument of `(>>)', namely `unlock'
In the expression: (tlock2 >> unlock) >> unlock
-}
-- The following too must be an error: with the SAME error message as above
tlock4'_do = do {tlock2_do; unlock; unlock}
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/rebindable/DoParamM.hs
|
bsd-3-clause
| 8,229 | 151 | 20 | 2,177 | 2,051 | 1,143 | 908 | 124 | 3 |
#!/usr/bin/env stack
-- stack --install-ghc runghc --package turtle
{-# LANGUAGE OverloadedStrings #-}
import Turtle
-- | A Shell pipeline over the contents of /tmp. 'ls' yields one path at
-- a time, so everything after the bind runs once per directory entry.
example = do
  entry <- ls "/tmp" -- this actually iterates over all the files
  liftIO (print entry)
  liftIO (print "foo")
main = sh example
|
JoshuaGross/haskell-learning-log
|
Code/turtle/select-ls.hs
|
mit
| 277 | 1 | 8 | 72 | 54 | 25 | 29 | 7 | 1 |
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.TypeChecker
-- Copyright : (c) Phil Freeman 2013
-- License : MIT
--
-- Maintainer : Phil Freeman <[email protected]>
-- Stability : experimental
-- Portability :
--
-- |
-- The top-level type checker, which checks all declarations in a module.
--
-----------------------------------------------------------------------------
{-# LANGUAGE FlexibleInstances #-}
module Language.PureScript.TypeChecker (
module T,
typeCheckAll
) where
import Language.PureScript.TypeChecker.Monad as T
import Language.PureScript.TypeChecker.Kinds as T
import Language.PureScript.TypeChecker.Types as T
import Language.PureScript.TypeChecker.Synonyms as T
import Data.Maybe
import Data.Monoid ((<>))
import qualified Data.Map as M
import Control.Monad.State
import Control.Monad.Error
import Language.PureScript.Types
import Language.PureScript.Names
import Language.PureScript.Kinds
import Language.PureScript.Declarations
import Language.PureScript.TypeClassDictionaries
import Language.PureScript.Environment
import Language.PureScript.Errors
-- | Register a data type (its kind and constructor list) in the
-- environment, then register each of its data constructors, wrapping
-- per-constructor failures with a descriptive error.
addDataType :: ModuleName -> ProperName -> [String] -> [(ProperName, [Type])] -> Kind -> Check ()
addDataType moduleName name args dctors ctorKind = do
  env <- getEnv
  putEnv $ env { types = M.insert (Qualified (Just moduleName) name) (ctorKind, DataType args dctors) (types env) }
  forM_ dctors $ \(dctor, tys) ->
    rethrow (strMsg ("Error in data constructor " ++ show dctor) <>) $
      addDataConstructor moduleName name args dctor tys
-- | Register a single data constructor: its type is each field type in
-- turn as a function argument, ending in the saturated type constructor,
-- universally quantified over the type's parameters.
addDataConstructor :: ModuleName -> ProperName -> [String] -> ProperName -> [Type] -> Check ()
addDataConstructor moduleName name args dctor tys = do
  env <- getEnv
  putEnv env { dataConstructors = M.insert (Qualified (Just moduleName) dctor) (name, ctorType) (dataConstructors env) }
  where
  -- The type constructor applied to all of its type variables.
  resultType = foldl TypeApp (TypeConstructor (Qualified (Just moduleName) name)) (map TypeVar args)
  -- Fields become curried arguments; then generalize over the type args.
  ctorType = mkForAll args (foldr function resultType tys)
-- | Register a type synonym: record its kind in the types map (tagged
-- 'TypeSynonym') and its expansion in the synonyms map.
addTypeSynonym :: ModuleName -> ProperName -> [String] -> Type -> Kind -> Check ()
addTypeSynonym moduleName name args ty kind = do
  env <- getEnv
  putEnv $ env { types = M.insert (Qualified (Just moduleName) name) (kind, TypeSynonym) (types env)
               , typeSynonyms = M.insert (Qualified (Just moduleName) name) (args, ty) (typeSynonyms env) }
-- | Assert that no value with this name is bound in the module yet,
-- throwing the standard "already defined" error otherwise.
valueIsNotDefined :: ModuleName -> Ident -> Check ()
valueIsNotDefined moduleName name = do
  env <- getEnv
  when (M.member (moduleName, name) (names env)) $
    throwError . strMsg $ show name ++ " is already defined"
-- | Record a value binding's type and name kind in the environment.
addValue :: ModuleName -> Ident -> Type -> NameKind -> Check ()
addValue moduleName name ty nameKind = do
  env <- getEnv
  let names' = M.insert (moduleName, name) (ty, nameKind) (names env)
  putEnv env { names = names' }
-- | Register a type class: its type arguments, its member signatures
-- (extracted from the TypeDeclaration members) and its superclasses.
addTypeClass :: ModuleName -> ProperName -> [String] -> [(Qualified ProperName, [Type])] -> [Declaration] -> Check ()
addTypeClass moduleName pn args implies ds =
  let members = map toPair ds in
  modify $ \st -> st { checkEnv = (checkEnv st) { typeClasses = M.insert (Qualified (Just moduleName) pn) (args, members, implies) (typeClasses . checkEnv $ st) } }
  where
  toPair (TypeDeclaration ident ty) = (ident, ty)
  toPair (PositionedDeclaration _ d) = toPair d
  -- Desugaring guarantees only signatures reach here.
  toPair _ = error "Invalid declaration in TypeClassDeclaration"
-- | Bring a batch of type class dictionaries into scope, keyed by their
-- canonical name and defining module; new entries win over nothing
-- (M.union is left-biased toward the new map).
addTypeClassDictionaries :: [TypeClassDictionaryInScope] -> Check ()
addTypeClassDictionaries entries =
  let mentries = M.fromList [ ((canonicalizeDictionary entry, mn), entry) | entry@TypeClassDictionaryInScope{ tcdName = Qualified mn _ } <- entries ]
  in modify $ \st -> st { checkEnv = (checkEnv st) { typeClassDictionaries = (typeClassDictionaries . checkEnv $ st) `M.union` mentries } }
-- | Validate an instance head: only type variables, (non-synonym) type
-- constructors and applications thereof are allowed.
checkTypeClassInstance :: ModuleName -> Type -> Check ()
checkTypeClassInstance _ (TypeVar _) = return ()
checkTypeClassInstance _ (TypeConstructor ctor) = do
  env <- getEnv
  -- Instances for synonyms would be resolved away before dictionary
  -- lookup, so they are rejected outright.
  when (ctor `M.member` typeSynonyms env) . throwError . strMsg $ "Type synonym instances are disallowed"
  return ()
checkTypeClassInstance m (TypeApp t1 t2) = checkTypeClassInstance m t1 >> checkTypeClassInstance m t2
checkTypeClassInstance _ ty = throwError $ mkErrorStack "Type class instance head is invalid." (Just (TypeError ty))
-- |
-- Type check all declarations in a module
--
-- At this point, many declarations will have been desugared, but it is still necessary to
--
-- * Kind-check all types and add them to the @Environment@
--
-- * Type-check all values and add them to the @Environment@
--
-- * Bring type class instances into scope
--
-- * Process module imports
--
typeCheckAll :: Maybe ModuleName -> ModuleName -> [Declaration] -> Check [Declaration]
typeCheckAll _ _ [] = return []
-- Data declaration: kind-check the constructor, register type + dctors.
typeCheckAll mainModuleName moduleName (d@(DataDeclaration name args dctors) : rest) = do
  rethrow (strMsg ("Error in type constructor " ++ show name) <>) $ do
    ctorKind <- kindsOf True moduleName name args (concatMap snd dctors)
    addDataType moduleName name args dctors ctorKind
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
-- Mutually recursive types: kind-check synonyms and data types together
-- before registering any of them.
typeCheckAll mainModuleName moduleName (d@(DataBindingGroupDeclaration tys) : rest) = do
  rethrow (strMsg "Error in data binding group" <>) $ do
    let syns = mapMaybe toTypeSynonym tys
    let dataDecls = mapMaybe toDataDecl tys
    (syn_ks, data_ks) <- kindsOfAll moduleName syns (map (\(name, args, dctors) -> (name, args, concatMap snd dctors)) dataDecls)
    forM_ (zip dataDecls data_ks) $ \((name, args, dctors), ctorKind) ->
      addDataType moduleName name args dctors ctorKind
    forM_ (zip syns syn_ks) $ \((name, args, ty), kind) ->
      addTypeSynonym moduleName name args ty kind
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
  where
  toTypeSynonym (TypeSynonymDeclaration nm args ty) = Just (nm, args, ty)
  toTypeSynonym (PositionedDeclaration _ d') = toTypeSynonym d'
  toTypeSynonym _ = Nothing
  toDataDecl (DataDeclaration nm args dctors) = Just (nm, args, dctors)
  toDataDecl (PositionedDeclaration _ d') = toDataDecl d'
  toDataDecl _ = Nothing
-- Standalone type synonym.
typeCheckAll mainModuleName moduleName (d@(TypeSynonymDeclaration name args ty) : rest) = do
  rethrow (strMsg ("Error in type synonym " ++ show name) <>) $ do
    kind <- kindsOf False moduleName name args [ty]
    addTypeSynonym moduleName name args ty kind
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
typeCheckAll _ _ (TypeDeclaration _ _ : _) = error "Type declarations should have been removed"
-- A single value: infer its type, register it, keep the elaborated body.
typeCheckAll mainModuleName moduleName (ValueDeclaration name nameKind [] Nothing val : rest) = do
  d <- rethrow (strMsg ("Error in declaration " ++ show name) <>) $ do
    valueIsNotDefined moduleName name
    [(_, (val', ty))] <- typesOf mainModuleName moduleName [(name, val)]
    addValue moduleName name ty nameKind
    return $ ValueDeclaration name nameKind [] Nothing val'
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
typeCheckAll _ _ (ValueDeclaration{} : _) = error "Binders were not desugared"
-- Mutually recursive values: infer all types in one group, register each.
typeCheckAll mainModuleName moduleName (BindingGroupDeclaration vals : rest) = do
  d <- rethrow (strMsg ("Error in binding group " ++ show (map (\(ident, _, _) -> ident) vals)) <>) $ do
    forM_ (map (\(ident, _, _) -> ident) vals) $ \name ->
      valueIsNotDefined moduleName name
    tys <- typesOf mainModuleName moduleName $ map (\(ident, _, ty) -> (ident, ty)) vals
    vals' <- forM (zipWith (\(name, nameKind, _) (_, (val, ty)) -> (name, val, nameKind, ty)) vals tys) $ \(name, val, nameKind, ty) -> do
      addValue moduleName name ty nameKind
      return (name, nameKind, val)
    return $ BindingGroupDeclaration vals'
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
-- Foreign data type: only its kind is recorded.
typeCheckAll mainModuleName moduleName (d@(ExternDataDeclaration name kind) : rest) = do
  env <- getEnv
  putEnv $ env { types = M.insert (Qualified (Just moduleName) name) (kind, ExternData) (types env) }
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
-- Foreign value: its declared type must kind-check to *.
typeCheckAll mainModuleName moduleName (d@(ExternDeclaration importTy name _ ty) : rest) = do
  rethrow (strMsg ("Error in foreign import declaration " ++ show name) <>) $ do
    env <- getEnv
    kind <- kindOf moduleName ty
    guardWith (strMsg "Expected kind *") $ kind == Star
    case M.lookup (moduleName, name) (names env) of
      Just _ -> throwError . strMsg $ show name ++ " is already defined"
      Nothing -> putEnv (env { names = M.insert (moduleName, name) (ty, Extern importTy) (names env) })
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
-- Fixity declarations must refer to an operator bound in this module.
typeCheckAll mainModuleName moduleName (d@(FixityDeclaration _ name) : rest) = do
  ds <- typeCheckAll mainModuleName moduleName rest
  env <- getEnv
  guardWith (strMsg ("Fixity declaration with no binding: " ++ name)) $ M.member (moduleName, Op name) $ names env
  return $ d : ds
-- Imports: re-export the imported module's dictionaries under local
-- aliases so instance resolution can see them here.
typeCheckAll mainModuleName currentModule (d@(ImportDeclaration moduleName _ _) : rest) = do
  tcds <- getTypeClassDictionaries
  let instances = filter (\tcd -> let Qualified (Just mn) _ = tcdName tcd in moduleName == mn) tcds
  addTypeClassDictionaries [ tcd { tcdName = Qualified (Just currentModule) ident, tcdType = TCDAlias (canonicalizeDictionary tcd) }
                           | tcd <- instances
                           , let (Qualified _ ident) = tcdName tcd
                           ]
  ds <- typeCheckAll mainModuleName currentModule rest
  return $ d : ds
typeCheckAll mainModuleName moduleName (d@(TypeClassDeclaration pn args implies tys) : rest) = do
  addTypeClass moduleName pn args implies tys
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
-- A named instance behaves exactly like a foreign one at this stage.
typeCheckAll mainModuleName moduleName (TypeInstanceDeclaration dictName deps className tys _ : rest) = do
  typeCheckAll mainModuleName moduleName (ExternInstanceDeclaration dictName deps className tys : rest)
-- Validate instance heads (including constraint heads), then bring the
-- dictionary into scope.
typeCheckAll mainModuleName moduleName (d@(ExternInstanceDeclaration dictName deps className tys) : rest) = do
  mapM_ (checkTypeClassInstance moduleName) tys
  forM_ deps $ mapM_ (checkTypeClassInstance moduleName) . snd
  addTypeClassDictionaries [TypeClassDictionaryInScope (Qualified (Just moduleName) dictName) className tys (Just deps) TCDRegular]
  ds <- typeCheckAll mainModuleName moduleName rest
  return $ d : ds
-- Keep source positions attached while rethrowing errors at them.
typeCheckAll mainModuleName moduleName (PositionedDeclaration pos d : rest) =
  rethrowWithPosition pos $ do
    (d' : rest') <- typeCheckAll mainModuleName moduleName (d : rest)
    return (PositionedDeclaration pos d' : rest')
|
bergmark/purescript
|
src/Language/PureScript/TypeChecker.hs
|
mit
| 10,630 | 25 | 23 | 1,900 | 3,524 | 1,810 | 1,714 | 164 | 6 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE CPP #-}
{- |
Module : Business.Bookkeeping
Copyright : Kadzuya Okamoto 2017
License : MIT
Stability : experimental
Portability : unknown
This module exports core functions and types for bookkeeping.
-}
module Business.Bookkeeping
(
-- * Usage examples
-- $setup
-- * Pritty printers
ppr
-- * Constructors
, year
, month
, activity
, dateTrans
, categoryName
-- * Converters
, runTransactions
-- * Types
, Transactions
, YearTransactions
, MonthTransactions
, DateTransactions
, Journal(..)
, Year
, Month
, Date
, Description
, unDescription
, SubDescription
, unSubDescription
, Amount
, unAmount
, Category(..)
, CategoryName
, unCategoryName
, unCategorySubName
, CategoryType(..)
, DebitCategory(..)
, CreditCategory(..)
) where
import Data.Monoid ((<>))
import qualified Data.Semigroup as Sem
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Time.Calendar (Day, fromGregorian)
import Data.Transaction (Transaction, action, tMap, toList)
{- $setup
>>> :{
let
advance :: CategoryName -> SubDescription -> Amount -> DateTransactions
advance name = dateTrans
(DebitCategory $ Category name Expenses)
(CreditCategory $ Category "Deposit" Assets)
sample =
year 2015 $ do
month 1 $ do
activity 1 "Constant expenses" $
advance "Communication" "Mobile phone" 3000
activity 3 "Mail a contract" $ do
advance "Communication" "Stamp" 50
advance "Office supplies" "Envelope" 100
month 2 $
activity 1 "Constant expenses" $
advance "Communication" "Mobile phone" 3000
:}
-}
{-| Convert from 'YearTransactions' to 'Transactions'.
-}
-- | Supply the 'Year' to every pending journal builder.
year :: Year -> YearTransactions -> Transactions
year y = tMap (\mkJournal -> mkJournal y)
{-| Convert from 'MonthTransactions' to 'YearTransactions'. -}
-- | Supply the 'Month' to every pending journal builder.
month :: Month -> MonthTransactions -> YearTransactions
month m = tMap (\mkJournal -> mkJournal m)
{-| Convert from 'DateTransactions' to 'MonthTransactions'.
-}
-- | Supply the 'Date' and 'Description' to every pending journal builder.
activity :: Date -> Description -> DateTransactions -> MonthTransactions
activity d desc = tMap (\mkJournal -> mkJournal d desc)
-- | Build one journal entry from its debit/credit categories, a
-- sub-description and an amount; the day, description, month and year
-- are filled in later by 'activity', 'month' and 'year'.
dateTrans :: DebitCategory
          -> CreditCategory
          -> SubDescription
          -> Amount
          -> DateTransactions
dateTrans debit credit subdesc amount =
  action $ \d desc m y ->
    Journal
    { tDay = fromGregorian (unYear y) (unMonth m) (unDate d)
    , tDescription = desc
    , tSubDescription = subdesc
    , tDebit = debit
    , tCredit = credit
    , tAmount = amount
    }
{-| Take list of `Journal` out from 'Transactions'.
-}
-- | Flatten the accumulated transactions into a plain list of journals.
runTransactions :: Transactions -> [Journal]
runTransactions ts = toList ts
{-| A pretty printer for `Transactions`.
>>> ppr sample
tDay: 2015-01-01
tDescription: Constant expenses
tSubDescription: Mobile phone
tDebit: Communication (Expenses)
tCredit: Deposit (Assets)
tAmount: 3000
<BLANKLINE>
tDay: 2015-01-03
tDescription: Mail a contract
tSubDescription: Stamp
tDebit: Communication (Expenses)
tCredit: Deposit (Assets)
tAmount: 50
<BLANKLINE>
tDay: 2015-01-03
tDescription: Mail a contract
tSubDescription: Envelope
tDebit: Office supplies (Expenses)
tCredit: Deposit (Assets)
tAmount: 100
<BLANKLINE>
tDay: 2015-02-01
tDescription: Constant expenses
tSubDescription: Mobile phone
tDebit: Communication (Expenses)
tCredit: Deposit (Assets)
tAmount: 3000
<BLANKLINE>
-}
ppr :: Transactions -> IO ()
ppr = T.putStr . T.unlines . map format . runTransactions
where
format :: Journal -> T.Text
format Journal {..} =
T.unlines
[ "tDay: " <> (T.pack . show) tDay
, "tDescription: " <> unDescription tDescription
, "tSubDescription: " <> unSubDescription tSubDescription
, T.concat
[ "tDebit: "
, (unCategoryName . cName . unDebitCategory) tDebit
, maybe "" (" - " <>) $
(unCategorySubName . cName . unDebitCategory) tDebit
, " ("
, (T.pack . show . cType . unDebitCategory) tDebit
, ")"
]
, T.concat
[ "tCredit: "
, (unCategoryName . cName . unCreditCategory) tCredit
, maybe "" (" - " <>) $
(unCategorySubName . cName . unCreditCategory) tCredit
, " ("
, (T.pack . show . cType . unCreditCategory) tCredit
, ")"
]
, "tAmount: " <> (T.pack . show . unAmount) tAmount
]
{- ==============
- Types
- ============== -}
-- Each synonym is a 'Transaction' of journal constructors still awaiting
-- their outer context (date, description, month, year).
type Transactions = Transaction Journal
type YearTransactions = Transaction (Year -> Journal)
type MonthTransactions = Transaction (Month -> Year -> Journal)
type DateTransactions = Transaction (Date -> Description -> Month -> Year -> Journal)
{-| A type representing a transaction.
-}
data Journal = Journal
  { tDay :: Day
  , tDescription :: Description
  , tSubDescription :: SubDescription
  , tDebit :: DebitCategory
  , tCredit :: CreditCategory
  , tAmount :: Amount
  } deriving (Show, Read, Ord, Eq)
-- Newtype wrappers keep the numeric and textual parts of a journal from
-- being mixed up; GeneralizedNewtypeDeriving lifts the obvious classes.
newtype Year = Year
  { unYear :: Integer
  } deriving (Show, Read, Ord, Eq, Num, Enum, Real, Integral)
newtype Month = Month
  { unMonth :: Int
  } deriving (Show, Read, Ord, Eq, Num, Enum, Real, Integral)
newtype Date = Date
  { unDate :: Int
  } deriving (Show, Read, Ord, Eq, Num, Enum, Real, Integral)
newtype Description = Description
  { unDescription :: Text
  } deriving (Show, Read, Ord, Eq, Sem.Semigroup, Monoid)
instance IsString Description where
  fromString = Description . fromString
newtype SubDescription = SubDescription
  { unSubDescription :: Text
  } deriving (Show, Read, Ord, Eq, Sem.Semigroup, Monoid)
instance IsString SubDescription where
  fromString = SubDescription . fromString
newtype Amount = Amount
  { unAmount :: Int
  } deriving (Show, Read, Ord, Eq, Num, Enum, Real, Integral)
newtype DebitCategory = DebitCategory
  { unDebitCategory :: Category
  } deriving (Show, Read, Ord, Eq)
newtype CreditCategory = CreditCategory
  { unCreditCategory :: Category
  } deriving (Show, Read, Ord, Eq)
{-| A type representing an accounts title.
-}
data Category = Category
  { cName :: CategoryName
  , cType :: CategoryType
  } deriving (Show, Read, Ord, Eq)
data CategoryName = CategoryName
  { unCategoryName :: Text
  , unCategorySubName :: Maybe Text
  } deriving (Show, Read, Ord, Eq)
categoryName :: Text -> Maybe Text -> CategoryName
categoryName = CategoryName
-- String literals become category names with no sub-name.
instance IsString CategoryName where
  fromString str =
    CategoryName (fromString str) Nothing
data CategoryType
  = Assets
  | Liabilities
  | Stock
  | Revenue
  | Expenses
  deriving (Show, Read, Ord, Eq, Enum)
|
arowM/haskell-bookkeeping
|
src/Business/Bookkeeping.hs
|
mit
| 6,765 | 0 | 16 | 1,535 | 1,454 | 834 | 620 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Main ( main ) where
import Foreign.C.String
import Data.Bits
import Control.Concurrent
import Control.Exception
import qualified Data.Set as S
import Foreign.C.Types
import Foreign.Marshal.Alloc
import Foreign.Ptr
import Foreign.Storable
import Graphics.Caramia
import Graphics.Caramia.Prelude hiding ( init )
import Graphics.UI.SDL
import System.IO.Unsafe ( unsafePerformIO )
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit hiding ( Test )
foreign import ccall unsafe "memset" c_memset
:: Ptr a -> CInt -> CSize -> IO (Ptr b)
-- | Process-global lock serializing SDL use across test cases; the
-- NOINLINE pragma is required for the unsafePerformIO one-shot MVar idiom.
sdlLock :: MVar ()
sdlLock = unsafePerformIO $ newMVar ()
{-# NOINLINE sdlLock #-}
-- | Run an action with a fresh SDL window and an OpenGL 3.3 core
-- (debug) context, on a bound OS thread and under the global SDL lock.
-- The context and window are torn down even if the action throws.
setup :: IO a -> IO a
setup action = runInBoundThread $ withMVar sdlLock $ \_ -> withCString "buffer" $ \cstr -> do
    _ <- init SDL_INIT_VIDEO
    _ <- glSetAttribute SDL_GL_CONTEXT_MAJOR_VERSION 3
    _ <- glSetAttribute SDL_GL_CONTEXT_MINOR_VERSION 3
    _ <- glSetAttribute SDL_GL_CONTEXT_PROFILE_MASK SDL_GL_CONTEXT_PROFILE_CORE
    _ <- glSetAttribute SDL_GL_CONTEXT_FLAGS SDL_GL_CONTEXT_DEBUG_FLAG
    w <- createWindow cstr SDL_WINDOWPOS_UNDEFINED SDL_WINDOWPOS_UNDEFINED
                      1500 1000
                      (SDL_WINDOW_OPENGL .|.
                       SDL_WINDOW_SHOWN)
    ctx <- glCreateContext w
    finally (giveContext action) $ do
        glDeleteContext ctx
        destroyWindow w
        quit
-- | Entry point: run the buffer test suite under test-framework.
main :: IO ()
main = defaultMain tests
-- | The buffer test cases. Note: invalidateSomeBuffers, createSomeBuffers
-- and unsyncTest are defined elsewhere in this module.
tests :: [Test]
tests = [
      testCase "Invalidation does not crash and burn" invalidateSomeBuffers
    , testCase "I can create various types of buffers" createSomeBuffers
    , testCase "I cannot map write-only buffers for reading" writeOnlyTest
    , testCase "I cannot map read-only buffers for writing" readOnlyTest
    , testCase "I cannot map no-access buffers for anything" noAccessTest
    , testCase "I can map readwrite buffers for anything" readWriteTest
    , testCase "What I write I can get back" writeReadBackTest
    , testCase "Mapping with offset works" offsetMappingTest
    , testCase "Mapping with unsynchronized flag set doesn't crash" unsyncTest
    , testCase "Buffer copying works" copyBuffersTest
    , testCase "Explicit flushing tentatively works" explicitFlushTest
    , testCase ("I cannot invoke `explicitFlush` on a mapping without " <>
                "`ExplicitFlush` flag.") incorrectExplicitFlushTest
    , testCase "I cannot map with `ExplicitFlush` and no writing access."
               noWriteExplicitFlushTest
    , testCase "I cannot invoke `explicitFlush` without a mapping."
               explicitFlushNoMapTest
    ]
-- | Flushing a buffer that is not currently mapped must throw.
explicitFlushNoMapTest :: IO ()
explicitFlushNoMapTest = setup $ do
    buf <- newBufferFromList (take 10000 $ repeat (12 :: Word8))
                             (\old -> old { accessFlags = ReadWriteAccess })
    expectException $ explicitFlush buf 10 10
-- | A mapping with ExplicitFlush must also request write access.
noWriteExplicitFlushTest :: IO ()
noWriteExplicitFlushTest = setup $ do
    buf <- newBufferFromList (take 10000 $ repeat (12 :: Word8))
                             (\old -> old { accessFlags = ReadWriteAccess })
    expectException $ withMapping2 (S.singleton ExplicitFlush)
                                   100 100 ReadAccess buf $ const $ return ()
    expectException $ withMapping2 (S.singleton ExplicitFlush)
                                   100 100 NoAccess buf $ const $ return ()
-- | explicitFlush on a mapping created without the ExplicitFlush flag
-- must throw.
incorrectExplicitFlushTest :: IO ()
incorrectExplicitFlushTest = setup $ do
    buf <- newBufferFromList (take 10000 $ repeat (12 :: Word8))
                             (\old -> old { accessFlags = ReadWriteAccess })
    withMapping 100 100 WriteAccess buf $ \_ -> do
        expectException $ explicitFlush buf 10 10
-- | Write through an ExplicitFlush mapping, flush just the touched
-- range, then map again for reading and check the bytes arrived.
explicitFlushTest :: IO ()
explicitFlushTest = setup $ do
    buf <- newBufferFromList (take 10000 $ repeat (12 :: Word8))
                             (\old -> old { accessFlags = ReadWriteAccess })
    withMapping2 (S.singleton ExplicitFlush) 7800 1000 WriteAccess buf $ \ptr -> do
        pokeElemOff (ptr :: Ptr Word8) 10 8
        pokeElemOff (ptr :: Ptr Word8) 11 9
        pokeElemOff (ptr :: Ptr Word8) 12 10
        -- Flush offsets are relative to the mapping, not the buffer.
        explicitFlush buf 8 3
    withMapping 7800 100 ReadAccess buf $ \ptr -> do
        let assM x y = do z <- peekElemOff (ptr :: Ptr Word8) x
                          assertEqual "explicitly flushed bytes are the same"
                                      y z
        assM 10 8
        assM 11 9
        assM 12 10
-- | Copy between two distinct buffers and within a single buffer, then
-- read both back and verify the copied ranges and their boundaries.
copyBuffersTest :: IO ()
copyBuffersTest = setup $ do
    buf <- newBufferFromList (take 10000 $ repeat (77 :: Word8))
                             (\old -> old { accessFlags = ReadWriteAccess })
    buf2 <- newBufferFromList (take 20000 $ repeat (99 :: Word8))
                              (\old -> old { accessFlags = ReadWriteAccess })
    buf3 <- newBufferFromList (replicate 300 111 ++ replicate 300 222 :: [Word8])
                              (\old -> old { accessFlags = ReadWriteAccess })
    -- copy from another to another
    copy buf 11 buf2 13 5
    -- copying inside the same buffer
    copy buf3 3 buf3 311 5
    withMapping 0 100 ReadAccess buf $ \ptr -> do
        let cptr = castPtr ptr :: Ptr Word8
            ass = assertEqual "bytes copied look correct"
            assM x off = do v <- peekElemOff cptr off
                            ass x v
        assM 77 0
        assM 77 1
        assM 77 2
        assM 77 3
        assM 77 4
        assM 77 10
        assM 99 11
        assM 99 12
        assM 99 13
        assM 99 14
        assM 99 15
        assM 77 16
    withMapping 0 600 ReadAccess buf3 $ \ptr -> do
        let cptr = castPtr ptr :: Ptr Word8
            ass = assertEqual "bytes copied look correct"
            assM x off = do v <- peekElemOff cptr off
                            ass x v
        assM 111 0
        assM 111 1
        assM 111 2
        assM 222 3
        assM 222 4
        assM 222 5
        assM 222 6
        assM 222 7
        assM 111 8
        assM 111 9
        assM 111 10
-- | Map a cyclic-pattern buffer at several offsets and check that the
-- first bytes of each mapping line up with the pattern (mod 256).
offsetMappingTest :: IO ()
offsetMappingTest = setup $ do
    buf <- newBufferFromList (take 10000 $ cycle [(0 :: Word8)..])
                             (\old -> old { accessFlags = ReadWriteAccess })
    for_ offsets $ \off -> withMapping off 32 ReadAccess buf $ \ptr -> do
        let cptr = castPtr ptr :: Ptr Word8
            expect m v = assertEqual "a byte read from a buffer should be the same that was written"
                                     (fromIntegral $ (off+m) `mod` 256)
                                     v
        v1 <- peekElemOff cptr 0
        v2 <- peekElemOff cptr 1
        v3 <- peekElemOff cptr 2
        v4 <- peekElemOff cptr 3
        expect 0 v1
        expect 1 v2
        expect 2 v3
        expect 3 v4
  where
    offsets = [ 0, 16, 1024, 2048, 2040, 8000 ]
-- | Fill a 1MB buffer through a write mapping, poke one distinct byte,
-- then read it back and check the byte and its neighbours.
writeReadBackTest :: IO ()
writeReadBackTest = setup $ do
    buf <- newBuffer defaultBufferCreation
                     { size = 1000000
                     , accessFlags = ReadWriteAccess }
    withMapping 0 1000000 WriteAccess buf $ \ptr -> do
        void $ c_memset ptr 33 1000000
        pokeElemOff (castPtr ptr :: Ptr Word8) 12371 177
    withMapping 0 1000000 ReadAccess buf $ \ptr -> do
        let cptr = castPtr ptr :: Ptr Word8
        v <- peekElemOff cptr 12371
        assertEqual "a byte read from a buffer should be the same that was written"
                    177
                    v
        v_prec <- peekElemOff cptr 12370
        v_succ <- peekElemOff cptr 12372
        assertEqual "a byte read from a buffer should be the same that was written"
                    33
                    v_prec
        assertEqual "a byte read from a buffer should be the same that was written"
                    33
                    v_succ
-- | A write-only buffer must reject read and read-write mappings but
-- accept a write mapping.
writeOnlyTest :: IO ()
writeOnlyTest = setup $ do
    buf <- newBuffer defaultBufferCreation
        { size = 1024
        , accessFlags = WriteAccess }
    expectException $ bufferMap 0 1024 ReadAccess buf
    expectException $ bufferMap 0 1024 ReadWriteAccess buf
    withMapping 0 1024 WriteAccess buf (const $ return ())
-- | A read-only buffer must reject write and read-write mappings but
-- accept a read mapping.
readOnlyTest :: IO ()
readOnlyTest = setup $ do
    buf <- newBuffer defaultBufferCreation
        { size = 1024
        , accessFlags = ReadAccess }
    expectException $ bufferMap 0 1024 WriteAccess buf
    expectException $ bufferMap 0 1024 ReadWriteAccess buf
    withMapping 0 1024 ReadAccess buf (const $ return ())
-- | A buffer created with 'NoAccess' must reject every kind of mapping
-- (including 'NoAccess' itself, which is part of 'allAccessFlags').
noAccessTest :: IO ()
noAccessTest = setup $ do
    buf <- newBuffer defaultBufferCreation
        { size = 1024
        , accessFlags = NoAccess }
    for_ allAccessFlags $ \flags ->
        expectException $ bufferMap 0 1024 flags buf
-- | A read-write buffer must accept every mapping mode except 'NoAccess'.
readWriteTest :: IO ()
readWriteTest = setup $ do
    buf <- newBuffer defaultBufferCreation
        { size = 1024
        , accessFlags = ReadWriteAccess }
    for_ allAccessFlags $ \flags ->
        when (flags /= NoAccess) $
            withMapping 0 1024 flags buf (const $ return ())
-- | Run an action and succeed only if it throws any exception; if the
-- action completes normally, fail the test with an 'error'.
expectException :: IO a -> IO ()
expectException action = do
    outcome <- try action
    either ignore (const (error "expected exception")) outcome
  where
    ignore :: SomeException -> IO ()
    ignore _ = return ()
-- | Run an action over the cartesian product of all access hints, access
-- flags and test sizes. For each combination the action is run twice: once
-- with a buffer created without initial data and once with initial data
-- supplied from a stack-allocated block.
withSomeBuffers :: (BufferCreation -> Buffer -> IO ()) -> IO ()
withSomeBuffers action =
    for_ allAccessHints $ \hints ->
    for_ allAccessFlags $ \flags ->
    for_ sizes $ \size -> do
        let c = defaultBufferCreation {
                    size = size
                  , initialData = Nothing
                  , accessFlags = flags
                  , accessHints = hints }
        newBuffer c >>= action c
        -- NOTE(review): the allocated block is uninitialized memory; the
        -- tests only exercise creation, not the copied-in contents.
        allocaBytes size $ \ptr -> do
            let c' = c { initialData = Just ptr }
            newBuffer c' >>= action c'
        runPendingFinalizers
-- | For every writable buffer shape, check that an unsynchronized write
-- mapping over the whole buffer can be created and released.
unsyncTest :: IO ()
unsyncTest = setup $ withSomeBuffers $ \cr buf -> do
    when (accessFlags cr == WriteAccess ||
          accessFlags cr == ReadWriteAccess) $
        withMapping2 (S.singleton UnSynchronized)
                     0
                     (size cr)
                     WriteAccess
                     buf $ const $ return ()
-- | Invalidating every buffer shape should not raise.
invalidateSomeBuffers :: IO ()
invalidateSomeBuffers = setup $ withSomeBuffers $ \_ -> invalidateBuffer
-- | Merely creating every buffer shape should not raise.
createSomeBuffers :: IO ()
createSomeBuffers = setup $ withSomeBuffers (\_ _ -> return ())
-- | Buffer sizes exercised by 'withSomeBuffers': tiny sizes, values around
-- a power of two (1023/1024/1025), and one large allocation.
sizes :: [Int]
sizes = [ 1, 2, 3, 4, 8, 11, 12, 512, 1024, 1023, 1025, 1000000 ]
-- | Every buffer access mode, including the degenerate 'NoAccess'.
allAccessFlags :: [AccessFlags]
allAccessFlags = [ReadAccess, WriteAccess, ReadWriteAccess, NoAccess]
-- | Every combination of access frequency and access nature hint.
allAccessHints :: [(AccessFrequency, AccessNature)]
allAccessHints = (,) <$> [Stream, Static, Dynamic] <*> [Draw, Read, Copy]
|
Noeda/caramia
|
tests/buffer/Main.hs
|
mit
| 10,664 | 0 | 22 | 3,348 | 3,004 | 1,482 | 1,522 | 252 | 2 |
{-# LANGUAGE TemplateHaskell, OverloadedStrings #-}
{-
A base bundle is used for incremental linking. It contains information about
the symbols that have already been linked. These symbols are not included
again in the incrementally linked program.
The base contains a CompactorState for consistent renaming of private names
and packed initialization of info tables and static closures.
-}
module Gen2.Base where
import qualified Gen2.Object as Object
import Compiler.JMacro
import Control.Lens
import Control.Monad
import Data.Array
import qualified Data.Binary as DB
import qualified Data.Binary.Get as DB
import qualified Data.Binary.Put as DB
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Data.Map as M
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as S
import Data.Text (Text)
import qualified Data.Text as T
-- | An infinite supply of short JavaScript identifiers: first all single
-- letters, then all two-character names, and so on. Names that collide
-- with JavaScript keywords (or reserved/literal words) are filtered out.
newLocals :: [Ident]
newLocals = filter (not . isKeyword) $ map (TxtI . T.pack) $ (map (:[]) chars0) ++ concatMap mkIdents [1..]
  where
    -- Identifiers of length n+1: a leading letter followed by n
    -- alphanumeric characters.
    mkIdents n = [c0:cs | c0 <- chars0, cs <- replicateM n chars]
    chars0 = ['a'..'z']++['A'..'Z']
    chars = chars0++['0'..'9']
    isKeyword (TxtI i) = i `HS.member` kwSet
    kwSet = HS.fromList keywords
    keywords = [ "break", "case", "catch", "continue", "debugger"
               , "default", "delete", "do", "else", "finally", "for"
               , "function", "if", "in", "instanceof", "new", "return"
               , "switch", "this", "throw", "try", "typeof", "var", "void"
               , "while", "with"
               , "class", "enum", "export", "extends", "import", "super", "const"
               , "implements", "interface", "let", "package", "private", "protected"
               , "public", "static", "yield"
               , "null", "true", "false"
               ]
-- | The fresh-name supply used by the compactor: every generated local
-- from 'newLocals', prefixed with @h$$@ to keep it out of the user
-- namespace.
renamedVars :: [Ident]
renamedVars = [ TxtI ("h$$" <> orig) | TxtI orig <- newLocals ]
-- | State threaded through the compactor. The @_parent*@ maps describe
-- symbols owned by a base bundle we link against; the non-parent maps
-- describe symbols introduced by the current link.
data CompactorState =
  CompactorState { _identSupply :: [Ident] -- ^ ident supply for new names
                 , _nameMap :: !(HashMap Text Ident) -- ^ renaming mapping for internal names
                 , _entries :: !(HashMap Text Int) -- ^ entry functions (these get listed in the metadata init array)
                 , _numEntries :: !Int -- ^ number of entry functions
                 , _statics :: !(HashMap Text Int) -- ^ mapping of global closure -> index in current block, for static initialisation
                 , _numStatics :: !Int -- ^ number of static entries
                 , _labels :: !(HashMap Text Int) -- ^ non-Haskell JS labels
                 , _numLabels :: !Int -- ^ number of labels
                 , _parentEntries :: !(HashMap Text Int) -- ^ entry functions we're not linking, offset where parent gets [0..n], grandparent [n+1..k] etc
                 , _parentStatics :: !(HashMap Text Int) -- ^ objects we're not linking in base bundle
                 , _parentLabels :: !(HashMap Text Int) -- ^ non-Haskell JS labels in parent
                 } deriving (Show)
-- Generates lenses (identSupply, nameMap, ...) for all fields above.
makeLenses ''CompactorState
-- | A compactor state with the full fresh-name supply and no symbols.
emptyCompactorState :: CompactorState
emptyCompactorState = CompactorState renamedVars HM.empty HM.empty 0 HM.empty 0 HM.empty 0 HM.empty HM.empty HM.empty
-- | Human-readable summary of a 'Base': its packages, the number of
-- linked units, and the size of the compactor's renaming table.
showBase :: Base -> String
showBase b = unlines [header, pkgLine, unitLine, renameLine]
  where
    header = "Base:"
    pkgLine = "  packages: " ++ show (basePkgs b)
    unitLine = "  number of units: " ++ show (S.size $ baseUnits b)
    renameLine = "  renaming table size: " ++
        show (baseCompactorState b ^. nameMap . to HM.size)
-- | A base bundle: the compactor state to resume from, the packages
-- already linked, and the set of (package, module, unit index) triples
-- that must not be linked again.
data Base = Base { baseCompactorState :: CompactorState
                 , basePkgs :: [Object.Package]
                 , baseUnits :: Set (Object.Package, Text, Int)
                 }
-- | A base bundle containing no packages or units.
emptyBase :: Base
emptyBase =
    Base { baseCompactorState = emptyCompactorState
         , basePkgs = []
         , baseUnits = S.empty
         }
-- | Serialise a 'Base'. Layout: magic "GHCJSBASE", version tag, compactor
-- state, linked packages, then deduplicated package and module tables
-- followed by the unit triples encoded as indices into those tables.
-- 'getBase' must read fields in exactly this order.
putBase :: Base -> DB.Put
putBase (Base cs packages funs) = do
  DB.putByteString "GHCJSBASE"
  DB.putLazyByteString Object.versionTag
  putCs cs
  putList DB.put packages
  putList putPkg pkgs
  putList DB.put mods
  putList putFun (S.toList funs)
  where
    -- NOTE(review): 'pi' shadows Prelude.pi here; it writes an Int as a
    -- little-endian 32-bit word.
    pi :: Int -> DB.Put
    pi = DB.putWord32le . fromIntegral
    uniq :: Ord a => [a] -> [a]
    uniq = S.toList . S.fromList
    pkgs = uniq (map (\(x,_,_) -> x) $ S.toList funs)
    pkgsM = M.fromList (zip pkgs [(0::Int)..])
    mods = uniq (map (\(_,x,_) -> x) $ S.toList funs)
    modsM = M.fromList (zip mods [(0::Int)..])
    putList f xs = pi (length xs) >> mapM_ f xs
    -- serialise the compactor state; only the head of the (infinite)
    -- ident supply is stored, so it must be non-empty
    putCs (CompactorState [] _ _ _ _ _ _ _ _ _ _) = error "putBase: putCs exhausted renamer symbol names"
    putCs (CompactorState (ns:_) nm es _ ss _ ls _ pes pss pls) = do
      DB.put ns
      DB.put (HM.toList nm)
      DB.put (HM.toList es)
      DB.put (HM.toList ss)
      DB.put (HM.toList ls)
      DB.put (HM.toList pes)
      DB.put (HM.toList pss)
      DB.put (HM.toList pls)
    putPkg (Object.Package k) = DB.put k
    -- fixme group things first
    putFun (p,m,s) = pi (pkgsM M.! p) >> pi (modsM M.! m) >> DB.put s
-- | Deserialise a 'Base' written by 'putBase'. The 'FilePath' is only
-- used in error messages. The restored compactor state is passed through
-- 'makeCompactorParent' so the loaded symbols become parent symbols.
getBase :: FilePath -> DB.Get Base
getBase file = getBase'
  where
    gi :: DB.Get Int
    gi = fromIntegral <$> DB.getWord32le
    getList f = DB.getWord32le >>= \n -> replicateM (fromIntegral n) f
    -- Units are stored as (package index, module index, unit number);
    -- resolve the indices through the arrays built below.
    getFun ps ms = (,,) <$> ((ps!) <$> gi) <*> ((ms!) <$> gi) <*> DB.get
    la xs = listArray (0, length xs - 1) xs
    getPkg = Object.Package <$> DB.get
    getCs = do
      n <- DB.get
      nm <- HM.fromList <$> DB.get
      es <- HM.fromList <$> DB.get
      ss <- HM.fromList <$> DB.get
      ls <- HM.fromList <$> DB.get
      pes <- HM.fromList <$> DB.get
      pss <- HM.fromList <$> DB.get
      pls <- HM.fromList <$> DB.get
      -- The ident supply is restored by dropping already-consumed names
      -- (everything before the stored head 'n') from 'renamedVars'.
      return (CompactorState (dropWhile (/=n) renamedVars) nm es (HM.size es) ss (HM.size ss) ls (HM.size ls) pes pss pls)
    getBase' = do
      hdr <- DB.getByteString 9
      when (hdr /= "GHCJSBASE") (error $ "getBase: invalid base file: " <> file)
      vt <- DB.getLazyByteString (fromIntegral Object.versionTagLength)
      when (vt /= Object.versionTag) (error $ "getBase: incorrect version: " <> file)
      cs <- makeCompactorParent <$> getCs
      linkedPackages <- getList DB.get
      pkgs <- la <$> getList getPkg
      mods <- la <$> getList DB.get
      funs <- getList (getFun pkgs mods)
      return (Base cs linkedPackages $ S.fromList funs)
-- | make a base state from a CompactorState: empty the current symbols sets, move everything to
-- the parent
makeCompactorParent :: CompactorState -> CompactorState
makeCompactorParent (CompactorState is nm es nes ss nss ls nls pes pss pls) =
  CompactorState is nm HM.empty 0 HM.empty 0 HM.empty 0
    -- Existing parent indices are shifted up by the current counts so the
    -- current symbols can take the low indices in each merged map.
    (HM.union (fmap (+nes) pes) es)
    (HM.union (fmap (+nss) pss) ss)
    (HM.union (fmap (+nls) pls) ls)
-- | 'get' has no file name available, so errors report "<unknown file>".
instance DB.Binary Base where
  get = getBase "<unknown file>"
  put = putBase
|
forked-upstream-packages-for-ghcjs/ghcjs
|
ghcjs/src/Gen2/Base.hs
|
mit
| 7,027 | 0 | 14 | 1,990 | 2,160 | 1,150 | 1,010 | -1 | -1 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module InternalTests.MacrosTests where
import Test.Framework
import GraphDB.Util.Prelude
import qualified GraphDB.Util.Prelude.TH as TH
import qualified GraphDB.Model as G
import qualified GraphDB.Macros as G
-- | Root type of the test graph: a triple of UID counters.
type Catalogue = (UID Artist, UID Genre, UID Song)
-- | Phantom-typed integer identifier; the phantom parameter keeps UIDs of
-- different entities from being mixed up.
newtype UID a = UID Int deriving (Show, Eq, Ord, Generic, Data, Typeable, Enum, Num, Real, Integral)
data Artist = Artist Name deriving (Show, Eq, Ord, Generic, Data, Typeable)
data Genre = Genre Name deriving (Show, Eq, Ord, Generic, Data, Typeable)
data Song = Song Name deriving (Show, Eq, Ord, Generic, Data, Typeable)
type Name = Text
-- | A value paired with its UID.
data Identified a = Identified {-# UNPACK #-} !(UID a) !a deriving (Show, Eq, Ord, Generic, Data, Typeable)
-- Edges
-------------------------
-- Artists hang off the catalogue root, indexed by both UID and name.
instance G.Edge Catalogue (Identified Artist) where
  data Index Catalogue (Identified Artist) =
    Catalogue_Artist_UID (UID Artist) |
    Catalogue_Artist_Name Text
    deriving (Eq, Generic)
  indexes (Identified uid (Artist n)) =
    [Catalogue_Artist_UID uid, Catalogue_Artist_Name n]
-- Genres hang off the catalogue root, indexed by both UID and name.
instance G.Edge Catalogue (Identified Genre) where
  data Index Catalogue (Identified Genre) =
    Catalogue_Genre_UID (UID Genre) |
    Catalogue_Genre_Name Text
    deriving (Eq, Generic)
  indexes (Identified uid (Genre n)) =
    [Catalogue_Genre_UID uid, Catalogue_Genre_Name n]
-- Genre -> Song edges carry no payload; a single unit index.
instance G.Edge (Identified Genre) (Identified Song) where
  data Index (Identified Genre) (Identified Song) =
    Genre_Song
    deriving (Eq, Generic)
  indexes _ = [Genre_Song]
-- Song -> Artist edges carry no payload; a single unit index.
instance G.Edge (Identified Song) (Identified Artist) where
  data Index (Identified Song) (Identified Artist) =
    Song_Artist
    deriving (Eq, Generic)
  indexes _ = [Song_Artist]
-- Boilerplate
-------------------------
-- Template Haskell: generates the graph setup for the Catalogue root.
G.deriveSetup ''Catalogue
instance (Hashable a) => Hashable (UID a)
instance (Serializable m a) => Serializable m (UID a)
|
nikita-volkov/graph-db
|
executables/InternalTests/MacrosTests.hs
|
mit
| 1,894 | 0 | 10 | 321 | 676 | 359 | 317 | -1 | -1 |
module Graphics.CG.Draw.Lines (drawPath, drawClosedLines) where
import Control.Lens
import Graphics.Gloss
-- | Safe head-as-list: the first element as a singleton list, or the
-- empty list for empty input. This is exactly the standard 'take 1'.
myHead :: [a] -> [a]
myHead = take 1
-- | Render an open polyline through the given points in white.
drawPath :: [Vector] -> Picture
drawPath points = Color white (Line points)
-- | Render a closed polyline in white: the first point is appended to
-- the end so the last segment returns to the start.
drawClosedLines :: [Vector] -> Picture
drawClosedLines points = Color white (Line closed)
  where
    closed = points ++ myHead points
|
jagajaga/CG-Haskell
|
Graphics/CG/Draw/Lines.hs
|
mit
| 359 | 0 | 8 | 82 | 140 | 78 | 62 | 11 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
module Distill.Expr.TypeCheck
( TCM
, Renamer
, runTCM
, assumeIn
, assumesIn
, defineIn
, definesIn
, checkType
, inferType
, checkEqual
, normalize
, renumber
, renumber'
, renumberDecls
, subst
) where
import Control.Arrow hiding ((<+>))
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.State
import Data.Functor.Foldable hiding (Foldable, Unfoldable, Mu)
import Data.Maybe (fromJust)
import Text.PrettyPrint
import Distill.Expr.Syntax
import Distill.Expr.Representation
import Distill.Util
-- | The monad used for type checking.
--
-- The reader environment carries (assumptions, definitions): typing
-- assumptions for free variables and definitions of constants that may
-- need to be unfolded during normalization. The state is the remaining
-- renamer supply; errors are plain strings.
newtype TCM b a = TCM { unTCM ::
        ReaderT ([(b, Type' b)], [(b, Expr' b)])
            (StateT (Renamer b)
                (Either String))
            a
    } deriving ( Applicative, Functor, Monad
               , MonadReader ([(b, Type' b)], [(b, Expr' b)])
               , MonadState (Renamer b)
               , MonadError String
               )
-- | The type of variable renaming functions. Each function will only be used
-- once and the list should be infinite.
type Renamer b = [b -> b]
-- | Run the type checking monad with an empty environment (no
-- assumptions, no definitions) and the supplied renamer supply.
runTCM :: Renamer b -> TCM b a -> Either String a
runTCM renamer (TCM action) =
    evalStateT (runReaderT action ([], [])) renamer
-- | Assume a variable is a given type while type checking a certain piece of
-- code. This is useful for introducing abstractions such as in lambdas and
-- dependent products (called 'Forall' here).
assumeIn :: b -> Type' b -> TCM b a -> TCM b a
assumeIn x t = local (\(assumptions, definitions) -> ((x, t) : assumptions, definitions))
-- | Plural version of 'assumeIn'.
assumesIn :: [(b, Type' b)] -> TCM b a -> TCM b a
assumesIn newAssumes = local (\(assumptions, definitions) -> (newAssumes ++ assumptions, definitions))
-- | Get the current set of assumptions.
getAssumptions :: TCM b [(b, Type' b)]
getAssumptions = asks fst
-- | Provide a definition of a constant while type checking a certain piece of
-- code. This is useful for introducing functions that may appear in types
-- and so need to be normalized.
defineIn :: b -> Expr' b -> TCM b a -> TCM b a
defineIn x m = local (\(assumptions, definitions) -> (assumptions, (x, m) : definitions))
-- | Plural version of 'defineIn'.
definesIn :: [(b, Expr' b)] -> TCM b a -> TCM b a
definesIn newDefs = local (\(assumptions, definitions) -> (assumptions, newDefs ++ definitions))
-- | Get the current set of definitions.
getDefinitions :: TCM b [(b, Expr' b)]
getDefinitions = asks snd
-- | Check that an expression has a certain type. If the expression is not the
-- given type an error will be generated
checkType :: (Pretty b, Eq b) => Expr' b -> Type' b -> TCM b ()
checkType expr type_ = inferType expr >>= checkEqual type_
-- | Infer the type of an expression, if possible. If a type cannot be
-- inferred an error will be generated.
--
-- One case per constructor of 'Expr''; binders extend the environment via
-- 'assumeIn'/'defineIn' before recursing under them.
inferType :: (Pretty b, Eq b) => Expr' b -> TCM b (Type' b)
inferType expr = case expr of
    Var x -> do
        assumed <- getAssumptions
        case lookup x assumed of
            Just t -> return t
            Nothing -> throwError $ "Unbound variable '" ++ prettyShow x ++ "'."
    Star -> return Star
    Let x m n -> do
        t <- inferType m
        assumeIn x t $ defineIn x m $ inferType n
    Forall x t s -> do
        checkType t Star
        assumeIn x t $ checkType s Star
        return Star
    Lambda x t m -> do
        checkType t Star
        s <- assumeIn x t $ inferType m
        return (Forall x t s)
    Apply m n -> do
        -- The applied expression must normalize to a dependent product;
        -- the result type substitutes the argument for the binder.
        Forall x t s <- inferType m >>= normalize >>= \case
            correct@Forall{} -> return correct
            incorrect -> throwError $
                "Cannot apply to non-function type '" ++ prettyShow incorrect
                ++ "'."
        checkType n t
        subst x n s
    Mu x t s -> do
        assumeIn x t $ checkType s t
        return t
    Fold m foldedType -> do
        Mu x t s <- normalize foldedType >>= \case
            correct@Mu{} -> return correct
            incorrect -> throwError $
                "Cannot fold into non-mu type '" ++ prettyShow incorrect ++ "'."
        unfoldedType <- subst x (Mu x t s) s
        checkType m unfoldedType
        return foldedType
    Unfold m -> do
        Mu x t s <- inferType m >>= normalize >>= \case
            correct@Mu{} -> return correct
            incorrect -> throwError $
                "Cannot unfold non-mu type '" ++ prettyShow incorrect ++ "'."
        subst x (Mu x t s) s
    UnitT -> return Star
    UnitV -> return UnitT
    Product x t s -> do
        checkType t Star
        assumeIn x t $ checkType s Star
        return Star
    Pack x m n s -> do
        t <- inferType m
        assumeIn x t $ checkType s Star
        case s of
            -- With no annotation, infer the second component's type
            -- directly; otherwise check it against the annotation.
            UnknownType -> do
                s <- inferType n
                return (Product x t s)
            _ -> do
                s' <- inferType n
                assumeIn x t $ defineIn x m $ checkEqual s' s
                return (Product x t s)
    Unpack m x y n -> do
        -- The bind is also x, so no need to re-mention it.
        Product _ t s <- inferType m >>= normalize >>= \case
            correct@(Product z _ _) -> subst z (Var x) correct
            incorrect -> throwError $
                "Cannot unpack non-product type '" ++ prettyShow incorrect
                ++ "'."
        assumeIn x t $ assumeIn y s $ inferType n
    Coproduct ts -> do
        mapM_ (flip checkType Star) ts
        return Star
    Inject m i t -> do
        Coproduct ts <- normalize t >>= \case
            correct@Coproduct{} -> return correct
            incorrect -> throwError $
                "Cannot inject into non-coproduct type '"
                ++ prettyShow incorrect ++ "'."
        when (length ts <= i) $ throwError $
            "Cannot inject into #" ++ show i ++ " of coproduct type '"
            ++ prettyShow (Coproduct ts) ++ "', which only has "
            ++ show (length ts) ++ " summands."
        flip checkEqual (ts !! i) =<< inferType m
        return (Coproduct ts)
    CaseOf m cs -> do
        Coproduct ts <- inferType m >>= normalize >>= \case
            correct@Coproduct{} -> return correct
            incorrect -> throwError $
                "Cannot perform case analysis on non-coproduct type '"
                ++ prettyShow incorrect ++ "'."
        when (length ts /= length cs) $ throwError $
            "Case analysis is non-exhaustive."
        when (null ts) $ throwError $
            "Case analysis must have at least one case."
        branchTypes <- forM (zip ts cs) $ \(t, (x, c)) ->
            assumeIn x t $ inferType c
        -- 'head' is safe: the null check above guarantees at least one
        -- branch type.
        let resultType = head branchTypes
        forM_ branchTypes (checkEqual resultType)
            `catchError` (\err -> throwError $ err ++ "\n\t"
                ++ "While checking all case branches have the same type ("
                ++ prettyShow resultType ++ ").")
        return resultType
    CaseOfFalse m t -> do
        inferType m >>= normalize >>= \case
            Coproduct [] -> return ()
            incorrect -> throwError $
                "The principle of explosion can only be applied to the empty "
                ++ "coproduct; was supplied instead with '"
                ++ prettyShow incorrect ++ "'."
        checkType t Star
        return t
    AnnotSource m s ->
        -- Decorate any error coming from below with the source position.
        catchError
            (inferType m)
            (\err -> throwError $ err ++ "\n\t At [" ++ show (sourceStartLine s)
                ++ ":" ++ show (sourceStartCol s) ++ "]")
    UnknownType ->
        throwError $ "Cannot infer the type of an unknown type."
-- | Check that two expressions are equal up to beta reduction. If they are
-- not, an error will be generated.
--
-- Both sides are normalized once up front; 'checkEqual'' then compares
-- them structurally, renaming binders on one side to match the other.
checkEqual :: (Eq b, Pretty b) => Expr' b -> Expr' b -> TCM b ()
checkEqual expr1 expr2 = do
    expr1' <- normalize expr1
    expr2' <- normalize expr2
    checkEqual' expr1' expr2'
  where
    checkEqual' :: (Eq b, Pretty b) => Expr' b -> Expr' b -> TCM b ()
    checkEqual' expr1 expr2 =
        case (ignoringSource expr1, ignoringSource expr2) of
            (Var x, Var y) | x == y ->
                return ()
            (Star, Star) ->
                return ()
            (Let x m n, Let y o p) ->
                abstraction (x, m, n) (y, o, p)
            (Forall x t s, Forall y r q) ->
                abstraction (x, t, s) (y, r, q)
            (Lambda x t m, Lambda y s n) ->
                abstraction (x, t, m) (y, s, n)
            (Apply m n, Apply o p) -> do
                checkEqual' m o
                checkEqual' n p
            (Mu x t s, Mu y r q) -> do
                abstraction (x, t, s) (y, r, q)
            (Fold m t, Fold n s) -> do
                checkEqual' m n
                checkEqual' t s
            (Unfold m, Unfold n) ->
                checkEqual' m n
            (UnitT, UnitT) ->
                return ()
            (UnitV, UnitV) ->
                return ()
            (Product x t s, Product y r q) ->
                abstraction (x, t, s) (y, r, q)
            (Pack x m n t, Pack y o p s) -> do
                checkEqual' m o
                checkEqual' n p
                checkEqual' t =<< renameVar y x s
            (Unpack m x y n, Unpack o a b p) -> do
                checkEqual' m o
                -- Rename both binders of the right-hand unpack to the
                -- left-hand names before comparing the bodies.
                checkEqual' n =<< renameVar a x =<< renameVar b y p
            (Coproduct ts, Coproduct ss) -> do
                when (length ts /= length ss) $ throwError $ render $
                    text "Coproducts not equal because the number of their" <+>
                    text "summands is not equal." $$
                    nest 4 (ppr expr1) $$
                    nest 4 (ppr expr2)
                forM_ (zip ts ss) (uncurry checkEqual')
            (Inject m i t, Inject n j s) -> do
                checkEqual' m n
                when (i /= j) $ throwError $ render $
                    text "Cannot make injections equal because their" <+>
                    text "indices differ." $$
                    nest 4 (ppr expr1) $$
                    nest 4 (ppr expr2)
                checkEqual' t s
            (CaseOf m cs, CaseOf n ds) -> do
                checkEqual' m n
                -- NOTE(review): this message is missing the word "have"
                -- ("because they a different number of branches"); it is a
                -- runtime string, so it is left untouched here.
                when (length cs /= length ds) $ throwError $ render $
                    text "Cannot make case analyses equal because they" <+>
                    text "a different number of branches." $$
                    nest 4 (ppr expr1) $$
                    nest 4 (ppr expr2)
                forM_ (zip cs ds) $ \((x, m), (y, n)) ->
                    checkEqual' m =<< renameVar y x n
            (CaseOfFalse m t, CaseOfFalse n s) -> do
                checkEqual' m n
                checkEqual' t s
            (m, n) -> throwError $ render $
                text "Cannot make the following two types equal:" $$
                nest 4 (ppr m) $$
                nest 4 (ppr n)
      where
        -- Compare two binding forms: annotation first, then bodies after
        -- renaming the right binder to the left one.
        abstraction (x, t, m) (y, s, n) = do
            checkEqual' t s
            checkEqual' m =<< renameVar y x n
        renameVar x y m
            | x == y = return m
            | otherwise = subst x (Var y) m
-- | Reduce an expression up to normal form. May generate an error if
-- erroneous reductions would occur, such as applying an argument to a
-- non-function type.
--
-- Implemented as a catamorphism: redexes (beta, let, unfold-of-fold,
-- unpack-of-pack, case-of-inject) are contracted and re-normalized;
-- everything else is rebuilt unchanged. Variables bound by 'defineIn'
-- are unfolded to their definitions.
normalize :: (Eq b, Pretty b) => Expr' b -> TCM b (Expr' b)
normalize = cata $ \case
    VarF x -> do
        definitions <- getDefinitions
        case lookup x definitions of
            Just m -> normalize m
            Nothing -> return (Var x)
    LetF x m n -> do
        m' <- m
        n' <- n
        normalize =<< subst x m' n'
    ApplyF m n ->
        m >>= \case
            Lambda x _ p -> do
                n' <- n
                normalize =<< subst x n' p
            m' -> Apply m' <$> n
    UnfoldF m ->
        m >>= \case
            Fold n _ -> return n
            m' -> return (Unfold m')
    UnpackF m x y n ->
        m >>= \case
            Pack _ o p _ -> normalize =<< subst x o =<< subst y p =<< n
            m' -> Unpack m' x y <$> n
    CaseOfF m cs ->
        m >>= \case
            Inject n i _ -> do
                when (length cs <= i) $ throwError $
                    "Non-exhaustive case analysis during normalization."
                let (x, c) = cs !! i
                normalize =<< subst x n =<< c
            m' -> CaseOf m' <$> sequence (map sequence cs)
    -- Source annotations are stripped during normalization.
    AnnotSourceF m _ -> m
    expr -> embed <$> sequence expr
-- | Renumber the identifiers in an expression such that they are unique. No
-- free variables should exist in the expression - if they do an exception may
-- be thrown.
renumber :: Eq b => (b -> Int -> b') -> Expr' b -> Expr' b'
renumber ctor = fst . renumber' ctor 0 []
-- | 'renumber', but with the ability to specify a starting index for renaming
-- and a set of already renumbered names. Also returns the next available index
-- for renumbering; this is useful for subsequent calls.
--
-- Reader carries the binder-to-new-name mapping; State carries the next
-- fresh index. 'fromJust' in the Var case is what makes free variables an
-- error, as documented on 'renumber'.
renumber' :: Eq b => (b -> Int -> b') -> Int -> [(b, b')] -> Expr' b
    -> (Expr' b', Int)
renumber' ctor start rebound =
    flip runState start . flip runReaderT rebound . cata phi
  where
    phi = \case
        VarF x -> Var . fromJust . lookup x <$> ask
        StarF -> return Star
        LetF x m n -> abstraction Let x m n
        ForallF x t s -> abstraction Forall x t s
        LambdaF x t m -> abstraction Lambda x t m
        ApplyF m n -> Apply <$> m <*> n
        MuF x t s -> abstraction Mu x t s
        FoldF m t -> Fold <$> m <*> t
        UnfoldF m -> Unfold <$> m
        UnitTF -> return UnitT
        UnitVF -> return UnitV
        ProductF x t s -> abstraction Product x t s
        PackF x m n s -> do
            x' <- gensym x
            Pack x' <$> m <*> n <*> local ((x,x'):) s
        UnpackF m x y n -> do
            x' <- gensym x
            y' <- gensym y
            Unpack <$> m <*> pure x' <*> pure y'
                <*> local (\xs -> (x,x'):(y,y'):xs) n
        CoproductF ts -> Coproduct <$> sequence ts
        InjectF m i t -> Inject <$> m <*> pure i <*> t
        CaseOfF m cs ->
            CaseOf <$> m <*> forM cs (\(x, c) -> do
                x' <- gensym x
                c' <- local ((x,x'):) c
                return (x', c'))
        CaseOfFalseF m t -> CaseOfFalse <$> m <*> t
        AnnotSourceF m s -> AnnotSource <$> m <*> pure s
        UnknownTypeF -> return UnknownType
    -- Allocate the next index and build the renamed binder.
    gensym old = ctor old <$> getAndModify succ
    -- Shared shape for single-binder forms: rename the binder, then
    -- renumber the body under the extended mapping.
    abstraction sort x m n = do
        x' <- gensym x
        sort x' <$> m <*> local ((x,x'):) n
-- | Renumber a set of potentially mutually recursive declarations. This will
-- preserve names at the top level.
--
-- The state is (next fresh index, accumulated top-level renaming); each
-- declaration's name is renamed first so later declarations (and its own
-- body) can refer to it.
renumberDecls :: Eq b => (b -> Int -> b') -> [Decl' b] -> [Decl' b']
renumberDecls ctor decls = evalState (mapM renumberDecl decls) (0, [])
  where
    renumberDecl (Decl' x t m) = do
        x' <- ctor x . fst <$> get
        modify (succ *** ((x,x'):))
        t' <- wrappedRenumber t
        m' <- wrappedRenumber m
        return (Decl' x' t' m')
    -- Run 'renumber'' with the current index/mapping and fold its
    -- next-index result back into the state.
    wrappedRenumber m = do
        (oldState, rebound) <- get
        let (result, newState) = renumber' ctor oldState rebound m
        modify (first (const newState))
        return result
-- | Substitute an identifier for an expression in another expression. In other
-- words, @subst x m n@ corresponds to n[x := m].
--
-- Binders that shadow the substituted name are first alpha-renamed with a
-- fresh name from the renamer supply, then substitution proceeds into the
-- renamed body.
--
-- NOTE(review): binders that do /not/ equal @z@ are not renamed, so a
-- replacement expression with free variables could be captured; this
-- presumably relies on callers supplying globally unique ('renumber'ed)
-- names -- TODO confirm.
subst :: (Eq b, Pretty b) => b -> Expr' b -> Expr' b -> TCM b (Expr' b)
subst z p = para $ \case
    VarF x | x == z -> return p
    LetF x m n | x == z -> abstraction Let x m n
    ForallF x t s | x == z -> abstraction Forall x t s
    LambdaF x t m | x == z -> abstraction Lambda x t m
    MuF x t s | x == z -> abstraction Mu x t s
    ProductF x t s | x == z -> abstraction Product x t s
    PackF x m n s | x == z -> do
        x' <- gensym x
        m' <- snd m
        n' <- snd n
        s' <- subst z p =<< subst x (Var x') (fst s)
        return (Pack x' m' n' s')
    UnpackF m x y n | x == z || y == z -> do
        m' <- snd m
        x' <- gensym x
        y' <- gensym y
        n' <- subst z p =<< subst x (Var x') =<< subst y (Var y') (fst n)
        return (Unpack m' x' y' n')
    CaseOfF m cs -> do
        CaseOf <$> (snd m) <*> forM cs (\(x, c) -> if x == z
            then do
                x' <- gensym x
                c' <- subst z p =<< subst x (Var x') (fst c)
                return (x', c')
            else sequence (x, snd c))
    expr -> embed <$> (sequence (snd <$> expr))
  where
    -- Draw the next renaming function from the supply and apply it.
    gensym x = ($ x) <$> head <$> getAndModify tail
    abstraction sort x m n = do
        x' <- gensym x
        m' <- snd m
        n' <- subst z p =<< subst x (Var x') (fst n)
        return (sort x' m' n')
|
DNoved1/distill
|
src/Distill/Expr/TypeCheck.hs
|
mit
| 16,761 | 0 | 22 | 6,274 | 5,773 | 2,780 | 2,993 | 376 | 29 |
module Crypto.Cipher.ECDSA.Math where
-- | A point on an elliptic curve over a prime field: an affine coordinate
-- pair, or 'PointO', the point at infinity (the group identity).
data Point = Point Integer Integer
           | PointO
           deriving (Show, Eq)
-- |y^2 = x^3 + a*x + b mod P
data Curve = Curve Integer Integer Integer
             --Integer -- ^a
             --Integer -- ^b
             --Integer -- ^P
             deriving (Show)
type PrivateKey = Integer
type PublicKey = Point
-- |Calculate a / b (mod p): multiply a by the modular inverse of b.
-- Nothing when b has no inverse modulo p.
modDiv :: Integer -> Integer -> Integer -> Maybe Integer
modDiv a b p = fmap multiplyIn (modInv b p)
  where
    multiplyIn bInv = (a * bInv) `mod` p
-- | Modular inverse of @a@ modulo @m@ via the extended Euclidean
-- algorithm; Nothing when gcd a m /= 1.
modInv :: Integer -> Integer -> Maybe Integer
modInv a m =
    case egcd a m of
        (1, coefA, _) -> Just (coefA `mod` m)
        _             -> Nothing
-- | Extended Euclidean algorithm: @egcd a b == (g, x, y)@ with
-- @a*x + b*y == g@ where g is the gcd.
egcd :: Integer -> Integer -> (Integer, Integer, Integer)
egcd a 0 = (a, 1, 0)
egcd a b = (g, t, s - (a `div` b) * t)
  where
    (g, s, t) = egcd b (a `mod` b)
-- |Elliptic curve point addition.
--
-- Bug fix: the old inverse check @p == Point xq (-yq)@ could essentially
-- never fire, because coordinates are kept reduced mod pr (non-negative)
-- while @-yq@ is negative; adding a point to its inverse then fell into
-- the chord formula, where @modDiv@ by @xp - xq == 0@ failed and the sum
-- came back Nothing instead of the point at infinity. The check is now
-- done modulo the field prime: P + (-P) = PointO whenever xp == xq and
-- yp + yq == 0 (mod pr). This also covers doubling a 2-torsion point
-- (y == 0), which correctly yields PointO.
pointAdd :: Curve -> Point -> Point -> Maybe Point
pointAdd c p q
    | p == PointO = Just q
    | q == PointO = Just p
    -- q is the inverse of p: the sum is the point at infinity.
    | xp == xq && (yp + yq) `mod` pr == 0 = Just PointO
    | p == q = pointDouble c p
    | otherwise = do
        -- Chord slope; xp /= xq here, so the division is well-defined.
        l <- modDiv (yp - yq) (xp - xq) pr
        let xr = (l ^ 2 - xp - xq) `mod` pr
        let yr = (l * (xp - xr) - yp) `mod` pr
        return (Point xr yr)
    where
        (Curve _ _ pr) = c
        (Point xp yp) = p
        (Point xq yq) = q
-- | Elliptic curve point doubling.
--
-- Bug fix: doubling a point with y ≡ 0 (mod pr) — a 2-torsion point — is
-- the point at infinity, but the old code attempted the tangent-slope
-- division by 2*y = 0 and returned Nothing (conflating a mathematically
-- defined result with arithmetic failure). Such points are now handled
-- explicitly.
pointDouble :: Curve -> Point -> Maybe Point
pointDouble c p
    | p == PointO = Just PointO
    -- Tangent is vertical when y == 0: 2P is the point at infinity.
    | yp `mod` pr == 0 = Just PointO
    | otherwise = do
        -- Tangent slope of the curve at p.
        l <- modDiv (3 * xp ^ 2 + a) (2 * yp) pr
        let xr = (l ^ 2 - 2 * xp) `mod` pr
        let yr = (l * (xp - xr) - yp) `mod` pr
        return (Point xr yr)
    where
        (Curve a _ pr) = c
        (Point xp yp) = p
-- | Scalar multiplication n*P by recursive double-and-add: peel one
-- addition off odd scalars, halve even scalars after doubling the point.
pointMul :: Curve -> Integer -> Point -> Maybe Point
pointMul c n p
    | n == 0 = return PointO
    | odd n = pointMul c (n - 1) p >>= pointAdd c p
    | otherwise = pointDouble c p >>= pointMul c (n `div` 2)
-- |Check that a point satisfies the elliptic curve equation. The point at
-- infinity lies on every curve. (The old version had no 'PointO' case and
-- crashed with a pattern-match failure when given one.)
onCurve :: Curve -> Point -> Bool
onCurve _ PointO = True
onCurve cp@(Curve _ _ p) (Point x y) =
    (y ^ 2) `mod` p == calcCurve cp x
-- | Right-hand side of the curve equation: x^3 + a*x + b (mod p).
calcCurve :: Curve -> Integer -> Integer
calcCurve (Curve a b p) x = (x ^ 3 + a * x + b) `mod` p
|
fhaust/bitcoin
|
src/Crypto/Cipher/ECDSA/Math.hs
|
mit
| 2,238 | 0 | 16 | 766 | 1,039 | 534 | 505 | 59 | 1 |
module Main where
-- | Entry point: print a fixed greeting to stdout.
-- (Added the conventionally expected top-level type signature.)
main :: IO ()
main = putStrLn "Hello World"
|
HSU-MilitaryLogisticsClub/ho-ver
|
src/imaging/main.hs
|
mit
| 48 | 0 | 5 | 8 | 12 | 7 | 5 | 2 | 1 |
module Test.SqlMarshaller
( sqlMarshallerTests,
)
where
import qualified Control.Monad.IO.Class as MIO
import qualified Data.Bifunctor as Bifunctor
import qualified Data.ByteString.Char8 as B8
import qualified Data.Either as Either
import qualified Data.Int as Int
import qualified Data.Set as Set
import qualified Data.String as String
import qualified Data.Text as T
import Hedgehog ((===))
import qualified Hedgehog as HH
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Orville.PostgreSQL.Internal.ErrorDetailLevel as ErrorDetailLevel
import qualified Orville.PostgreSQL.Internal.ExecutionResult as Result
import qualified Orville.PostgreSQL.Internal.FieldDefinition as FieldDefinition
import qualified Orville.PostgreSQL.Internal.MarshallError as MarshallError
import qualified Orville.PostgreSQL.Internal.SqlMarshaller as SqlMarshaller
import qualified Orville.PostgreSQL.Internal.SqlValue as SqlValue
import Test.Expr.TestSchema (assertEqualSqlRows)
import qualified Test.PgGen as PgGen
import qualified Test.Property as Property
-- | All SqlMarshaller properties, grouped for the test runner.
-- NOTE(review): "marshell" in the third entry is a long-standing typo in
-- the property's name; renaming it would have to happen here and at the
-- definition site together.
sqlMarshallerTests :: Property.Group
sqlMarshallerTests =
  Property.group
    "SqlMarshaller"
    [ property_returnPureValue
    , property_combineWithApplicative
    , property_marshellField_readSingleField
    , prop_marshallField_missingColumn
    , prop_marshallField_decodeValueFailure
    , prop_marshallResultFromSql_Foo
    , prop_marshallResultFromSql_Bar
    , prop_foldMarshallerFields
    , prop_passMaybeThrough
    , prop_partialMap
    ]
-- | A 'pure' marshaller applied to a single empty result row yields the
-- pure value, untouched by any columns.
property_returnPureValue :: Property.NamedProperty
property_returnPureValue =
  Property.namedProperty "Can read a pure Int via SqlMarshaller" $ do
    someInt <- HH.forAll generateInt
    result <- marshallTestRowFromSql (pure someInt) (Result.mkFakeLibPQResult [] [[]])
    Bifunctor.first show result === Right [someInt]
-- | Marshallers compose applicatively: @pure (+x) <*> pure y@ decodes to
-- @x + y@ on an empty row.
property_combineWithApplicative :: Property.NamedProperty
property_combineWithApplicative =
  Property.namedProperty "Can combine SqlMarshallers with <*>" $ do
    firstInt <- HH.forAll generateInt
    secondInt <- HH.forAll generateInt
    result <- marshallTestRowFromSql ((pure (+ firstInt)) <*> (pure secondInt)) (Result.mkFakeLibPQResult [] [[]])
    Bifunctor.first show result === Right [firstInt + secondInt]
-- | 'marshallField' picks out exactly the target column's value even when
-- the row contains arbitrary other columns before and after it.
-- (The "marshell" spelling in this name is a typo, kept because the
-- group list refers to it.)
property_marshellField_readSingleField :: Property.NamedProperty
property_marshellField_readSingleField =
  Property.namedProperty "Read a single field from a result row using marshallField" $ do
    targetName <- HH.forAll generateName
    targetValue <- HH.forAll generateInt32
    namesBefore <- HH.forAll (generateNamesOtherThan targetName)
    namesAfter <- HH.forAll (generateNamesOtherThan targetName)
    valuesBefore <- HH.forAll (generateAssociatedValues namesBefore generateInt32)
    valuesAfter <- HH.forAll (generateAssociatedValues namesAfter generateInt32)
    let fieldDef = FieldDefinition.integerField targetName
        marshaller = SqlMarshaller.marshallField id fieldDef
        -- Single-row result with the target column embedded in the middle.
        input =
          Result.mkFakeLibPQResult
            (map B8.pack (namesBefore ++ (targetName : namesAfter)))
            [map SqlValue.fromInt32 (valuesBefore ++ (targetValue : valuesAfter))]
    result <- marshallTestRowFromSql marshaller input
    Bifunctor.first show result === Right [targetValue]
-- | Decoding a column that is absent from the result produces a
-- 'MissingColumnError' naming the missing column and listing the actual
-- columns.
prop_marshallField_missingColumn :: Property.NamedProperty
prop_marshallField_missingColumn =
  Property.namedProperty "marshallField fails gracefully when decoding a non-existent column" $ do
    targetName <- HH.forAll generateName
    otherNames <- HH.forAll (generateNamesOtherThan targetName)
    otherValues <- HH.forAll (generateAssociatedValues otherNames generateInt32)
    let fieldDef = FieldDefinition.integerField targetName
        marshaller = SqlMarshaller.marshallField id fieldDef
        input =
          Result.mkFakeLibPQResult
            (map B8.pack otherNames)
            [map SqlValue.fromInt32 otherValues]
        expectedError =
          MarshallError.MarshallError
            { MarshallError.marshallErrorDetailLevel = ErrorDetailLevel.maximalErrorDetailLevel
            , MarshallError.marshallErrorRowIdentifier = mempty
            , MarshallError.marshallErrorDetails =
                MarshallError.MissingColumnError $
                  MarshallError.MissingColumnErrorDetails
                    { MarshallError.missingColumnName = B8.pack targetName
                    , MarshallError.actualColumnNames = Set.fromList $ fmap B8.pack otherNames
                    }
            }
    result <- marshallTestRowFromSql marshaller input
    -- Use show on the error here so MarshallError and friends don't
    -- need an Eq instance
    Bifunctor.first show result === Left (show expectedError)
-- | Feeding non-integer text to an integer field yields a 'DecodingError'
-- that identifies the offending column, rather than succeeding or raising
-- a different error.
prop_marshallField_decodeValueFailure :: Property.NamedProperty
prop_marshallField_decodeValueFailure =
  Property.namedProperty "marshallField fails gracefully when failing to decode a value" $ do
    targetName <- HH.forAll generateName
    nonIntegerText <- HH.forAll (Gen.text (Range.linear 0 10) Gen.alpha)
    let fieldDef = FieldDefinition.integerField targetName
        marshaller = SqlMarshaller.marshallField id fieldDef
        input =
          Result.mkFakeLibPQResult
            [B8.pack targetName]
            [[SqlValue.fromText nonIntegerText]]
    result <- marshallTestRowFromSql marshaller input
    case result of
      Right n -> do
        HH.annotateShow n
        HH.footnote "Expected decoding failure, but got success"
        HH.failure
      Left rowDecodeErr ->
        case MarshallError.marshallErrorDetails rowDecodeErr of
          MarshallError.DecodingError details ->
            map fst (MarshallError.decodingErrorValues details) === [B8.pack targetName]
          err -> do
            HH.annotate $ MarshallError.renderMarshallErrorDetails ErrorDetailLevel.maximalErrorDetailLevel err
            HH.footnote "Expected DecodingError error, but got another error instead."
            HH.failure
-- | Round-trip: encoding generated Foo values into a fake result set and
-- decoding through 'fooMarshaller' recovers the original list (including
-- the zero-row case).
prop_marshallResultFromSql_Foo :: Property.NamedProperty
prop_marshallResultFromSql_Foo =
  Property.namedProperty "marshallResultFromSql decodes all rows in Foo result set" $ do
    foos <- HH.forAll $ Gen.list (Range.linear 0 10) generateFoo
    let mkRowValues foo =
          [ SqlValue.fromText (fooName foo)
          , SqlValue.fromInt32 (fooSize foo)
          , maybe SqlValue.sqlNull SqlValue.fromBool (fooOption foo)
          ]
        input =
          Result.mkFakeLibPQResult
            [B8.pack "name", B8.pack "size", B8.pack "option"]
            (map mkRowValues foos)
    result <- marshallTestRowFromSql fooMarshaller input
    Bifunctor.first show result === Right foos
-- | Round-trip check: a fake libpq result built from generated 'Bar' values
-- should decode back to exactly those values via 'barMarshaller'.
prop_marshallResultFromSql_Bar :: Property.NamedProperty
prop_marshallResultFromSql_Bar =
  Property.namedProperty "marshallResultFromSql decodes all rows in Bar result set" $ do
    generatedBars <- HH.forAll $ Gen.list (Range.linear 0 10) generateBar
    let columnNames = map B8.pack ["number", "comment", "label"]
        -- Encode one Bar as a row of SqlValues, nullable fields becoming
        -- SQL NULL, in the same column order as columnNames
        barToRow bar =
          [ SqlValue.fromDouble (barNumber bar)
          , maybe SqlValue.sqlNull SqlValue.fromText (barComment bar)
          , maybe SqlValue.sqlNull SqlValue.fromText (barLabel bar)
          ]
        fakeResult =
          Result.mkFakeLibPQResult columnNames (fmap barToRow generatedBars)
    decoded <- marshallTestRowFromSql barMarshaller fakeResult
    -- show the error side so MarshallError does not need an Eq instance
    Bifunctor.first show decoded === Right generatedBars
-- | Verifies that 'SqlMarshaller.foldMarshallerFields' visits every natural
-- field of 'fooMarshaller' and that each field's name and encoded SqlValue
-- can be collected from the fold.
prop_foldMarshallerFields :: Property.NamedProperty
prop_foldMarshallerFields =
  Property.namedProperty "foldMarshallerFields collects all fields as their sql values" $ do
    foo <- HH.forAll generateFoo
    -- The fold callback: natural fields with an accessor contribute a
    -- (name, encoded value) pair; write-only natural fields and synthetic
    -- fields contribute nothing
    let addField entry fields =
          case entry of
            SqlMarshaller.Natural fieldDef (Just getValue) ->
              (FieldDefinition.fieldName fieldDef, FieldDefinition.fieldValueToSqlValue fieldDef (getValue foo)) : fields
            SqlMarshaller.Natural _ Nothing ->
              fields
            SqlMarshaller.Synthetic _ ->
              fields
        actualFooRow =
          SqlMarshaller.foldMarshallerFields
            fooMarshaller
            []
            addField
        -- Expected pairs mirror the three fields declared by fooMarshaller
        expectedFooRow =
          [ (FieldDefinition.stringToFieldName "name", SqlValue.fromText $ fooName foo)
          , (FieldDefinition.stringToFieldName "size", SqlValue.fromInt32 $ fooSize foo)
          , (FieldDefinition.stringToFieldName "option", maybe SqlValue.sqlNull SqlValue.fromBool $ fooOption foo)
          ]
    [actualFooRow] `assertEqualSqlRows` [expectedFooRow]
-- | A pure value embedded in a 'SqlMarshaller' via 'pure' should survive
-- decoding of a one-row, zero-column result set untouched.
prop_passMaybeThrough :: Property.NamedProperty
prop_passMaybeThrough =
  Property.namedProperty "can pass a Maybe through SqlMarshaller" $ do
    maybeBool <- HH.forAll (Gen.maybe Gen.bool)
    let emptyRowResult = Result.mkFakeLibPQResult [] [[]]
    decoded <- marshallTestRowFromSql (pure maybeBool) emptyRowResult
    Bifunctor.first show decoded === Right [maybeBool]
-- | Verifies that 'SqlMarshaller.marshallPartial' lets a custom validation
-- reject an otherwise-decodable value, and that the rejection surfaces as a
-- 'MarshallError.DecodingError' carrying the custom message and the raw
-- column value. Also checks (via 'HH.cover') that the generator exercises
-- both the all-valid and at-least-one-invalid cases.
--
-- Fix: the property description read "fail decoding with in a custom way";
-- dropped the stray "with".
prop_partialMap :: Property.NamedProperty
prop_partialMap =
  Property.namedProperty "can use marshallPartial to fail decoding in a custom way" $ do
    texts <- HH.forAll $ Gen.list (Range.linear 0 10) (PgGen.pgText (Range.linear 0 10))
    -- Validation rejects any text longer than 8 characters; generated texts
    -- are 0-10 characters long, so both outcomes are reachable
    let validateText text =
          if T.length text > 8
            then Left "Text too long"
            else Right text
        mkRowValues text =
          [ SqlValue.fromText text
          ]
        input =
          Result.mkFakeLibPQResult
            [B8.pack "text"]
            (map mkRowValues texts)
        marshaller =
          SqlMarshaller.marshallPartial $
            validateText
              <$> SqlMarshaller.marshallField id (FieldDefinition.unboundedTextField "text")
        -- Build the expected per-row outcome: success passes through, failure
        -- becomes the rendered MarshallError the marshaller should produce
        mkExpected text =
          case validateText text of
            Right validText ->
              Right validText
            Left message ->
              Left $
                -- Use show here to render the error so that MarshallError
                -- and friends don't need to have an Eq instance
                show $
                  MarshallError.MarshallError
                    { MarshallError.marshallErrorDetailLevel = ErrorDetailLevel.maximalErrorDetailLevel
                    , MarshallError.marshallErrorRowIdentifier = mempty
                    , MarshallError.marshallErrorDetails =
                        MarshallError.DecodingError $
                          MarshallError.DecodingErrorDetails
                            { MarshallError.decodingErrorValues = [(B8.pack "text", SqlValue.fromText text)]
                            , MarshallError.decodingErrorMessage = message
                            }
                    }
        expected =
          traverse mkExpected texts
    HH.cover 1 (String.fromString "With no errors") (Either.isRight expected)
    HH.cover 1 (String.fromString "With at least one error") (Either.isLeft expected)
    result <- marshallTestRowFromSql marshaller input
    Bifunctor.first show result === expected
-- | Test record exercising a text column, an integer column and a nullable
-- boolean column through 'fooMarshaller'.
data Foo = Foo
  { fooName :: T.Text
  -- ^ marshalled to/from the "name" column
  , fooSize :: Int.Int32
  -- ^ marshalled to/from the "size" column
  , fooOption :: Maybe Bool
  -- ^ marshalled to/from the nullable "option" column
  }
  deriving (Eq, Show)
-- | Test record exercising a double column and two nullable text columns
-- through 'barMarshaller'.
data Bar = Bar
  { barNumber :: Double
  -- ^ marshalled to/from the "number" column
  , barComment :: Maybe T.Text
  -- ^ marshalled to/from the nullable "comment" column
  , barLabel :: Maybe T.Text
  -- ^ marshalled to/from the nullable, bounded "label" column
  }
  deriving (Eq, Show)
-- | Marshaller mapping 'Foo' to the columns "name" (unbounded text),
-- "size" (integer) and "option" (nullable boolean). The applicative order
-- must match the 'Foo' constructor's field order.
fooMarshaller :: SqlMarshaller.SqlMarshaller Foo Foo
fooMarshaller =
  Foo
    <$> SqlMarshaller.marshallField fooName (FieldDefinition.unboundedTextField "name")
    <*> SqlMarshaller.marshallField fooSize (FieldDefinition.integerField "size")
    <*> SqlMarshaller.marshallField fooOption (FieldDefinition.nullableField $ FieldDefinition.booleanField "option")
-- | Hedgehog generator producing arbitrary 'Foo' values, with names of up to
-- 16 characters.
generateFoo :: HH.Gen Foo
generateFoo =
  Foo <$> genName <*> genSize <*> genOption
  where
    genName = PgGen.pgText (Range.linear 0 16)
    genSize = generateInt32
    genOption = Gen.maybe Gen.bool
-- | Marshaller mapping 'Bar' to the columns "number" (double), "comment"
-- (nullable unbounded text) and "label" (nullable text bounded at 16
-- characters). The applicative order must match the 'Bar' constructor.
barMarshaller :: SqlMarshaller.SqlMarshaller Bar Bar
barMarshaller =
  Bar
    <$> SqlMarshaller.marshallField barNumber (FieldDefinition.doubleField "number")
    <*> SqlMarshaller.marshallField barComment (FieldDefinition.nullableField $ FieldDefinition.unboundedTextField "comment")
    <*> SqlMarshaller.marshallField barLabel (FieldDefinition.nullableField $ FieldDefinition.boundedTextField "label" 16)
-- | Hedgehog generator producing arbitrary 'Bar' values. Note the label
-- range starts at 1, matching the bounded "label" column used by
-- 'barMarshaller'.
generateBar :: HH.Gen Bar
generateBar =
  Bar <$> genNumber <*> genComment <*> genLabel
  where
    genNumber = PgGen.pgDouble
    genComment = Gen.maybe (PgGen.pgText $ Range.linear 0 32)
    genLabel = Gen.maybe (PgGen.pgText $ Range.linear 1 16)
-- | Generate up to ten names, none of which equal the excluded name.
generateNamesOtherThan :: String -> HH.Gen [String]
generateNamesOtherThan excluded =
  Gen.list listSize (generateNameOtherThan excluded)
  where
    listSize = Range.linear 0 10
-- | Generate one value per key, so the produced list has exactly as many
-- elements as there are keys. The keys themselves are never inspected.
generateAssociatedValues :: [key] -> HH.Gen value -> HH.Gen [value]
generateAssociatedValues keys genValue =
  mapM (\_ -> genValue) keys
-- | Generate an 'Int.Int32' anywhere in its full range, using an exponential
-- distribution with origin 0.
generateInt32 :: HH.Gen Int.Int32
generateInt32 =
  Gen.int32 fullInt32Range
  where
    fullInt32Range = Range.exponentialFrom 0 minBound maxBound
-- | Generate a name guaranteed to differ from the given one, by filtering
-- 'generateName'.
generateNameOtherThan :: String -> HH.Gen String
generateNameOtherThan specialName =
  Gen.filter differsFromSpecial generateName
  where
    differsFromSpecial candidate = candidate /= specialName
-- | Generate a non-empty alphanumeric name of at most 256 characters.
generateName :: HH.Gen String
generateName =
  Gen.string nameLength Gen.alphaNum
  where
    nameLength = Range.linear 1 256
-- | Generate a positive 'Int' within an exponential range from 1 to 1024.
generateInt :: HH.MonadGen m => m Int
generateInt =
  Gen.int smallPositiveRange
  where
    smallPositiveRange = Range.exponential 1 1024
-- | Run a marshaller against a (possibly fake) execution result in IO,
-- yielding either the decoded rows or a 'MarshallError.MarshallError'.
-- Decoding always uses the maximal error detail level so that test failures
-- carry as much context as possible, and an identity extractor over no
-- fields is used since the fake results here carry no row identifiers.
marshallTestRowFromSql ::
  ( HH.MonadTest m
  , MIO.MonadIO m
  , Result.ExecutionResult result
  ) =>
  SqlMarshaller.SqlMarshaller writeEntity readEntity ->
  result ->
  m (Either MarshallError.MarshallError [readEntity])
marshallTestRowFromSql marshaller input =
  HH.evalIO $
    SqlMarshaller.marshallResultFromSqlUsingRowIdExtractor
      ErrorDetailLevel.maximalErrorDetailLevel
      (SqlMarshaller.mkRowIdentityExtractor [] input)
      marshaller
      input
|
flipstone/orville
|
orville-postgresql-libpq/test/Test/SqlMarshaller.hs
|
mit
| 13,412 | 0 | 23 | 2,946 | 3,018 | 1,536 | 1,482 | 277 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Lib
( startApp
) where
import Network.Wai
import Network.Wai.Handler.Warp
import Network.JSONApi.Document (Document)
import Servant
import Users
import qualified Users.Controller as Controller
-- | The service exposes three GET endpoints, each returning a JSON API
-- 'Document' of 'User': a users index, and per-user "simple" and "full"
-- show routes keyed by an integer id capture.
type API = "users" :> Get '[JSON] (Document User)
      :<|> "users" :> Capture "id" Int :> "simple" :> Get '[JSON] (Document User)
      :<|> "users" :> Capture "id" Int :> "full" :> Get '[JSON] (Document User)
-- | Boot the Warp server hosting 'app'.
startApp :: IO ()
startApp = run listenPort app
  where
    -- NOTE(review): the port is hard-coded; presumably acceptable for this
    -- example application
    listenPort = 8080
-- | The WAI application serving 'API' with the handlers in 'server'.
app :: Application
app = serve api server
-- | Value-level proxy tying the 'API' type to servant's routing functions.
api :: Proxy API
api = Proxy
-- | Handlers composed in the same order as the endpoints declared in 'API':
-- index, simple show, full show.
server :: Server API
server = Controller.usersIndex
    :<|> Controller.userShowSimple
    :<|> Controller.userShowFull
|
toddmohney/json-api
|
example/src/Lib.hs
|
mit
| 804 | 0 | 16 | 161 | 225 | 124 | 101 | 25 | 1 |
module Feature.QuerySpec where
import Test.Hspec hiding (pendingWith)
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.HTTP.Types
import Network.Wai.Test (SResponse(simpleHeaders,simpleStatus,simpleBody))
import qualified Data.ByteString.Lazy as BL (empty)
import SpecHelper
import Text.Heredoc
import Network.Wai (Application)
import Protolude hiding (get)
spec :: SpecWith Application
spec = do
describe "Querying a table with a column called count" $
it "should not confuse count column with pg_catalog.count aggregate" $
get "/has_count_column" `shouldRespondWith` 200
describe "Querying a table with a column called t" $
it "should not conflict with internal postgrest table alias" $
get "/clashing_column?select=t" `shouldRespondWith` 200
describe "Querying a nonexistent table" $
it "causes a 404" $
get "/faketable" `shouldRespondWith` 404
describe "Filtering response" $ do
it "matches with equality" $
get "/items?id=eq.5"
`shouldRespondWith` [json| [{"id":5}] |]
{ matchHeaders = ["Content-Range" <:> "0-0/*"] }
it "matches with equality using not operator" $
get "/items?id=not.eq.5"
`shouldRespondWith` [json| [{"id":1},{"id":2},{"id":3},{"id":4},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13},{"id":14},{"id":15}] |]
{ matchHeaders = ["Content-Range" <:> "0-13/*"] }
it "matches with more than one condition using not operator" $
get "/simple_pk?k=like.*yx&extra=not.eq.u" `shouldRespondWith` "[]"
it "matches with inequality using not operator" $ do
get "/items?id=not.lt.14&order=id.asc"
`shouldRespondWith` [json| [{"id":14},{"id":15}] |]
{ matchHeaders = ["Content-Range" <:> "0-1/*"] }
get "/items?id=not.gt.2&order=id.asc"
`shouldRespondWith` [json| [{"id":1},{"id":2}] |]
{ matchHeaders = ["Content-Range" <:> "0-1/*"] }
it "matches items IN" $
get "/items?id=in.1,3,5"
`shouldRespondWith` [json| [{"id":1},{"id":3},{"id":5}] |]
{ matchHeaders = ["Content-Range" <:> "0-2/*"] }
it "matches items NOT IN" $
get "/items?id=notin.2,4,6,7,8,9,10,11,12,13,14,15"
`shouldRespondWith` [json| [{"id":1},{"id":3},{"id":5}] |]
{ matchHeaders = ["Content-Range" <:> "0-2/*"] }
it "matches items NOT IN using not operator" $
get "/items?id=not.in.2,4,6,7,8,9,10,11,12,13,14,15"
`shouldRespondWith` [json| [{"id":1},{"id":3},{"id":5}] |]
{ matchHeaders = ["Content-Range" <:> "0-2/*"] }
it "matches nulls using not operator" $
get "/no_pk?a=not.is.null" `shouldRespondWith`
[json| [{"a":"1","b":"0"},{"a":"2","b":"0"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "matches nulls in varchar and numeric fields alike" $ do
get "/no_pk?a=is.null" `shouldRespondWith`
[json| [{"a": null, "b": null}] |]
{ matchHeaders = [matchContentTypeJson] }
get "/nullable_integer?a=is.null" `shouldRespondWith` [str|[{"a":null}]|]
it "matches with like" $ do
get "/simple_pk?k=like.*yx" `shouldRespondWith`
[str|[{"k":"xyyx","extra":"u"}]|]
get "/simple_pk?k=like.xy*" `shouldRespondWith`
[str|[{"k":"xyyx","extra":"u"}]|]
get "/simple_pk?k=like.*YY*" `shouldRespondWith`
[str|[{"k":"xYYx","extra":"v"}]|]
it "matches with like using not operator" $
get "/simple_pk?k=not.like.*yx" `shouldRespondWith`
[str|[{"k":"xYYx","extra":"v"}]|]
it "matches with ilike" $ do
get "/simple_pk?k=ilike.xy*&order=extra.asc" `shouldRespondWith`
[str|[{"k":"xyyx","extra":"u"},{"k":"xYYx","extra":"v"}]|]
get "/simple_pk?k=ilike.*YY*&order=extra.asc" `shouldRespondWith`
[str|[{"k":"xyyx","extra":"u"},{"k":"xYYx","extra":"v"}]|]
it "matches with ilike using not operator" $
get "/simple_pk?k=not.ilike.xy*&order=extra.asc" `shouldRespondWith` "[]"
it "matches with tsearch @@" $
get "/tsearch?text_search_vector=@@.foo" `shouldRespondWith`
[json| [{"text_search_vector":"'bar':2 'foo':1"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "matches with tsearch @@ using not operator" $
get "/tsearch?text_search_vector=not.@@.foo" `shouldRespondWith`
[json| [{"text_search_vector":"'baz':1 'qux':2"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "matches with computed column" $
get "/items?always_true=eq.true&order=id.asc" `shouldRespondWith`
[json| [{"id":1},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13},{"id":14},{"id":15}] |]
{ matchHeaders = [matchContentTypeJson] }
it "order by computed column" $
get "/items?order=anti_id.desc" `shouldRespondWith`
[json| [{"id":1},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13},{"id":14},{"id":15}] |]
{ matchHeaders = [matchContentTypeJson] }
it "matches filtering nested items 2" $
get "/clients?select=id,projects{id,tasks2{id,name}}&projects.tasks.name=like.Design*"
`shouldRespondWith` [json| {"message":"Could not find foreign keys between these entities, No relation found between projects and tasks2"}|]
{ matchStatus = 400
, matchHeaders = [matchContentTypeJson]
}
it "matches filtering nested items" $
get "/clients?select=id,projects{id,tasks{id,name}}&projects.tasks.name=like.Design*" `shouldRespondWith`
[str|[{"id":1,"projects":[{"id":1,"tasks":[{"id":1,"name":"Design w7"}]},{"id":2,"tasks":[{"id":3,"name":"Design w10"}]}]},{"id":2,"projects":[{"id":3,"tasks":[{"id":5,"name":"Design IOS"}]},{"id":4,"tasks":[{"id":7,"name":"Design OSX"}]}]}]|]
it "matches with @> operator" $
get "/complex_items?select=id&arr_data=@>.{2}" `shouldRespondWith`
[str|[{"id":2},{"id":3}]|]
it "matches with <@ operator" $
get "/complex_items?select=id&arr_data=<@.{1,2,4}" `shouldRespondWith`
[str|[{"id":1},{"id":2}]|]
describe "Shaping response with select parameter" $ do
it "selectStar works in absense of parameter" $
get "/complex_items?id=eq.3" `shouldRespondWith`
[str|[{"id":3,"name":"Three","settings":{"foo":{"int":1,"bar":"baz"}},"arr_data":[1,2,3],"field-with_sep":1}]|]
it "dash `-` in column names is accepted" $
get "/complex_items?id=eq.3&select=id,field-with_sep" `shouldRespondWith`
[str|[{"id":3,"field-with_sep":1}]|]
it "one simple column" $
get "/complex_items?select=id" `shouldRespondWith`
[json| [{"id":1},{"id":2},{"id":3}] |]
{ matchHeaders = [matchContentTypeJson] }
it "rename simple column" $
get "/complex_items?id=eq.1&select=myId:id" `shouldRespondWith`
[json| [{"myId":1}] |]
{ matchHeaders = [matchContentTypeJson] }
it "one simple column with casting (text)" $
get "/complex_items?select=id::text" `shouldRespondWith`
[json| [{"id":"1"},{"id":"2"},{"id":"3"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "rename simple column with casting" $
get "/complex_items?id=eq.1&select=myId:id::text" `shouldRespondWith`
[json| [{"myId":"1"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "json column" $
get "/complex_items?id=eq.1&select=settings" `shouldRespondWith`
[json| [{"settings":{"foo":{"int":1,"bar":"baz"}}}] |]
{ matchHeaders = [matchContentTypeJson] }
it "json subfield one level with casting (json)" $
get "/complex_items?id=eq.1&select=settings->>foo::json" `shouldRespondWith`
[json| [{"foo":{"int":1,"bar":"baz"}}] |] -- the value of foo here is of type "text"
{ matchHeaders = [matchContentTypeJson] }
it "rename json subfield one level with casting (json)" $
get "/complex_items?id=eq.1&select=myFoo:settings->>foo::json" `shouldRespondWith`
[json| [{"myFoo":{"int":1,"bar":"baz"}}] |] -- the value of foo here is of type "text"
{ matchHeaders = [matchContentTypeJson] }
it "fails on bad casting (data of the wrong format)" $
get "/complex_items?select=settings->foo->>bar::integer"
`shouldRespondWith` [json| {"hint":null,"details":null,"code":"22P02","message":"invalid input syntax for integer: \"baz\""} |]
{ matchStatus = 400
, matchHeaders = []
}
it "fails on bad casting (wrong cast type)" $
get "/complex_items?select=id::fakecolumntype"
`shouldRespondWith` [json| {"hint":null,"details":null,"code":"42704","message":"type \"fakecolumntype\" does not exist"} |]
{ matchStatus = 400
, matchHeaders = []
}
it "json subfield two levels (string)" $
get "/complex_items?id=eq.1&select=settings->foo->>bar" `shouldRespondWith`
[json| [{"bar":"baz"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "rename json subfield two levels (string)" $
get "/complex_items?id=eq.1&select=myBar:settings->foo->>bar" `shouldRespondWith`
[json| [{"myBar":"baz"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "json subfield two levels with casting (int)" $
get "/complex_items?id=eq.1&select=settings->foo->>int::integer" `shouldRespondWith`
[json| [{"int":1}] |] -- the value in the db is an int, but here we expect a string for now
{ matchHeaders = [matchContentTypeJson] }
it "rename json subfield two levels with casting (int)" $
get "/complex_items?id=eq.1&select=myInt:settings->foo->>int::integer" `shouldRespondWith`
[json| [{"myInt":1}] |] -- the value in the db is an int, but here we expect a string for now
{ matchHeaders = [matchContentTypeJson] }
it "requesting parents and children" $
get "/projects?id=eq.1&select=id, name, clients{*}, tasks{id, name}" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|]
it "embed data with two fk pointing to the same table" $
get "/orders?id=eq.1&select=id, name, billing_address_id{id}, shipping_address_id{id}" `shouldRespondWith`
[str|[{"id":1,"name":"order 1","billing_address_id":{"id":1},"shipping_address_id":{"id":2}}]|]
it "requesting parents and children while renaming them" $
get "/projects?id=eq.1&select=myId:id, name, project_client:client_id{*}, project_tasks:tasks{id, name}" `shouldRespondWith`
[str|[{"myId":1,"name":"Windows 7","project_client":{"id":1,"name":"Microsoft"},"project_tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|]
it "requesting parents two levels up while using FK to specify the link" $
get "/tasks?id=eq.1&select=id,name,project:project_id{id,name,client:client_id{id,name}}" `shouldRespondWith`
[str|[{"id":1,"name":"Design w7","project":{"id":1,"name":"Windows 7","client":{"id":1,"name":"Microsoft"}}}]|]
it "requesting parents two levels up while using FK to specify the link (with rename)" $
get "/tasks?id=eq.1&select=id,name,project:project_id{id,name,client:client_id{id,name}}" `shouldRespondWith`
[str|[{"id":1,"name":"Design w7","project":{"id":1,"name":"Windows 7","client":{"id":1,"name":"Microsoft"}}}]|]
it "requesting parents and filtering parent columns" $
get "/projects?id=eq.1&select=id, name, clients{id}" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","clients":{"id":1}}]|]
it "rows with missing parents are included" $
get "/projects?id=in.1,5&select=id,clients{id}" `shouldRespondWith`
[str|[{"id":1,"clients":{"id":1}},{"id":5,"clients":null}]|]
it "rows with no children return [] instead of null" $
get "/projects?id=in.5&select=id,tasks{id}" `shouldRespondWith`
[str|[{"id":5,"tasks":[]}]|]
it "requesting children 2 levels" $
get "/clients?id=eq.1&select=id,projects{id,tasks{id}}" `shouldRespondWith`
[str|[{"id":1,"projects":[{"id":1,"tasks":[{"id":1},{"id":2}]},{"id":2,"tasks":[{"id":3},{"id":4}]}]}]|]
it "requesting many<->many relation" $
get "/tasks?select=id,users{id}" `shouldRespondWith`
[str|[{"id":1,"users":[{"id":1},{"id":3}]},{"id":2,"users":[{"id":1}]},{"id":3,"users":[{"id":1}]},{"id":4,"users":[{"id":1}]},{"id":5,"users":[{"id":2},{"id":3}]},{"id":6,"users":[{"id":2}]},{"id":7,"users":[{"id":2}]},{"id":8,"users":[]}]|]
it "requesting many<->many relation with rename" $
get "/tasks?id=eq.1&select=id,theUsers:users{id}" `shouldRespondWith`
[str|[{"id":1,"theUsers":[{"id":1},{"id":3}]}]|]
it "requesting many<->many relation reverse" $
get "/users?select=id,tasks{id}" `shouldRespondWith`
[str|[{"id":1,"tasks":[{"id":1},{"id":2},{"id":3},{"id":4}]},{"id":2,"tasks":[{"id":5},{"id":6},{"id":7}]},{"id":3,"tasks":[{"id":1},{"id":5}]}]|]
it "requesting parents and children on views" $
get "/projects_view?id=eq.1&select=id, name, clients{*}, tasks{id, name}" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|]
it "requesting parents and children on views with renamed keys" $
get "/projects_view_alt?t_id=eq.1&select=t_id, name, clients{*}, tasks{id, name}" `shouldRespondWith`
[str|[{"t_id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"},"tasks":[{"id":1,"name":"Design w7"},{"id":2,"name":"Code w7"}]}]|]
it "requesting children with composite key" $
get "/users_tasks?user_id=eq.2&task_id=eq.6&select=*, comments{content}" `shouldRespondWith`
[str|[{"user_id":2,"task_id":6,"comments":[{"content":"Needs to be delivered ASAP"}]}]|]
it "detect relations in views from exposed schema that are based on tables in private schema and have columns renames" $
get "/articles?id=eq.1&select=id,articleStars{users{*}}" `shouldRespondWith`
[str|[{"id":1,"articleStars":[{"users":{"id":1,"name":"Angela Martin"}},{"users":{"id":2,"name":"Michael Scott"}},{"users":{"id":3,"name":"Dwight Schrute"}}]}]|]
it "can select by column name" $
get "/projects?id=in.1,3&select=id,name,client_id,client_id{id,name}" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","client_id":1,"client_id":{"id":1,"name":"Microsoft"}},{"id":3,"name":"IOS","client_id":2,"client_id":{"id":2,"name":"Apple"}}]|]
it "can select by column name sans id" $
get "/projects?id=in.1,3&select=id,name,client_id,client{id,name}" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","client_id":1,"client":{"id":1,"name":"Microsoft"}},{"id":3,"name":"IOS","client_id":2,"client":{"id":2,"name":"Apple"}}]|]
it "can detect fk relations through views to tables in the public schema" $
get "/consumers_view?select=*,orders_view{*}" `shouldRespondWith` 200
describe "ordering response" $ do
it "by a column asc" $
get "/items?id=lte.2&order=id.asc"
`shouldRespondWith` [json| [{"id":1},{"id":2}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-1/*"]
}
it "by a column desc" $
get "/items?id=lte.2&order=id.desc"
`shouldRespondWith` [json| [{"id":2},{"id":1}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-1/*"]
}
it "by a column with nulls first" $
get "/no_pk?order=a.nullsfirst"
`shouldRespondWith` [json| [{"a":null,"b":null},
{"a":"1","b":"0"},
{"a":"2","b":"0"}
] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-2/*"]
}
it "by a column asc with nulls last" $
get "/no_pk?order=a.asc.nullslast"
`shouldRespondWith` [json| [{"a":"1","b":"0"},
{"a":"2","b":"0"},
{"a":null,"b":null}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-2/*"]
}
it "by a column desc with nulls first" $
get "/no_pk?order=a.desc.nullsfirst"
`shouldRespondWith` [json| [{"a":null,"b":null},
{"a":"2","b":"0"},
{"a":"1","b":"0"}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-2/*"]
}
it "by a column desc with nulls last" $
get "/no_pk?order=a.desc.nullslast"
`shouldRespondWith` [json| [{"a":"2","b":"0"},
{"a":"1","b":"0"},
{"a":null,"b":null}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-2/*"]
}
it "by a json column property asc" $
get "/json?order=data->>id.asc" `shouldRespondWith`
[json| [{"data": {"id": 0}}, {"data": {"id": 1, "foo": {"bar": "baz"}}}, {"data": {"id": 3}}] |]
{ matchHeaders = [matchContentTypeJson] }
it "by a json column with two level property nulls first" $
get "/json?order=data->foo->>bar.nullsfirst" `shouldRespondWith`
[json| [{"data": {"id": 3}}, {"data": {"id": 0}}, {"data": {"id": 1, "foo": {"bar": "baz"}}}] |]
{ matchHeaders = [matchContentTypeJson] }
it "without other constraints" $
get "/items?order=id.asc" `shouldRespondWith` 200
it "ordering embeded entities" $
get "/projects?id=eq.1&select=id, name, tasks{id, name}&tasks.order=name.asc" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","tasks":[{"id":2,"name":"Code w7"},{"id":1,"name":"Design w7"}]}]|]
it "ordering embeded entities with alias" $
get "/projects?id=eq.1&select=id, name, the_tasks:tasks{id, name}&tasks.order=name.asc" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","the_tasks":[{"id":2,"name":"Code w7"},{"id":1,"name":"Design w7"}]}]|]
it "ordering embeded entities, two levels" $
get "/projects?id=eq.1&select=id, name, tasks{id, name, users{id, name}}&tasks.order=name.asc&tasks.users.order=name.desc" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","tasks":[{"id":2,"name":"Code w7","users":[{"id":1,"name":"Angela Martin"}]},{"id":1,"name":"Design w7","users":[{"id":3,"name":"Dwight Schrute"},{"id":1,"name":"Angela Martin"}]}]}]|]
it "ordering embeded parents does not break things" $
get "/projects?id=eq.1&select=id, name, clients{id, name}&clients.order=name.asc" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","clients":{"id":1,"name":"Microsoft"}}]|]
it "ordering embeded parents does not break things when using ducktape names" $
get "/projects?id=eq.1&select=id, name, client{id, name}&client.order=name.asc" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","client":{"id":1,"name":"Microsoft"}}]|]
describe "Accept headers" $ do
it "should respond an unknown accept type with 415" $
request methodGet "/simple_pk"
(acceptHdrs "text/unknowntype") ""
`shouldRespondWith` 415
it "should respond correctly to */* in accept header" $
request methodGet "/simple_pk"
(acceptHdrs "*/*") ""
`shouldRespondWith` 200
it "*/* should rescue an unknown type" $
request methodGet "/simple_pk"
(acceptHdrs "text/unknowntype, */*") ""
`shouldRespondWith` 200
it "specific available preference should override */*" $ do
r <- request methodGet "/simple_pk"
(acceptHdrs "text/csv, */*") ""
liftIO $ do
let respHeaders = simpleHeaders r
respHeaders `shouldSatisfy` matchHeader
"Content-Type" "text/csv; charset=utf-8"
it "honors client preference even when opposite of server preference" $ do
r <- request methodGet "/simple_pk"
(acceptHdrs "text/csv, application/json") ""
liftIO $ do
let respHeaders = simpleHeaders r
respHeaders `shouldSatisfy` matchHeader
"Content-Type" "text/csv; charset=utf-8"
it "should respond correctly to multiple types in accept header" $
request methodGet "/simple_pk"
(acceptHdrs "text/unknowntype, text/csv") ""
`shouldRespondWith` 200
it "should respond with CSV to 'text/csv' request" $
request methodGet "/simple_pk"
(acceptHdrs "text/csv; version=1") ""
`shouldRespondWith` "k,extra\nxyyx,u\nxYYx,v"
{ matchStatus = 200
, matchHeaders = ["Content-Type" <:> "text/csv; charset=utf-8"]
}
describe "Canonical location" $ do
it "Sets Content-Location with alphabetized params" $
get "/no_pk?b=eq.1&a=eq.1"
`shouldRespondWith` "[]"
{ matchStatus = 200
, matchHeaders = ["Content-Location" <:> "/no_pk?a=eq.1&b=eq.1"]
}
it "Omits question mark when there are no params" $ do
r <- get "/simple_pk"
liftIO $ do
let respHeaders = simpleHeaders r
respHeaders `shouldSatisfy` matchHeader
"Content-Location" "/simple_pk"
describe "jsonb" $ do
it "can filter by properties inside json column" $ do
get "/json?data->foo->>bar=eq.baz" `shouldRespondWith`
[json| [{"data": {"id": 1, "foo": {"bar": "baz"}}}] |]
{ matchHeaders = [matchContentTypeJson] }
get "/json?data->foo->>bar=eq.fake" `shouldRespondWith`
[json| [] |]
{ matchHeaders = [matchContentTypeJson] }
it "can filter by properties inside json column using not" $
get "/json?data->foo->>bar=not.eq.baz" `shouldRespondWith`
[json| [] |]
{ matchHeaders = [matchContentTypeJson] }
it "can filter by properties inside json column using ->>" $
get "/json?data->>id=eq.1" `shouldRespondWith`
[json| [{"data": {"id": 1, "foo": {"bar": "baz"}}}] |]
{ matchHeaders = [matchContentTypeJson] }
describe "remote procedure call" $ do
context "a proc that returns a set" $ do
it "returns paginated results" $
request methodPost "/rpc/getitemrange"
(rangeHdrs (ByteRangeFromTo 0 0)) [json| { "min": 2, "max": 4 } |]
`shouldRespondWith` [json| [{"id":3}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-0/*"]
}
it "includes total count if requested" $
request methodPost "/rpc/getitemrange"
(rangeHdrsWithCount (ByteRangeFromTo 0 0))
[json| { "min": 2, "max": 4 } |]
`shouldRespondWith` [json| [{"id":3}] |]
{ matchStatus = 206 -- it now knows the response is partial
, matchHeaders = ["Content-Range" <:> "0-0/2"]
}
it "returns proper json" $
post "/rpc/getitemrange" [json| { "min": 2, "max": 4 } |] `shouldRespondWith`
[json| [ {"id": 3}, {"id":4} ] |]
{ matchHeaders = [matchContentTypeJson] }
it "returns CSV" $
request methodPost "/rpc/getitemrange"
(acceptHdrs "text/csv")
[json| { "min": 2, "max": 4 } |]
`shouldRespondWith` "id\n3\n4"
{ matchStatus = 200
, matchHeaders = ["Content-Type" <:> "text/csv; charset=utf-8"]
}
context "unknown function" $
it "returns 404" $
post "/rpc/fakefunc" [json| {} |] `shouldRespondWith` 404
context "shaping the response returned by a proc" $ do
it "returns a project" $
post "/rpc/getproject" [json| { "id": 1} |] `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","client_id":1}]|]
it "can filter proc results" $
post "/rpc/getallprojects?id=gt.1&id=lt.5&select=id" [json| {} |] `shouldRespondWith`
[json|[{"id":2},{"id":3},{"id":4}]|]
{ matchHeaders = [matchContentTypeJson] }
it "can limit proc results" $
post "/rpc/getallprojects?id=gt.1&id=lt.5&select=id?limit=2&offset=1" [json| {} |]
`shouldRespondWith` [json|[{"id":3},{"id":4}]|]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "1-2/*"]
}
it "select works on the first level" $
post "/rpc/getproject?select=id,name" [json| { "id": 1} |] `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7"}]|]
context "foreign entities embedding" $ do
it "can embed if related tables are in the exposed schema" $
post "/rpc/getproject?select=id,name,client{id},tasks{id}" [json| { "id": 1} |] `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","client":{"id":1},"tasks":[{"id":1},{"id":2}]}]|]
it "cannot embed if the related table is not in the exposed schema" $
post "/rpc/single_article?select=*,article_stars{*}" [json|{ "id": 1}|]
`shouldRespondWith` 400
it "can embed if the related tables are in a hidden schema but exposed as views" $
post "/rpc/single_article?select=id,articleStars{userId}" [json|{ "id": 2}|]
`shouldRespondWith` [json|[{"id": 2, "articleStars": [{"userId": 3}]}]|]
{ matchHeaders = [matchContentTypeJson] }
context "a proc that returns an empty rowset" $
it "returns empty json array" $
post "/rpc/test_empty_rowset" [json| {} |] `shouldRespondWith`
[json| [] |]
{ matchHeaders = [matchContentTypeJson] }
context "proc return types" $ do
context "returns text" $ do
it "returns proper json" $
post "/rpc/sayhello" [json| { "name": "world" } |] `shouldRespondWith`
[json|"Hello, world"|]
{ matchHeaders = [matchContentTypeJson] }
it "can handle unicode" $
post "/rpc/sayhello" [json| { "name": "¥" } |] `shouldRespondWith`
[json|"Hello, ¥"|]
{ matchHeaders = [matchContentTypeJson] }
it "returns enum value" $
post "/rpc/ret_enum" [json|{ "val": "foo" }|] `shouldRespondWith`
[json|"foo"|]
{ matchHeaders = [matchContentTypeJson] }
it "returns domain value" $
post "/rpc/ret_domain" [json|{ "val": "8" }|] `shouldRespondWith`
[json|8|]
{ matchHeaders = [matchContentTypeJson] }
it "returns range" $
post "/rpc/ret_range" [json|{ "low": 10, "up": 20 }|] `shouldRespondWith`
[json|"[10,20)"|]
{ matchHeaders = [matchContentTypeJson] }
it "returns row of scalars" $
post "/rpc/ret_scalars" [json|{}|] `shouldRespondWith`
[json|[{"a":"scalars", "b":"foo", "c":1, "d":"[10,20)"}]|]
{ matchHeaders = [matchContentTypeJson] }
it "returns composite type in exposed schema" $
post "/rpc/ret_point_2d" [json|{}|] `shouldRespondWith`
[json|[{"x": 10, "y": 5}]|]
{ matchHeaders = [matchContentTypeJson] }
it "cannot return composite type in hidden schema" $
post "/rpc/ret_point_3d" [json|{}|] `shouldRespondWith` 401
it "returns single row from table" $
post "/rpc/single_article?select=id" [json|{"id": 2}|] `shouldRespondWith`
[json|[{"id": 2}]|]
{ matchHeaders = [matchContentTypeJson] }
it "returns null for void" $
post "/rpc/ret_void" [json|{}|] `shouldRespondWith`
[json|null|]
{ matchHeaders = [matchContentTypeJson] }
context "improper input" $ do
it "rejects unknown content type even if payload is good" $
request methodPost "/rpc/sayhello"
(acceptHdrs "audio/mpeg3") [json| { "name": "world" } |]
`shouldRespondWith` 415
it "rejects malformed json payload" $ do
p <- request methodPost "/rpc/sayhello"
(acceptHdrs "application/json") "sdfsdf"
liftIO $ do
simpleStatus p `shouldBe` badRequest400
isErrorFormat (simpleBody p) `shouldBe` True
it "treats simple plpgsql raise as invalid input" $ do
p <- post "/rpc/problem" "{}"
liftIO $ do
simpleStatus p `shouldBe` badRequest400
isErrorFormat (simpleBody p) `shouldBe` True
context "unsupported verbs" $ do
it "DELETE fails" $
request methodDelete "/rpc/sayhello" [] ""
`shouldRespondWith` 405
it "PATCH fails" $
request methodPatch "/rpc/sayhello" [] ""
`shouldRespondWith` 405
it "OPTIONS fails" $
-- TODO: should return info about the function
request methodOptions "/rpc/sayhello" [] ""
`shouldRespondWith` 405
it "GET fails with 405 on unknown procs" $
-- TODO: should this be 404?
get "/rpc/fake" `shouldRespondWith` 405
it "GET with 405 on known procs" $
get "/rpc/sayhello" `shouldRespondWith` 405
it "executes the proc exactly once per request" $ do
post "/rpc/callcounter" [json| {} |] `shouldRespondWith`
[json|1|]
{ matchHeaders = [matchContentTypeJson] }
post "/rpc/callcounter" [json| {} |] `shouldRespondWith`
[json|2|]
{ matchHeaders = [matchContentTypeJson] }
context "expects a single json object" $ do
it "does not expand posted json into parameters" $
request methodPost "/rpc/singlejsonparam"
[("Prefer","params=single-object")] [json| { "p1": 1, "p2": "text", "p3" : {"obj":"text"} } |] `shouldRespondWith`
[json| { "p1": 1, "p2": "text", "p3" : {"obj":"text"} } |]
{ matchHeaders = [matchContentTypeJson] }
it "accepts parameters from an html form" $
request methodPost "/rpc/singlejsonparam"
[("Prefer","params=single-object"),("Content-Type", "application/x-www-form-urlencoded")]
("integer=7&double=2.71828&varchar=forms+are+fun&" <>
"boolean=false&date=1900-01-01&money=$3.99&enum=foo") `shouldRespondWith`
[json| { "integer": "7", "double": "2.71828", "varchar" : "forms are fun"
, "boolean":"false", "date":"1900-01-01", "money":"$3.99", "enum":"foo" } |]
{ matchHeaders = [matchContentTypeJson] }
context "a proc that receives no parameters" $
it "interprets empty string as empty json object on a post request" $
post "/rpc/noparamsproc" BL.empty `shouldRespondWith`
[json| "Return value of no parameters procedure." |]
{ matchHeaders = [matchContentTypeJson] }
describe "weird requests" $ do
it "can query as normal" $ do
get "/Escap3e;" `shouldRespondWith`
[json| [{"so6meIdColumn":1},{"so6meIdColumn":2},{"so6meIdColumn":3},{"so6meIdColumn":4},{"so6meIdColumn":5}] |]
{ matchHeaders = [matchContentTypeJson] }
get "/ghostBusters" `shouldRespondWith`
[json| [{"escapeId":1},{"escapeId":3},{"escapeId":5}] |]
{ matchHeaders = [matchContentTypeJson] }
it "fails if an operator is not given" $
get "/ghostBusters?id=0" `shouldRespondWith` [json| {"details":"unexpected \"0\" expecting \"not\" or operator (eq, gt, ...)","message":"\"failed to parse filter (0)\" (line 1, column 1)"} |]
{ matchStatus = 400
, matchHeaders = [matchContentTypeJson]
}
it "will embed a collection" $
get "/Escap3e;?select=ghostBusters{*}" `shouldRespondWith`
[json| [{"ghostBusters":[{"escapeId":1}]},{"ghostBusters":[]},{"ghostBusters":[{"escapeId":3}]},{"ghostBusters":[]},{"ghostBusters":[{"escapeId":5}]}] |]
{ matchHeaders = [matchContentTypeJson] }
it "will embed using a column" $
get "/ghostBusters?select=escapeId{*}" `shouldRespondWith`
[json| [{"escapeId":{"so6meIdColumn":1}},{"escapeId":{"so6meIdColumn":3}},{"escapeId":{"so6meIdColumn":5}}] |]
{ matchHeaders = [matchContentTypeJson] }
describe "binary output" $ do
it "can query if a single column is selected" $
request methodGet "/images_base64?select=img&name=eq.A.png" (acceptHdrs "application/octet-stream") ""
`shouldRespondWith` "iVBORw0KGgoAAAANSUhEUgAAAB4AAAAeAQMAAAAB/jzhAAAABlBMVEUAAAD/AAAb/40iAAAAP0lEQVQI12NgwAbYG2AE/wEYwQMiZB4ACQkQYZEAIgqAhAGIKLCAEQ8kgMT/P1CCEUwc4IMSzA3sUIIdCHECAGSQEkeOTUyCAAAAAElFTkSuQmCC"
{ matchStatus = 200
, matchHeaders = ["Content-Type" <:> "application/octet-stream; charset=utf-8"]
}
it "fails if a single column is not selected" $ do
request methodGet "/images?select=img,name&name=eq.A.png" (acceptHdrs "application/octet-stream") ""
`shouldRespondWith` 406
request methodGet "/images?select=*&name=eq.A.png" (acceptHdrs "application/octet-stream") ""
`shouldRespondWith` 406
request methodGet "/images?name=eq.A.png" (acceptHdrs "application/octet-stream") ""
`shouldRespondWith` 406
it "concatenates results if more than one row is returned" $
request methodGet "/images_base64?select=img&name=in.A.png,B.png" (acceptHdrs "application/octet-stream") ""
`shouldRespondWith` "iVBORw0KGgoAAAANSUhEUgAAAB4AAAAeAQMAAAAB/jzhAAAABlBMVEUAAAD/AAAb/40iAAAAP0lEQVQI12NgwAbYG2AE/wEYwQMiZB4ACQkQYZEAIgqAhAGIKLCAEQ8kgMT/P1CCEUwc4IMSzA3sUIIdCHECAGSQEkeOTUyCAAAAAElFTkSuQmCCiVBORw0KGgoAAAANSUhEUgAAAB4AAAAeAQMAAAAB/jzhAAAABlBMVEX///8AAP94wDzzAAAAL0lEQVQIW2NgwAb+HwARH0DEDyDxwAZEyGAhLODqHmBRzAcn5GAS///A1IF14AAA5/Adbiiz/0gAAAAASUVORK5CYII="
{ matchStatus = 200
, matchHeaders = ["Content-Type" <:> "application/octet-stream; charset=utf-8"]
}
describe "HTTP request env vars" $ do
it "custom header is set" $
request methodPost "/rpc/get_guc_value"
[("Custom-Header", "test")]
[json| { "name": "request.header.custom-header" } |]
`shouldRespondWith`
[str|"test"|]
{ matchStatus = 200
, matchHeaders = [ matchContentTypeJson ]
}
it "standard header is set" $
request methodPost "/rpc/get_guc_value"
[("Origin", "http://example.com")]
[json| { "name": "request.header.origin" } |]
`shouldRespondWith`
[str|"http://example.com"|]
{ matchStatus = 200
, matchHeaders = [ matchContentTypeJson ]
}
it "current role is available as GUC claim" $
request methodPost "/rpc/get_guc_value" []
[json| { "name": "request.jwt.claim.role" } |]
`shouldRespondWith`
[str|"postgrest_test_anonymous"|]
{ matchStatus = 200
, matchHeaders = [ matchContentTypeJson ]
}
it "single cookie ends up as claims" $
request methodPost "/rpc/get_guc_value" [("Cookie","acookie=cookievalue")]
[json| {"name":"request.cookie.acookie"} |]
`shouldRespondWith`
[str|"cookievalue"|]
{ matchStatus = 200
, matchHeaders = []
}
it "multiple cookies ends up as claims" $
request methodPost "/rpc/get_guc_value" [("Cookie","acookie=cookievalue;secondcookie=anothervalue")]
[json| {"name":"request.cookie.secondcookie"} |]
`shouldRespondWith`
[str|"anothervalue"|]
{ matchStatus = 200
, matchHeaders = []
}
describe "values with quotes in IN and NOTIN operators" $ do
it "succeeds when only quoted values are present" $ do
get "/w_or_wo_comma_names?name=in.\"Hebdon, John\"" `shouldRespondWith`
[json| [{"name":"Hebdon, John"}] |]
{ matchHeaders = [matchContentTypeJson] }
get "/w_or_wo_comma_names?name=in.\"Hebdon, John\",\"Williams, Mary\",\"Smith, Joseph\"" `shouldRespondWith`
[json| [{"name":"Hebdon, John"},{"name":"Williams, Mary"},{"name":"Smith, Joseph"}] |]
{ matchHeaders = [matchContentTypeJson] }
get "/w_or_wo_comma_names?name=notin.\"Hebdon, John\",\"Williams, Mary\",\"Smith, Joseph\"" `shouldRespondWith`
[json| [{"name":"David White"},{"name":"Larry Thompson"}] |]
{ matchHeaders = [matchContentTypeJson] }
get "/w_or_wo_comma_names?name=not.in.\"Hebdon, John\",\"Williams, Mary\",\"Smith, Joseph\"" `shouldRespondWith`
[json| [{"name":"David White"},{"name":"Larry Thompson"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "succeeds w/ and w/o quoted values" $ do
get "/w_or_wo_comma_names?name=in.David White,\"Hebdon, John\"" `shouldRespondWith`
[json| [{"name":"Hebdon, John"},{"name":"David White"}] |]
{ matchHeaders = [matchContentTypeJson] }
get "/w_or_wo_comma_names?name=not.in.\"Hebdon, John\",Larry Thompson,\"Smith, Joseph\"" `shouldRespondWith`
[json| [{"name":"Williams, Mary"},{"name":"David White"}] |]
{ matchHeaders = [matchContentTypeJson] }
get "/w_or_wo_comma_names?name=notin.\"Hebdon, John\",David White,\"Williams, Mary\",Larry Thompson" `shouldRespondWith`
[json| [{"name":"Smith, Joseph"}] |]
{ matchHeaders = [matchContentTypeJson] }
it "checks well formed quoted values" $ do
get "/w_or_wo_comma_names?name=in.\"\"Hebdon, John\"" `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
get "/w_or_wo_comma_names?name=in.\"\"Hebdon, John\"\"Mary" `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
get "/w_or_wo_comma_names?name=in.Williams\"Hebdon, John\"" `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
describe "IN and NOT IN empty set" $ do
context "returns an empty result for IN when no value is present" $ do
it "works for integer" $
get "/items_with_different_col_types?int_data=in." `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "works for text" $
get "/items_with_different_col_types?text_data=in." `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "works for bool" $
get "/items_with_different_col_types?bool_data=in." `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "works for bytea" $
get "/items_with_different_col_types?bin_data=in." `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "works for char" $
get "/items_with_different_col_types?char_data=in." `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "works for date" $
get "/items_with_different_col_types?date_data=in." `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "works for real" $
get "/items_with_different_col_types?real_data=in." `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "works for time" $
get "/items_with_different_col_types?time_data=in." `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "returns all results for notin when no value is present" $
get "/items_with_different_col_types?int_data=notin.&select=int_data" `shouldRespondWith`
[json| [{int_data: 1}] |] { matchHeaders = [matchContentTypeJson] }
it "returns all results for not.in when no value is present" $
get "/items_with_different_col_types?int_data=not.in.&select=int_data" `shouldRespondWith`
[json| [{int_data: 1}] |] { matchHeaders = [matchContentTypeJson] }
it "returns an empty result ignoring spaces" $
get "/items_with_different_col_types?int_data=in. " `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "only returns an empty result set if the in value is empty" $
get "/items_with_different_col_types?int_data=in. ,3,4" `shouldRespondWith` 400
it "returns empty result when the in value is empty between parentheses" $
get "/items_with_different_col_types?int_data=in.()" `shouldRespondWith`
[json| [] |] { matchHeaders = [matchContentTypeJson] }
it "returns all results when the notin value is empty between parentheses" $ do
get "/items_with_different_col_types?int_data=notin.()&select=int_data" `shouldRespondWith`
[json| [{int_data: 1}] |] { matchHeaders = [matchContentTypeJson] }
get "/items_with_different_col_types?int_data=not.in.()&select=int_data" `shouldRespondWith`
[json| [{int_data: 1}] |] { matchHeaders = [matchContentTypeJson] }
describe "Transition to url safe characters" $ do
context "top level in operator" $ do
it "works with parentheses" $
get "/entities?id=in.(1,2,3)&select=id" `shouldRespondWith`
[json| [{"id": 1}, {"id": 2}, {"id": 3}] |] { matchHeaders = [matchContentTypeJson] }
it "works without parentheses" $
get "/entities?id=in.1,2,3&select=id" `shouldRespondWith`
[json| [{"id": 1}, {"id": 2}, {"id": 3}] |] { matchHeaders = [matchContentTypeJson] }
context "select query param" $ do
it "works with parentheses" $
get "/entities?id=eq.2&select=id,child_entities(id)" `shouldRespondWith`
[json| [{"id": 2, "child_entities": [{"id": 3}]}] |] { matchHeaders = [matchContentTypeJson] }
it "works with brackets" $
get "/entities?id=eq.2&select=id,child_entities{id}" `shouldRespondWith`
[json| [{"id": 2, "child_entities": [{"id": 3}]}] |] { matchHeaders = [matchContentTypeJson] }
|
Skyfold/postgrest
|
test/Feature/QuerySpec.hs
|
mit
| 41,445 | 0 | 22 | 8,987 | 6,147 | 3,495 | 2,652 | -1 | -1 |
{-# LANGUAGE Safe #-}
-- | Convenience umbrella module: re-exports the transactional channel
-- variants (broadcast, round and swap channels) from one import.
module Control.Concurrent.Transactional.Channel (
    module Control.Concurrent.Transactional.Channel.Broadcast,
    module Control.Concurrent.Transactional.Channel.Round,
    module Control.Concurrent.Transactional.Channel.Swap
) where
import Control.Concurrent.Transactional.Channel.Broadcast
import Control.Concurrent.Transactional.Channel.Round
import Control.Concurrent.Transactional.Channel.Swap
|
YellPika/Hannel
|
src/Control/Concurrent/Transactional/Channel.hs
|
mit
| 426 | 0 | 5 | 35 | 62 | 47 | 15 | 8 | 0 |
-- PrettyJSON.hs
module PrettyJSON
(
renderJValue
) where
import Numeric (showHex)
import Data.Char (ord)
import Data.Bits (shiftR, (.&.))
import SimpleJSON (JValue(..))
import Prettify (Doc, (<>), char, double, fsep, hcat, punctuate, text,
compact, pretty)
-- | Render a 'JValue' as a pretty-printable 'Doc'.
renderJValue :: JValue -> Doc
renderJValue value = case value of
    JBool True  -> text "true"
    JBool False -> text "false"
    JNull       -> text "null"
    JNumber num -> double num
    JString str -> string str
    JArray ary  -> series '[' ']' renderJValue ary
    JObject obj -> series '{' '}' field obj
  where
    -- Render one "name": value pair of an object.
    field (name, val) = string name <> text ": " <> renderJValue val
-- | Render a Haskell 'String' as a JSON string literal, escaping each
-- character as required.
string :: String -> Doc
string s = enclose '"' '"' (hcat (map oneChar s))
-- | Wrap a document between two delimiter characters.
enclose :: Char -> Char -> Doc -> Doc
enclose left right doc = char left <> doc <> char right
-- | Render a single character: use a short escape when one exists,
-- hex-escape control\/non-Latin-1 characters, otherwise emit it verbatim.
--
-- Fixes a missing argument: the original called @lookup simpleEscapes@
-- without the key, which does not typecheck.
oneChar :: Char -> Doc
oneChar c = case lookup c simpleEscapes of
              Just r -> text r
              Nothing | mustEscape c -> hexEscape c
                      | otherwise    -> char c
  where -- Control characters, DEL, and anything above Latin-1 need \uXXXX.
        mustEscape ch = ch < ' ' || ch == '\x7f' || ch > '\xff'
-- | Association list mapping each character that has a short JSON escape
-- to its two-character escape text, e.g. @'\n' -> "\\n"@.
simpleEscapes :: [(Char, String)]
simpleEscapes =
    [ (plain, ['\\', coded])
    | (plain, coded) <- zip "\b\n\f\r\t\\\"/" "bnfrt\\\"/" ]
-- | Render a code point below 0x10000 as a JSON @\\uXXXX@ escape,
-- zero-padding the hex digits to width four.
smallHex :: Int -> Doc
smallHex x = text "\\u" <> text padding <> text digits
  where
    digits  = showHex x ""
    padding = replicate (4 - length digits) '0'
-- | Render a code point offset (input is the code point minus 0x10000)
-- as a UTF-16 surrogate pair of @\\u@ escapes.
astral :: Int -> Doc
astral n = smallHex (0xd800 + hi) <> smallHex (0xdc00 + lo)
  where
    hi = (n `shiftR` 10) .&. 0x3ff
    lo = n .&. 0x3ff
-- | Hex-escape a character: a single @\\uXXXX@ escape inside the Basic
-- Multilingual Plane, a surrogate pair outside it.
hexEscape :: Char -> Doc
hexEscape c
    | codePoint < 0x10000 = smallHex codePoint
    | otherwise           = astral (codePoint - 0x10000)
  where
    codePoint = ord c
-- | Render a list of items between open\/close delimiters,
-- comma-separated and wrapped to fit the page width.
series :: Char -> Char -> (a -> Doc) -> [a] -> Doc
series open close item xs =
    enclose open close (fsep (punctuate (char ',') (map item xs)))
sammyd/Learning-Haskell
|
ch05/PrettyJSON.hs
|
mit
| 1,887 | 0 | 12 | 548 | 743 | 383 | 360 | 49 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Bitcoin.Api.Dump where
import Data.Aeson.Types (emptyArray)
import qualified Data.Bitcoin.Types as BT
import qualified Network.Bitcoin.Api.Internal as I
import qualified Network.Bitcoin.Api.Types as T
-- | Reveal the private key for an address held in the node's wallet by
-- issuing the @dumpprivkey@ RPC call.
getPrivateKey :: T.Client -> BT.Address -> IO BT.PrivateKey
getPrivateKey client addr = I.call client "dumpprivkey" [addr]
|
solatis/haskell-bitcoin-api
|
src/Network/Bitcoin/Api/Dump.hs
|
mit
| 455 | 0 | 9 | 90 | 104 | 64 | 40 | 10 | 1 |
module Types(
Matrix(..),
Point(..),
IntPoint,
GLPoint,
Line(..),
Viewport(..),
mkViewport,
translateMatrix,
scaleMatrix,
rotationMatrix
) where
import Graphics.Rendering.OpenGL.Raw.Core31
-- | A w-by-h matrix stored row-major as a list of rows.
data Matrix a = Matrix{ w :: Int, h :: Int, mdata :: [[a]] } deriving (Show, Eq)
-- | A 2D point as an (x, y) pair.
type Point a = (a, a)
-- | Point with integer coordinates.
type IntPoint = Point Int
-- | Point with OpenGL float coordinates.
type GLPoint = Point GLfloat
-- | A line segment between two endpoints.
-- NOTE(review): the fields p_x / p_y hold the two endpoints, not
-- coordinates, despite their names.
data Line a = Line {p_x :: Point a, p_y :: Point a} deriving (Show, Eq)
-- | Axis-aligned rectangular viewport; construct safely with 'mkViewport'.
data Viewport = Viewport {minX :: Int, minY :: Int, maxX :: Int, maxY :: Int} deriving (Show, Eq)
-- | Safe 'Viewport' constructor: succeeds only when the minimum corner
-- lies strictly below and to the left of the maximum corner.
mkViewport :: Int -> Int -> Int -> Int -> Maybe Viewport
mkViewport minx miny maxx maxy =
    if minx < maxx && miny < maxy
        then Just (Viewport minx miny maxx maxy)
        else Nothing
-- | 3x3 homogeneous 2D translation matrix moving points by (tx, ty).
-- (Adds the previously missing top-level type signature.)
translateMatrix :: Num a => a -> a -> Matrix a
translateMatrix tx ty = Matrix 3 3 [[1, 0, tx], [0, 1, ty], [0, 0, 1]]
-- | 3x3 homogeneous 2D scaling matrix with factors (sx, sy).
-- (Adds the previously missing top-level type signature.)
scaleMatrix :: Num a => a -> a -> Matrix a
scaleMatrix sx sy = Matrix 3 3 [[sx, 0, 0], [0, sy, 0], [0, 0, 1]]
-- | 3x3 homogeneous 2D rotation matrix for an angle given in degrees.
-- The angle is negated and converted to radians so that positive input
-- rotates clockwise. (Adds the previously missing type signature.)
rotationMatrix :: Floating a => a -> Matrix a
rotationMatrix theta = Matrix 3 3 [[cos t, -(sin t), 0], [sin t, cos t, 0], [0, 0, 1]]
    where t = - (theta * pi / 180) -- to change to clockwise rotation and map to rads
|
5outh/Haskell-Graphics-Projects
|
Project2/Types.hs
|
mit
| 1,063 | 0 | 10 | 233 | 495 | 291 | 204 | 26 | 1 |
{-# LANGUAGE ViewPatterns #-}
module Data.Time.Calendar.Month where
import Control.Arrow ((&&&))
import Data.Array
import Data.Char (toUpper)
import Data.Time.Calendar
{-- a solution to the problem posted at http://lpaste.net/1501657254015795200
@1HaskellADay solution for 2016-02-02
So, we have a problem.
We have dates in this format:
January 01, 2016
January 18, 2016
February 15, 2016
March 25, 2016
May 30, 2016
July 04, 2016
September 05, 2016
November 24, 2016
November 25, 2016
December 26, 2016
But we want them to be, you know: Day-values.
Today's problem. Convert the above values into Day-values.
--}
-- | Full English month names in calendar order; 'fromEnum' yields a
-- zero-based index (January = 0), and 'Read' parses the literal name.
data Month = January | February | March | April | May | June
           | July | August | September | October | November | December
   deriving (Eq, Ord, Enum, Read, Show)
-- | Parse a date written as "MonthName DD, YYYY" (e.g. "January 01, 2016")
-- into a 'Day'. Partial: only defined for inputs with exactly three
-- whitespace-separated words, and 'read' errors on malformed fields.
mosDayYrToDay :: String -> Day
mosDayYrToDay (words -> [monthName, dayComma, yearStr]) =
   fromGregorian year monthNumber dayOfMonth
   where year        = read yearStr
         monthNumber = 1 + fromEnum (read monthName :: Month)
         dayOfMonth  = read (init dayComma) -- drop the trailing comma
-- to do the above function, you may need to 'read' in a Month value ... maybe.
-- convert dates to [Day]:
-- | Sample input dates in "MonthName DD, YYYY" format, as consumed by
-- 'mosDayYrToDay'.
dates :: [String]
dates = ["January 01, 2016","January 18, 2016","February 15, 2016",
"March 25, 2016","May 30, 2016","July 04, 2016","September 05, 2016",
"November 24, 2016","November 25, 2016","December 26, 2016"]
{--
*Data.Time.Calendar.Month> map mosDayYrToDay dates ~>
[2016-01-01, 2016-01-18, 2016-02-15,
2016-03-25, 2016-05-30, 2016-07-04, 2016-09-05,
2016-11-24, 2016-11-25, 2016-12-26]
--}
{-- BONUS -------------------------------------------------------------------
Show each input string alongside each resulting Day
--}
-- | Pair each input date string with its parsed 'Day' value.
datesandDays :: [String] -> [(String, Day)]
datesandDays = map (\str -> (str, mosDayYrToDay str))
{--
*Data.Time.Calendar.Month Control.Arrow> mapM_ print $ datesandDays dates
("January 01, 2016",2016-01-01)
("January 18, 2016",2016-01-18)
("February 15, 2016",2016-02-15)
("March 25, 2016",2016-03-25)
("May 30, 2016",2016-05-30)
("July 04, 2016",2016-07-04)
("September 05, 2016",2016-09-05)
("November 24, 2016",2016-11-24)
("November 25, 2016",2016-11-25)
("December 26, 2016",2016-12-26)
--}
-- Now we need abbreviated months for tweets:
-- | Upper-case three-letter month abbreviations (the form used in tweet
-- timestamps); 'fromEnum' yields a zero-based index (JAN = 0).
data AbbrevMonth = JAN | FEB | MAR | APR | MAY | JUN | JUL
                 | AUG | SEP | OCT | NOV | DEC
   deriving (Eq, Ord, Enum, Bounded, Ix, Show, Read)
-- | Parse a Twitter-style timestamp, e.g. "Tue Apr 26 01:30:36 +0000 2016",
-- into a 'Day'. Partial: only defined for inputs with exactly six
-- whitespace-separated fields.
readTweetDate :: String -> Day
readTweetDate (words -> [_weekday, monthAbbrev, dayStr, _time, _offset, yearStr]) =
   fromGregorian (read yearStr) (mos monthAbbrev) (read dayStr)
-- | One-based calendar number of an abbreviated month name, e.g. "Apr" -> 4.
mos :: String -> Int
mos name = 1 + fromEnum (readMos name)
-- | Parse a month abbreviation case-insensitively, e.g. "apr" -> APR.
-- Partial: 'read' errors out on unknown abbreviations.
readMos :: String -> AbbrevMonth
readMos name = read (map toUpper name)
{--
An example from the JSON
*Data.Time.Calendar.Month> readTweetDate "Tue Apr 26 01:30:36 +0000 2016" ~>
2016-04-26
... which also happens to be my birthday, oddly enough ... SWEET!
--}
|
geophf/1HaskellADay
|
exercises/HAD/Data/Time/Calendar/Month.hs
|
mit
| 2,855 | 0 | 10 | 550 | 456 | 268 | 188 | 29 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module ImagePacker
( PackedImageInfo
, hasIntersection
, loadFiles
, packImages
, toPackedImageInfos
, writeTexture
) where
import qualified Codec.Picture as Picture
import qualified Codec.Picture.Types as Picture
import Control.Concurrent.Async (mapConcurrently, mapConcurrently_)
import Control.Exception (throw)
import Control.Monad (mplus)
import Control.Monad.ST (RealWorld)
import qualified Data.List as List
import qualified Data.Maybe as Maybe
import Data.Vector (Vector, (!))
import qualified Data.Vector as V (fromList, toList)
import qualified Data.Vector.Generic as V (copy, imapM_, slice)
import qualified Data.Vector.Storable as SV (unsafeCast)
import qualified Data.Vector.Storable.Mutable as MV (slice, unsafeCast, write)
import Data.Word (Word32)
import ImagePacker.Types
-- | Decode every file concurrently, converting each image to RGBA8.
-- Decoding failures are rethrown as 'userError' IO exceptions.
loadFiles :: [FilePath] -> IO (Vector(Picture.Image Picture.PixelRGBA8))
loadFiles = fmap V.fromList . mapConcurrently loadFile
    where
    loadFile filepath = do
        a <- Picture.readImage filepath
        -- NOTE(review): 'throw' (not 'throwIO') inside IO gives imprecise
        -- exception timing; consider 'throwIO' here.
        image <- either (throw . userError) return a
        return (Picture.convertRGBA8 image)
-- | Pack rectangles (image sizes) into as few fixed-size textures as
-- possible, keeping @spacing@ pixels of margin around every placed image.
-- Images are placed largest-area first; each result element describes the
-- layouts and remaining free space of one output texture.
packImages
    :: (Int, Int)        -- ^ (width, height) of each output texture
    -> Int               -- ^ margin in pixels kept around every image
    -> Vector (Int, Int) -- ^ source image sizes, indexed by image id
    -> [Packed]
packImages (textureWidth, textureHeight) spacing xs =
    List.foldl' pack [] . sortInputs . zip [0..] . V.toList $ xs
    where
    -- Place one image into the first texture that can hold it, or open a
    -- new texture when none can.
    pack :: [Packed] -> (Int, (Int, Int)) -> [Packed]
    pack ps a =
        Maybe.fromMaybe (ps ++ [newRegion a]) (tryPack a ps)
    -- A fresh texture holding just this image at the top-left margin; the
    -- free space is the texture interior split around the placed image.
    newRegion (index, (w, h)) =
        let layouts = Layout index (spacing, spacing) False : []
            spaces = relocateSpaces (spacing, spacing) (w + spacing, h + spacing) (newRect (spacing, spacing) (textureWidth - spacing, textureHeight - spacing))
        in Packed layouts spaces
    -- Try each existing texture in order, leaving the others unchanged.
    tryPack _ [] = Nothing
    tryPack a @ (index, (w, h)) (r : rs) =
        case tryPackOne (index, (w + spacing, h + spacing)) r of
            Just r' -> Just (r' : rs)
            Nothing -> fmap (r :) (tryPack a rs)
    -- Largest area first, so big images claim space early.
    sortInputs = List.sortBy (\(_, (lw, lh)) (_, (rw, rh)) -> compare (rw * rh) (lw * lh))
-- | Try to place one image (size already includes the margin) into a
-- single texture. An unrotated fit is attempted first, then a 90-degree
-- rotated one; returns 'Nothing' when neither fits any free rectangle.
tryPackOne :: (Int, (Int, Int)) -> Packed -> Maybe Packed
tryPackOne (index, (w, h)) (Packed layouts spaces) = do
    (Rect _ _ _ rp, rotated) <- s1 `mplus` s2
    let layout = Layout index rp rotated
        size = if rotated then (h, w) else (w, h)
        -- Free rectangles overlapping the placed image must be re-split
        -- around it; the rest survive unchanged.
        (intersectSpaces, restSpaces) = List.partition (hasIntersection rp size) spaces
        relocated = removeInclusion . concatMap (relocateSpaces rp size) $ intersectSpaces
    return $ Packed (layout : layouts) (restSpaces ++ relocated)
    where
    -- Candidate placements: each free space unrotated, then rotated.
    s1 = List.find locatable $ zip spaces (repeat False)
    s2 = List.find locatable $ zip spaces (repeat True)
    locatable ((Rect _ rw rh _), False) = rw >= w && rh >= h
    locatable ((Rect _ rw rh _), True) = rh >= w && rw >= h
-- | Do a rectangle at position (x, y) with size (w, h) and the given
-- 'Rect' overlap? Compares doubled center distances against summed
-- extents to stay in integer arithmetic.
hasIntersection :: (Int, Int) -> (Int, Int) -> Rect -> Bool
hasIntersection (x, y) (w, h) (Rect _ rw rh (rx, ry)) =
    abs (cx - rcx) < w + rw && abs (cy - rcy) < h + rh
  where
    -- Doubled center coordinates of each rectangle.
    cx  = x * 2 + w
    cy  = y * 2 + h
    rcx = rx * 2 + rw
    rcy = ry * 2 + rh
-- | Split a free-space rectangle around a rectangle placed at @pos@ with
-- size @size@, returning the leftover free areas on all four sides.
relocateSpaces :: (Int, Int) -> (Int, Int) -> Rect -> [Rect]
relocateSpaces pos size space =
    concat [horizontalSpaces pos size space, verticalSpaces pos size space]
-- | Leftover free strips of @Rect@ above and below a rectangle placed at
-- vertical position @y@ with height @h@ (full width of the free space).
horizontalSpaces :: (Int, Int) -> (Int, Int) -> Rect -> [Rect]
horizontalSpaces (_, y) (_, h) (Rect _ rw rh (rx, ry)) =
    [ newRect (rx, ry) (rw, y - ry) | ry < y ]
        ++ [ newRect (rx, y + h) (rw, ry + rh - y - h) | y + h < ry + rh ]
-- | Leftover free strips of @Rect@ left and right of a rectangle placed
-- at horizontal position @x@ with width @w@ (full height of the free space).
verticalSpaces :: (Int, Int) -> (Int, Int) -> Rect -> [Rect]
verticalSpaces (x, _) (w, _) (Rect _ rw rh (rx, ry)) =
    [ newRect (rx, ry) (x - rx, rh) | rx < x ]
        ++ [ newRect (x + w, ry) (rx + rw - x - w, rh) | x + w < rx + rw ]
-- | Drop every rectangle that is wholly contained in another rectangle of
-- the list. The list is sorted first so each element only needs to be
-- checked against the elements that follow it.
removeInclusion :: [Rect] -> [Rect]
removeInclusion rects =
    [ r | (r : later) <- List.tails (List.sort rects)
        , not (any (r `isInside`) later) ]
  where
    -- Does the first rectangle lie completely within the second?
    isInside (Rect _ w h (x, y)) (Rect _ rw rh (rx, ry)) =
        rx <= x && x + w <= rx + rw && ry <= y && y + h <= ry + rh
-- | Build a 'Rect' at the given position and size; the first field caches
-- the area (width * height).
newRect :: (Int, Int) -> (Int, Int) -> Rect
newRect position (width, height) =
    Rect (width * height) width height position
-- | Render all layouts of one 'Packed' result into a new mutable RGBA
-- texture of the given size and write it to @destination@ as a PNG.
writeTexture :: Vector (Picture.Image Picture.PixelRGBA8) -> FilePath -> (Int, Int) -> Packed -> IO ()
writeTexture sources destination (width, height) (Packed layouts _) = do
    texture <- Picture.newMutableImage width height
    -- Layouts occupy disjoint regions, so they are blitted concurrently.
    mapConcurrently_ (render texture) layouts
    Picture.writePng destination =<< Picture.freezeImage texture
    where
    -- Rotated images are copied pixel by pixel; unrotated ones row by row.
    render texture (Layout index p True) =
        writePixels texture p True $ sources ! index
    render texture (Layout index p False) =
        writeSubImage texture p $ sources ! index
    -- Copy a source image to offset (ox, oy), viewing both pixel buffers
    -- as Word32 vectors (one word per RGBA8 pixel).
    writePixels texture (ox, oy) rotated img =
        let imageData = SV.unsafeCast $ Picture.imageData img
            textureData = MV.unsafeCast $ Picture.mutableImageData texture
            w = Picture.imageWidth img
            h = Picture.imageHeight img
            -- Rotated: source pixel (x, y) lands at (ox + h - 1 - y, oy + x),
            -- i.e. the image is turned a quarter turn.
            write True i a =
                let (y, x) = divMod i w
                    (x', y') = (ox + h - y - 1, oy + x)
                in writeAt (x' + y' * width) a
            write False i a =
                let (y, x) = divMod i w
                    (x', y') = (ox + x, oy + y)
                in writeAt (x' + y' * width) a
            writeAt i a = MV.write textureData i (a :: Word32)
        in V.imapM_ (write rotated) imageData
-- | Copy @source@ into the mutable @target@ image with its top-left
-- corner at (x, y), one scanline at a time via raw component-vector
-- slices. No bounds checking: the caller must ensure the source fits.
writeSubImage
    :: Picture.MutableImage RealWorld Picture.PixelRGBA8 -> (Int, Int) -> Picture.Image Picture.PixelRGBA8 -> IO ()
writeSubImage target (x, y) source =
    mapM_ (uncurry V.copy) (zip targetSlices sourceSlices)
    where
    -- Components per pixel (4 for RGBA8); slice offsets are in components.
    componentCount = Picture.componentCount (undefined :: Picture.PixelRGBA8)
    Picture.MutableImage targetWidth _ targetData = target
    -- Start offset of each destination scanline.
    targetSliceStarts = [(x + (y + dy) * targetWidth) * componentCount | dy <- [0..(sourceHeight - 1)]]
    lineSize = sourceWidth * componentCount
    targetSlices = map (\s -> MV.slice s lineSize targetData) targetSliceStarts
    Picture.Image sourceWidth sourceHeight sourceData = source
    -- Start offset of each source scanline.
    sourceSliceStarts = [dy * sourceWidth * componentCount | dy <- [0..(sourceHeight - 1)]]
    sourceSlices = map (\s -> V.slice s lineSize sourceData) sourceSliceStarts
-- | Flatten the per-texture packing results into one 'PackedImageInfo'
-- per placed image, numbering the textures from zero.
toPackedImageInfos :: Vector FilePath -> Vector (Int, Int) -> [Packed] -> [PackedImageInfo]
toPackedImageInfos sourceNames sizes packs =
    concat (zipWith fromPacked [0..] packs)
    where
    fromPacked textureIndex (Packed layouts _) =
        [ PackedImageInfo (sourceNames ! index) textureIndex p (sizes ! index) False
        | Layout index p _ <- layouts ]
|
bigsleep/ImagePacker
|
src/ImagePacker.hs
|
mit
| 6,891 | 1 | 18 | 1,759 | 2,954 | 1,595 | 1,359 | 139 | 3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.