code (string, 5..1.03M) | repo_name (string, 5..90) | path (string, 4..158) | license (15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
module HN.Curl where
import Network.Curl
-- | Download a string from a URI.
downloadString :: String -> IO (Either (CurlCode,String) String)
downloadString uri = do
withCurlDo $ do
(code,resp) <- curlGetString_ uri opts
case code of
CurlOK -> return (Right resp)
_ -> return (Left (code,resp))
-- Some silly servers think they're super smart by disallowing the
-- "Curl" user-agent. Aw. ^_^
where opts = [CurlUserAgent "Chrome"]
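-- | A small usage sketch added for illustration (not part of the original
-- module); the URI argument is whatever the caller supplies.
demoDownload :: String -> IO ()
demoDownload uri = do
  result <- downloadString uri
  case result of
    Right body       -> putStrLn ("downloaded " ++ show (length body) ++ " characters")
    Left (code, err) -> putStrLn ("curl failed with " ++ show code ++ ": " ++ err)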
|
lwm/haskellnews
|
src/HN/Curl.hs
|
bsd-3-clause
| 462 | 0 | 16 | 100 | 129 | 68 | 61 | 10 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>Encode/Decode/Hash Add-on</title>
<maps>
<homeID>encoder</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/encoder/src/main/javahelp/org/zaproxy/addon/encoder/resources/help_id_ID/helpset_id_ID.hs
|
apache-2.0
| 974 | 77 | 69 | 156 | 419 | 212 | 207 | -1 | -1 |
{-# LANGUAGE TypeOperators #-}
-- #2993
module T2993 where
foo b a = a <**> b . b
|
sdiehl/ghc
|
testsuite/tests/rename/should_fail/T2993.hs
|
bsd-3-clause
| 86 | 0 | 6 | 22 | 24 | 14 | 10 | -1 | -1 |
module HAD.Y2014.M03.D18.Exercise where
-- $setup
-- >>> import Data.Maybe
-- >>> let backPartner = (>>= partner) . (>>= partner)
data Person a = Single a | Married a (Person a)
partner :: Person a -> Maybe (Person a)
partner (Married _ p) = Just p
partner _ = Nothing
get :: Person a -> a
get (Single x) = x
get (Married x _) = x
-- | wedding
-- Marry single people, linking them together
-- Nothing if one is married
--
-- If you're used to Haskell, this one should be VERY easy.
-- But remember how strange it was the first time...
-- And see you tomorrow!
--
-- Examples:
--
-- >>> isNothing $ wedding (Married "foo" (Single "foobar")) (Single "bar")
-- True
--
-- prop> \(x,y) -> (fmap get . backPartner . fmap fst $ wedding (Single x) (Single y)) == Just (x :: String)
-- prop> \(x,y) -> (fmap get . backPartner . fmap snd $ wedding (Single x) (Single y)) == Just (y :: String)
wedding :: Person a -> Person a -> Maybe (Person a, Person a)
wedding = undefined
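-- One possible solution sketch (illustrative only, left commented out so the
-- exercise stays an exercise): tie the knot lazily so each spouse's partner
-- points at the other.
--
-- wedding (Single x) (Single y) = Just (groom, bride)
--   where groom = Married x bride
--         bride = Married y groom
-- wedding _ _ = Nothing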
|
1HaskellADay/1HAD
|
exercises/HAD/Y2014/M03/D18/Exercise.hs
|
mit
| 975 | 0 | 9 | 197 | 179 | 101 | 78 | 10 | 1 |
{-# LANGUAGE CPP, OverloadedStrings #-}
module LoadCache where
import Haxl.Core
import ExampleDataSource
#include "LoadCache.txt"
|
GaussDing/Haxl
|
tests/LoadCache.hs
|
bsd-3-clause
| 132 | 0 | 4 | 16 | 14 | 10 | 4 | 4 | 0 |
module Distribution.Client.Dependency.Modular.Validate (validateTree) where
-- Validation of the tree.
--
-- The task here is to make sure all constraints hold. After validation, any
-- assignment returned by exploration of the tree should be a complete valid
-- assignment, i.e., actually constitute a solution.
import Control.Applicative
import Control.Monad.Reader hiding (sequence)
import Data.List as L
import Data.Map as M
import Data.Traversable
import Prelude hiding (sequence)
import Distribution.Client.Dependency.Modular.Assignment
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Index
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.PSQ as P
import Distribution.Client.Dependency.Modular.Tree
import Distribution.Client.ComponentDeps (Component)
-- In practice, most constraints are implication constraints (IF we have made
-- a number of choices, THEN we also have to ensure that). We call constraints
-- for which the preconditions are fulfilled ACTIVE. We maintain a set
-- of currently active constraints that we pass down the node.
--
-- We aim at detecting inconsistent states as early as possible.
--
-- Whenever we make a choice, there are two things that need to happen:
--
-- (1) We must check that the choice is consistent with the currently
-- active constraints.
--
-- (2) The choice increases the set of active constraints. For the new
-- active constraints, we must check that they are consistent with
-- the current state.
--
-- We can actually merge (1) and (2) by saying that the current choice is
-- a new active constraint, fixing the choice.
--
-- If a test fails, we have detected an inconsistent state. We can
-- disable the current subtree and do not have to traverse it any further.
--
-- We need a good way to represent the current state, i.e., the current
-- set of active constraints. Since the main situation where we have to
-- search in it is (1), it seems best to store the state by package: for
-- every package, we store which versions are still allowed. If for any
-- package, we have inconsistent active constraints, we can also stop.
-- This is a particular way to read task (2):
--
-- (2, weak) We only check if the new constraints are consistent with
-- the choices we've already made, and add them to the active set.
--
-- (2, strong) We check if the new constraints are consistent with the
-- choices we've already made, and the constraints we already have.
--
-- It currently seems as if we're implementing the weak variant. However,
-- when used together with 'preferEasyGoalChoices', we will find an
-- inconsistent state in the very next step.
--
-- What do we do about flags?
--
-- Like for packages, we store the flag choices we have already made.
-- Now, regarding (1), we only have to test whether we've decided the
-- current flag before. Regarding (2), the interesting bit is in discovering
-- the new active constraints. To this end, we look up the constraints for
-- the package the flag belongs to, and traverse its flagged dependencies.
-- Wherever we find the flag in question, we start recording dependencies
-- underneath as new active dependencies. If we encounter other flags, we
-- check if we've chosen them already and either proceed or stop.
-- | The state needed during validation.
data ValidateState = VS {
index :: Index,
saved :: Map QPN (FlaggedDeps Component QPN), -- saved, scoped, dependencies
pa :: PreAssignment,
qualifyOptions :: QualifyOptions
}
type Validate = Reader ValidateState
validate :: Tree QGoalReasonChain -> Validate (Tree QGoalReasonChain)
validate = cata go
where
go :: TreeF QGoalReasonChain (Validate (Tree QGoalReasonChain)) -> Validate (Tree QGoalReasonChain)
go (PChoiceF qpn gr ts) = PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn gr) ts)
go (FChoiceF qfn gr b m ts) =
do
-- Flag choices may occur repeatedly (because they can introduce new constraints
-- in various places). However, subsequent choices must be consistent. We thereby
-- collapse repeated flag choice nodes.
PA _ pfa _ <- asks pa -- obtain current flag-preassignment
case M.lookup qfn pfa of
Just rb -> -- flag has already been assigned; collapse choice to the correct branch
case P.lookup rb ts of
Just t -> goF qfn gr rb t
Nothing -> return $ Fail (toConflictSet (Goal (F qfn) gr)) (MalformedFlagChoice qfn)
Nothing -> -- flag choice is new, follow both branches
FChoice qfn gr b m <$> sequence (P.mapWithKey (goF qfn gr) ts)
go (SChoiceF qsn gr b ts) =
do
-- Optional stanza choices are very similar to flag choices.
PA _ _ psa <- asks pa -- obtain current stanza-preassignment
case M.lookup qsn psa of
Just rb -> -- stanza choice has already been made; collapse choice to the correct branch
case P.lookup rb ts of
Just t -> goS qsn gr rb t
Nothing -> return $ Fail (toConflictSet (Goal (S qsn) gr)) (MalformedStanzaChoice qsn)
Nothing -> -- stanza choice is new, follow both branches
SChoice qsn gr b <$> sequence (P.mapWithKey (goS qsn gr) ts)
-- We don't need to do anything for goal choices or failure nodes.
go (GoalChoiceF ts) = GoalChoice <$> sequence ts
go (DoneF rdm ) = pure (Done rdm)
go (FailF c fr ) = pure (Fail c fr)
-- What to do for package nodes ...
goP :: QPN -> QGoalReasonChain -> POption -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain)
goP qpn@(Q _pp pn) gr (POption i _) r = do
PA ppa pfa psa <- asks pa -- obtain current preassignment
idx <- asks index -- obtain the index
svd <- asks saved -- obtain saved dependencies
qo <- asks qualifyOptions
-- obtain dependencies and index-dictated exclusions introduced by the choice
let (PInfo deps _ mfr) = idx ! pn ! i
-- qualify the deps in the current scope
let qdeps = qualifyDeps qo qpn deps
-- the new active constraints are given by the instance we have chosen,
-- plus the dependency information we have for that instance
let goal = Goal (P qpn) gr
let newactives = Dep qpn (Fixed i goal) : L.map (resetGoal goal) (extractDeps pfa psa qdeps)
-- We now try to extend the partial assignment with the new active constraints.
let mnppa = extend (P qpn) ppa newactives
-- In case we continue, we save the scoped dependencies
let nsvd = M.insert qpn qdeps svd
case mfr of
Just fr -> -- The index marks this as an invalid choice. We can stop.
return (Fail (toConflictSet goal) fr)
_ -> case mnppa of
Left (c, d) -> -- We have an inconsistency. We can stop.
return (Fail c (Conflicting d))
Right nppa -> -- We have an updated partial assignment for the recursive validation.
local (\ s -> s { pa = PA nppa pfa psa, saved = nsvd }) r
-- What to do for flag nodes ...
goF :: QFN -> QGoalReasonChain -> Bool -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain)
goF qfn@(FN (PI qpn _i) _f) gr b r = do
PA ppa pfa psa <- asks pa -- obtain current preassignment
svd <- asks saved -- obtain saved dependencies
-- Note that there should be saved dependencies for the package in question,
-- because while building, we do not choose flags before we see the packages
-- that define them.
let qdeps = svd ! qpn
-- We take the *saved* dependencies, because these have been qualified in the
-- correct scope.
--
-- Extend the flag assignment
let npfa = M.insert qfn b pfa
-- We now try to get the new active dependencies we might learn about because
-- we have chosen a new flag.
let newactives = extractNewDeps (F qfn) gr b npfa psa qdeps
-- As in the package case, we try to extend the partial assignment.
case extend (F qfn) ppa newactives of
Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found
Right nppa -> local (\ s -> s { pa = PA nppa npfa psa }) r
-- What to do for stanza nodes (similar to flag nodes) ...
goS :: QSN -> QGoalReasonChain -> Bool -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain)
goS qsn@(SN (PI qpn _i) _f) gr b r = do
PA ppa pfa psa <- asks pa -- obtain current preassignment
svd <- asks saved -- obtain saved dependencies
-- Note that there should be saved dependencies for the package in question,
-- because while building, we do not choose flags before we see the packages
-- that define them.
let qdeps = svd ! qpn
-- We take the *saved* dependencies, because these have been qualified in the
-- correct scope.
--
-- Extend the flag assignment
let npsa = M.insert qsn b psa
-- We now try to get the new active dependencies we might learn about because
-- we have chosen a new flag.
let newactives = extractNewDeps (S qsn) gr b pfa npsa qdeps
-- As in the package case, we try to extend the partial assignment.
case extend (S qsn) ppa newactives of
Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found
Right nppa -> local (\ s -> s { pa = PA nppa pfa npsa }) r
-- | We try to extract as many concrete dependencies from the given flagged
-- dependencies as possible. We make use of all the flag knowledge we have
-- already acquired.
extractDeps :: FAssignment -> SAssignment -> FlaggedDeps comp QPN -> [Dep QPN]
extractDeps fa sa deps = do
d <- deps
case d of
Simple sd _ -> return sd
Flagged qfn _ td fd -> case M.lookup qfn fa of
Nothing -> mzero
Just True -> extractDeps fa sa td
Just False -> extractDeps fa sa fd
Stanza qsn td -> case M.lookup qsn sa of
Nothing -> mzero
Just True -> extractDeps fa sa td
Just False -> []
-- | We try to find new dependencies that become available due to the given
-- flag or stanza choice. We therefore look for the choice in question, and then call
-- 'extractDeps' for everything underneath.
extractNewDeps :: Var QPN -> QGoalReasonChain -> Bool -> FAssignment -> SAssignment -> FlaggedDeps comp QPN -> [Dep QPN]
extractNewDeps v gr b fa sa = go
where
go :: FlaggedDeps comp QPN -> [Dep QPN] -- Type annotation necessary (polymorphic recursion)
go deps = do
d <- deps
case d of
Simple _ _ -> mzero
Flagged qfn' _ td fd
| v == F qfn' -> L.map (resetGoal (Goal v gr)) $
if b then extractDeps fa sa td else extractDeps fa sa fd
| otherwise -> case M.lookup qfn' fa of
Nothing -> mzero
Just True -> go td
Just False -> go fd
Stanza qsn' td
| v == S qsn' -> L.map (resetGoal (Goal v gr)) $
if b then extractDeps fa sa td else []
| otherwise -> case M.lookup qsn' sa of
Nothing -> mzero
Just True -> go td
Just False -> []
-- | Interface.
validateTree :: Index -> Tree QGoalReasonChain -> Tree QGoalReasonChain
validateTree idx t = runReader (validate t) VS {
index = idx
, saved = M.empty
, pa = PA M.empty M.empty M.empty
, qualifyOptions = defaultQualifyOptions idx
}
|
enolan/cabal
|
cabal-install/Distribution/Client/Dependency/Modular/Validate.hs
|
bsd-3-clause
| 12,175 | 0 | 22 | 3,508 | 2,329 | 1,199 | 1,130 | 128 | 14 |
{-# LANGUAGE TypeFamilies, GeneralizedNewtypeDeriving #-}
module ShouldCompile where
import Control.Applicative (Applicative)
data family S a
newtype instance S Int = S Int
deriving Eq
data family S2 a b
newtype instance S2 Int b = S2 (IO b)
deriving (Functor, Applicative, Monad)
|
snoyberg/ghc
|
testsuite/tests/indexed-types/should_compile/DerivingNewType.hs
|
bsd-3-clause
| 337 | 0 | 7 | 98 | 78 | 47 | 31 | 9 | 0 |
{-# OPTIONS_GHC -XLiberalTypeSynonyms #-}
module ShouldCompile where
type T a b = a
type S m = m ()
f :: S (T Int)
f = undefined
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_compile/tc234.hs
|
bsd-3-clause
| 136 | 0 | 7 | 34 | 44 | 27 | 17 | 6 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Floating
( floating
) where
import Fractional (fractional)
import System.Random (Random)
import Test.QuickCheck.Checkers (EqProp, (=-=), inverseL)
import Test.QuickCheck.Extra (UnitInterval(..), Tiny(..), BiunitInterval)
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.QuickCheck (testProperty, NonNegative(..), Positive(..), Arbitrary, (==>))
import Test.Tasty.HUnit (testCase, (@?=))
floating :: forall a. (Arbitrary a, EqProp a, Show a, Floating a, Ord a, Random a) =>
a -> TestTree
floating _ = testGroup "Test Floating instance" ts
where e = exp 1
ts = [ fractional (undefined :: a)
, testCase "π/4 = atan 1" ((pi::a) @?= 4 * atan 1)
, testProperty "log == logBase e"
(log =-= logBase (e :: Positive a))
, testProperty "exp == (e **)" (exp =-= ((e::a) **))
, testProperty "sqrt x * sqrt x = x"
(\(NonNegative (x :: a)) -> let r = sqrt x
in r * r == x)
, testProperty "law of exponents"
(\(Positive (base :: a)) x y ->
base ** (x + y) =-= base ** x * base ** y)
, testProperty "logarithm definition"
(\(Positive (b :: a)) (Tiny c) ->
let x = b ** c
in b /= 1 ==> c =-= logBase b x)
, testProperty "sine cosine definition"
(\x (y :: a) ->
cos (x - y) =-= cos x * cos y + sin x * sin y)
-- TODO: Use open interval
, testProperty "0 < x cos x"
(\(x::UnitInterval a) -> 0 <= x * cos x)
-- Use <= here because of precision issues :(
, testProperty "x cos x < sin x"
(\(x::UnitInterval a) -> x * cos x <= sin x)
, testProperty "sin x < x" (\(x::UnitInterval a) -> sin x <= x)
, testProperty "tangent definition"
(\(x::a) -> cos x /= 0 ==> tan x =-= sin x / cos x)
, testProperty "asin left inverse"
(inverseL sin (asin :: BiunitInterval a -> BiunitInterval a))
, testProperty "acos left inverse"
(inverseL cos (acos :: BiunitInterval a -> BiunitInterval a))
, testProperty "atan left inverse" (inverseL tan (atan :: a -> a))
, testProperty "sinh definition"
(\(x::a) -> sinh x =-= (exp x - exp (-x)) / 2)
, testProperty "cosh definition"
(\(x::a) -> cosh x =-= (exp x + exp (-x)) / 2)
, testProperty "tanh definition"
(\(x::a) -> tanh x =-= sinh x / cosh x)
, testProperty "sinh left inverse"
(inverseL asinh (sinh :: a -> a))
, testProperty "cosh left inverse"
(acosh . cosh =-= (abs :: a -> a))
, testProperty "tanh left inverse"
(inverseL atanh (tanh :: Tiny a -> Tiny a))
]
|
expipiplus1/exact-real
|
test/Floating.hs
|
mit
| 3,332 | 0 | 17 | 1,388 | 1,035 | 557 | 478 | -1 | -1 |
import Data.List (foldl1')
import Math.NumberTheory.Primes.Testing (isPrime)
--import Control.Parallel.Strategies (using, parList, rseq)
-- isPrime :: Integer -> Bool
-- isPrime n
-- | n < 2 = False
-- | n == 2 = True
-- | otherwise =
-- let sqn = (floor (sqrt (fromIntegral n))) + 1
-- in and [ n `mod` d /= 0 | d <- [2..sqn] ]
formula :: Integer -> Integer -> Integer -> Integer
formula a b n = n*n + a*n + b
test :: Integer -> Integer -> Integer -> Bool
test a b n = isPrime $ formula a b n
primeSequence :: Integer -> Integer -> [Integer]
primeSequence a b = takeWhile (test a b) [0..]
sequenceLength :: Integer -> Integer -> Int
sequenceLength a b = length $ primeSequence a b
maximum' = foldl1' max
sequenceLenghts = [ (sequenceLength a b, a, b)
| a <- [(-999)..999]
, b <- [(-999)..999]
] -- `using` parList rseq
euler27 = maximum' sequenceLenghts
main = print $ a * b
where (len, a, b) = euler27
--(71,-61,971)
--(30.64 secs, 5773882468 bytes) -- using my isPrime implementation
--(15.15 secs, 2572417912 bytes) -- using Math.NumberTheory.Primes.Testing.isPrime
|
feliposz/project-euler-solutions
|
haskell/euler27.hs
|
mit
| 1,153 | 0 | 10 | 275 | 298 | 165 | 133 | 17 | 1 |
-- ($) :: (a -> b) -> a -> b
-- f $ x = f x
-- f a b c === (((f a) b) c)
test1 = sum (map sqrt [1..130])
test2 = sum $ map sqrt [1..130]
test3 = sqrt (3 + 4 + 9)
test4 = sqrt $ 3 + 4 + 9
test5 = sum (filter (> 10) (map (*2) [2..10]))
test6 = sum $ filter (> 10) (map (*2) [2..10])
test7 = sum $ filter (> 10) $ map (*2) [2..10]
test8 = map ($ 3) [(/4), (10*), (^2), sqrt]
|
v0lkan/learning-haskell
|
session-archive/007-dollar.hs
|
mit
| 377 | 0 | 10 | 101 | 224 | 128 | 96 | 8 | 1 |
main = putStrLn "สวัสดีครับ"
|
merxer/kata
|
haskell/14.hs
|
mit
| 49 | 0 | 5 | 4 | 9 | 4 | 5 | 1 | 1 |
{-# LANGUAGE TupleSections #-}
module SoOSiM.Components.ResourceManager.Behaviour where
import Control.Arrow (first,second)
import Data.Char (toLower)
import qualified Data.HashMap.Strict as HashMap
import Data.List (mapAccumL,intersect,(\\),partition)
import SoOSiM
import SoOSiM.Components.ResourceDescriptor
import SoOSiM.Components.ResourceManager.Interface
import SoOSiM.Components.ResourceManager.Types
behaviour ::
RM_State
-> Input RM_Cmd
-> Sim RM_State
behaviour s (Message _ (AddResource rId rd) retAddr) = do
let rs = HashMap.insert rId rd (resources s)
rsI = HashMap.insertWith (flip (++)) rd [rId] (resources_inv s)
s' = s { resources = rs, resources_inv = rsI, free_resources = (free_resources s) ++ [rId] }
yield s'
behaviour s (Message _ (RequestResources appId rsList) retAddr) = do
let (free',ids) = assignFree s rsList
busy = map (,appId) ids
s' = s { free_resources = free', busy_resources = (busy_resources s) ++ busy }
traceMsg ("REQ: " ++ show (rsList,ids))
respond ResourceManager retAddr (RM_Resources ids)
yield s'
behaviour s (Message _ (FreeAllResources appId) retAddr) = do
let (freed,busy') = first (map fst) $ partition ((== appId) . snd) (busy_resources s)
s' = s { free_resources = (free_resources s) ++ freed, busy_resources = busy' }
yield s'
behaviour s (Message _ (FreeResources appId rIds) retAddr) = do
let (freed,busy') = first (map fst) $ partition (\(rId,aId) -> aId == appId && rId `elem` rIds) (busy_resources s)
s' = s { free_resources = (free_resources s) ++ freed, busy_resources = busy' }
yield s'
behaviour s (Message _ (GetResourceDescription rId) retAddr) = do
let rdM = HashMap.lookup rId (resources s)
respond ResourceManager retAddr (RM_Descriptor rdM)
yield s
behaviour s _ = yield s
checkFree :: String -> ResourceFreeList -> ([ResourceId],Int) -> (ResourceFreeList,[ResourceId])
checkFree dm free (keys,needed)
= let keys' = intersect keys free
keys'' = take needed $ case dm of
"all" -> keys'
"half" -> take (ceiling $ (fromIntegral $ length keys') / 2) keys'
_ -> keys'
free' = free \\ keys''
in (free',keys'')
assignFree :: RM_State -> ResourceRequestList -> (ResourceFreeList,[ResourceId])
assignFree s rsList = (free',givenIds)
where
available = map (\(rTy,_) -> concat $
HashMap.elems $
HashMap.filterWithKey (\k _ -> isComplient k rTy) (resources_inv s)
) rsList
wanted = zip available (map snd rsList)
dm = map toLower (dist_method s)
(free',givenIds) = second concat $ mapAccumL (checkFree dm) (free_resources s) wanted
|
christiaanb/SoOSiM-components
|
src/SoOSiM/Components/ResourceManager/Behaviour.hs
|
mit
| 2,912 | 0 | 19 | 780 | 1,013 | 538 | 475 | 57 | 3 |
module Unison.Typechecker.Components (components, minimize, minimize') where
import Data.Bifunctor (first)
import qualified Data.Graph as Graph
import qualified Data.Map as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Unison.ABT as ABT
import Unison.Term (AnnotatedTerm')
import qualified Unison.Term as Term
import Unison.Var (Var)
components :: Var v => [(v, ABT.Term f v a)] -> [[(v, ABT.Term f v a)]]
components = components' ABT.freeVars
-- | Order bindings by dependencies and group into components.
-- Each component consists of > 1 bindings, each of which depends
-- transitively on all other bindings in the component.
--
-- 1-element components may or may not depend on themselves.
--
-- The order is such that a component at index i will not depend
-- on components and indexes > i. But a component at index i does not
-- _necessarily_ depend on any components at earlier indices.
--
-- Example:
--
-- let rec
-- ping n = pong (n + 1);
-- pong n = ping (n + 1);
-- g = id 42;
-- y = id "hi"
-- id x = x;
-- in ping g
--
-- `components` would produce `[[ping,pong], [id], [g], [y]]`
-- Notice that `id` comes before `g` and `y` in the output, since
-- both `g` and `y` depend on `id`.
--
-- Uses Tarjan's algorithm:
-- https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
components' :: Var v => (t -> Set v) -> [(v, t)] -> [[(v, t)]]
components' freeVars bs =
let
varIds = Map.fromList (map fst bs `zip` [(0::Int)..])
varId v = fromJust $ Map.lookup v varIds -- something horribly wrong if this bombs
-- use ints as keys for graph to preserve original source order as much as possible
graph = [ ((v,b), varId v, deps b) | (v,b) <- bs ]
vars = Set.fromList (map fst bs)
deps b = varId <$> Set.toList (Set.intersection vars (freeVars b))
in
Graph.flattenSCC <$> Graph.stronglyConnComp graph
-- | Algorithm for minimizing cycles of a `let rec`. This can
-- improve generalization during typechecking and may also be more
-- efficient for execution.
--
-- For instance:
--
-- minimize (let rec id x = x; g = id 42; y = id "hi" in g)
-- ==>
-- Just (let id x = x; g = id 42; y = id "hi" in g)
--
-- Gets rid of the let rec and replaces it with an ordinary `let`, such
-- that `id` is suitably generalized.
minimize :: Var v => AnnotatedTerm' vt v a -> Maybe (AnnotatedTerm' vt v a)
minimize (Term.LetRecNamedAnnotated' ann bs e) = case components (first snd <$> bs) of
[_single] -> Nothing
cs ->
let
varAnnotations = Map.fromList ((\((a,v),_) -> (v,a)) <$> bs)
annotationFor v = fromJust $ Map.lookup v varAnnotations
annotatedVar v = (annotationFor v, v)
-- When introducing a nested let/let rec, we use the annotation of the
-- variable that starts off that let/let rec
mklet [(hdv,hdb)] e
| Set.member hdv (ABT.freeVars hdb) = Term.letRec (annotationFor hdv) [(annotatedVar hdv, hdb)] e
| otherwise = Term.let1 [(annotatedVar hdv,hdb)] e
mklet cycle@((hdv,_):_) e = Term.letRec (annotationFor hdv) (first annotatedVar <$> cycle) e
mklet [] e = e
in
-- The outer annotation is going to be meaningful, so we make
-- sure to preserve it, whereas the annotations at intermediate Abs nodes
-- aren't necessarily meaningful
Just $ ABT.annotate ann (foldr mklet e cs) where
minimize _ = Nothing
minimize' :: Var v => AnnotatedTerm' vt v a -> AnnotatedTerm' vt v a
minimize' term = fromMaybe term (minimize term)
|
paulp/unison
|
parser-typechecker/src/Unison/Typechecker/Components.hs
|
mit
| 3,618 | 0 | 18 | 815 | 847 | 476 | 371 | 39 | 4 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Y2017.M11.D27.Exercise where
{--
Okay, we have a set of recommended articles, now we want to add some new
articles to the list. So, given an article id as a basis and a set of article
ids to add, add those articles to the source article's recommended list.
Yes: it isn't rocket science.
--}
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToRow
import Store.SQL.Connection
import Store.SQL.Util.Pivots
{--
Actually, when you get right down to it: since added articles are not scored,
adding rows to the recommendation table becomes a pivot table exercise. But
even then, since we're only working with one source article to add articles to,
this is just an insert-in-context.
--}
insertRecsStmt :: Query
insertRecsStmt =
[sql|INSERT INTO recommendation (for_article_id,recommended_article_id)
VALUES (?,?)|]
insertRec :: Connection -> Integer -> [Integer] -> IO ()
insertRec conn srcId recs = undefined
{-- BONUS -----------------------------------------------------------------
Write a program that takes a source article ID and a list of recommended
article ids and inserts that set into recommendation.
--}
main' :: [String] -> IO ()
main' artIds = undefined
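{-- A possible sketch (illustrative only; the exercise intentionally leaves the
bodies undefined). It assumes executeMany from Database.PostgreSQL.Simple,
pairing the source article id with each recommended article id:

insertRec conn srcId recs =
   () <$ executeMany conn insertRecsStmt (zip (repeat srcId) recs)
--}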
|
geophf/1HaskellADay
|
exercises/HAD/Y2017/M11/D27/Exercise.hs
|
mit
| 1,299 | 0 | 9 | 203 | 121 | 75 | 46 | 14 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module TimeUnitsJSONInstance where
import Data.Aeson.TH (Options (omitNothingFields),
defaultOptions, deriveJSON)
import Data.Time
import Data.Time.Clock.POSIX
import Data.Time.Units
import GHC.Generics
import Protolude hiding (FilePath)
import Text.PrettyPrint.GenericPretty
-- TODO NOT used anymore, delete it
$(deriveJSON defaultOptions {omitNothingFields = True} ''Microsecond)
-- deriving instance Generic Microsecond
-- deriving instance Pretty Microsecond
-- instance Pretty Microsecond where
-- pretty ( Microsecond m) =
-- to use for the ssLastMarketSubscriptionMessageSentAt
timeInMicroseconds :: IO Microsecond
timeInMicroseconds =
fromMicroseconds . fromIntegral . numerator . toRational . (* 1000000) <$>
getPOSIXTime
|
joe9/streaming-betfair-api
|
src/TimeUnitsJSONInstance.hs
|
mit
| 1,044 | 2 | 9 | 185 | 152 | 92 | 60 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-deltatime.html
module Stratosphere.ResourceProperties.IoTAnalyticsDatasetDeltaTime where
import Stratosphere.ResourceImports
-- | Full data type definition for IoTAnalyticsDatasetDeltaTime. See
-- 'ioTAnalyticsDatasetDeltaTime' for a more convenient constructor.
data IoTAnalyticsDatasetDeltaTime =
IoTAnalyticsDatasetDeltaTime
{ _ioTAnalyticsDatasetDeltaTimeOffsetSeconds :: Val Integer
, _ioTAnalyticsDatasetDeltaTimeTimeExpression :: Val Text
} deriving (Show, Eq)
instance ToJSON IoTAnalyticsDatasetDeltaTime where
toJSON IoTAnalyticsDatasetDeltaTime{..} =
object $
catMaybes
[ (Just . ("OffsetSeconds",) . toJSON) _ioTAnalyticsDatasetDeltaTimeOffsetSeconds
, (Just . ("TimeExpression",) . toJSON) _ioTAnalyticsDatasetDeltaTimeTimeExpression
]
-- | Constructor for 'IoTAnalyticsDatasetDeltaTime' containing required fields
-- as arguments.
ioTAnalyticsDatasetDeltaTime
:: Val Integer -- ^ 'itaddtOffsetSeconds'
-> Val Text -- ^ 'itaddtTimeExpression'
-> IoTAnalyticsDatasetDeltaTime
ioTAnalyticsDatasetDeltaTime offsetSecondsarg timeExpressionarg =
IoTAnalyticsDatasetDeltaTime
{ _ioTAnalyticsDatasetDeltaTimeOffsetSeconds = offsetSecondsarg
, _ioTAnalyticsDatasetDeltaTimeTimeExpression = timeExpressionarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-deltatime.html#cfn-iotanalytics-dataset-deltatime-offsetseconds
itaddtOffsetSeconds :: Lens' IoTAnalyticsDatasetDeltaTime (Val Integer)
itaddtOffsetSeconds = lens _ioTAnalyticsDatasetDeltaTimeOffsetSeconds (\s a -> s { _ioTAnalyticsDatasetDeltaTimeOffsetSeconds = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-deltatime.html#cfn-iotanalytics-dataset-deltatime-timeexpression
itaddtTimeExpression :: Lens' IoTAnalyticsDatasetDeltaTime (Val Text)
itaddtTimeExpression = lens _ioTAnalyticsDatasetDeltaTimeTimeExpression (\s a -> s { _ioTAnalyticsDatasetDeltaTimeTimeExpression = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/IoTAnalyticsDatasetDeltaTime.hs
|
mit
| 2,243 | 0 | 13 | 220 | 265 | 151 | 114 | 29 | 1 |
module Main where
import Filter.Abstract (abstract)
import Filter.Attr (simplifyAttr)
import Filter.Bib (bibliography)
import Filter.Float (float)
import Filter.Hyperref (hyperref)
import Filter.LinksAsNotes (linksAsNotes)
import Filter.Macros (processMacros)
import Filter.MultiBib (multibib)
import Filter.Multicol (multicol)
import Filter.NumberRef (numberRef)
import Filter.WrapFloat (wrapFloat)
import Paths_ppp (version)
import PostProcess (trim)
import PreProcess (include)
import Reader (toPandoc)
import Writer (toTex, toPdf)
import Control.Monad (forM_)
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.Monoid ((<>))
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Version (showVersion)
import Options.Applicative
import System.FilePath ((-<.>), takeExtension)
ppp :: FilePath -> IO T.Text
ppp fp
= fmap trim
. toTex
=<< processMacros False
. hyperref
. float
. wrapFloat
. multibib
=<< bibliography
. linksAsNotes
. numberRef
. multicol
. abstract
. simplifyAttr
=<< processMacros True
=<< toPandoc
=<< include fp
data Options = Options
{ targetTex :: Bool
, sourceFiles :: [FilePath]
}
parser :: Parser Options
parser
= infoOption ( "ppp version " ++ showVersion version )
( long "version"
<> help "show version information and exit" )
<*> abortOption ShowHelpText
( long "help"
<> help "show usage information and exit" )
<*> ( Options
<$> switch
( long "tex"
<> help "convert to latex instead of pdf" )
<*> some ( strArgument ( metavar "<file>..." ) ) )
main :: IO ()
main = do
opts <- execParser $ info parser idm
forM_ (sourceFiles opts) $ \file -> case (targetTex opts, takeExtension file) of
(False, ".md" ) -> do
putStrLn $ "rendering " ++ (file -<.> "pdf") ++ "..."
BS.writeFile (file -<.> "pdf") =<< toPdf =<< ppp file
(False, ".tex") -> do
putStrLn $ "rendering " ++ (file -<.> "pdf") ++ "..."
BS.writeFile (file -<.> "pdf") =<< toPdf =<< T.readFile file
(True, ".md" ) -> do
putStrLn $ "rendering " ++ (file -<.> "tex") ++ "..."
T.writeFile (file -<.> "tex") =<< ppp file
(True, ".tex") -> error $ file ++ ": is already in tex format"
(_, ext ) -> error $ file ++ ": unrecognised file extension `" ++ ext ++ "'"
|
Thhethssmuz/ppp
|
src/Main.hs
|
mit
| 2,510 | 0 | 20 | 662 | 757 | 410 | 347 | 74 | 5 |
-- Programming Paradigms, lab 2: recursive functions
-- 1. The factorial of a given number, unrestricted and tail recursive.
--
-- The number ADT resembles the list ADT:
-- - 0 (base case) <- 0! = 1
-- - n (induction step) <- n! = n * (n - 1)!
factorial 0 = 1
factorial n = n * factorial (n - 1)
-- How is factorial n evaluated for an arbitrary n?
--
-- > factorial n
-- > n * factorial (n - 1)
-- > n * (n - 1) * factorial (n - 2)
-- > ...
--
-- We have the same space problem, and the tail-recursive optimization
-- can be done by adding an "accumulator" parameter, whose initial
-- value is 1 (the value of the base case). Note that this way the
-- computation is carried out on the way forward.
factorialTail n = let factorialAux acc 0 = acc
factorialAux acc n = factorialAux (acc * n) (n - 1)
in factorialAux 1 n
-- 2. The n-th number in the Fibonacci sequence. The problem is similar
-- to the previous one, except that we deal with a second-order recurrence:
--
-- - The Fibonacci number at index 0 is 0
-- - The Fibonacci number at index 1 is 1
-- - The Fibonacci number at index n is the sum of the numbers at
-- indices n - 1 and n - 2.
fibonacci 0 = 0
fibonacci 1 = 1
fibonacci n = fibonacci (n - 1) + fibonacci (n - 2)
-- Evaluating fibonacci 4:
-- > fibonacci 4
-- > fibonacci 3 + fibonacci 2
-- > (fibonacci 2 + fibonacci 1) + (fibonacci 1 + fibonacci 0)
-- > ((fibonacci 1 + fibonacci 0) + 1) + (1 + 0)
-- > ((1 + 0) + 1) + 1
-- > (1 + 1) + 1
-- > 2 + 1
-- > 3
--
-- The tail-recursive implementation is not as obvious. At each step we
-- must keep track of n - 1 and n - 2, and at the next step n - 1
-- becomes n - 2 and n becomes n - 1.
--
-- We write out the first elements of the Fibonacci sequence:
--
-- 0 1 1 2 3 5 8 13
--
-- and we want to compute them on the way forward. For that we must keep
-- two accumulators, one for each partial result of the Fibonacci
-- sequence. We "move" the accumulator associated with (n - 1) into the
-- one associated with (n - 2) after computing the new partial result:
--
-- acc1_0 = 1; acc1_1 = (1 + 0) = 1; acc1_2 = 2; acc1_3 = 3; ...
-- acc2_0 = 0; acc2_1 = acc1_0 = 1; acc2_2 = 1; acc2_3 = 2; ...
fibonacciTail n = let
fibonacciAux acc1 acc2 0 = acc2
fibonacciAux acc1 acc2 n = fibonacciAux (acc1 + acc2) acc1 (n - 1)
in fibonacciAux 1 0 n
-- Note: the base case fibonacciAux acc1 acc2 1 is not necessary; it is
-- computed naturally when going from 1 to 0 (acc2 receives the old
-- value of acc1 and is returned by the function).
-- 3. We have two functions to implement: the concatenation of two lists
-- and the reverse of a list. The two functions are used to illustrate
-- that recursion can sometimes be done "naturally" in tail position.
--
-- 3.a. Concatenation of two lists. Let's take an example:
-- cat [1,2,3,4] [5,6,7] = [1,2,3,4,5,6,7]
--
-- We can view it as appending [5,6,7] "at the tail of [1,2,3,4]".
-- Since we cannot access the tail of [1,2,3,4] directly, we must
-- traverse it until we reach the empty list.
--
-- In plain language:
-- - the concatenation of the empty list l1 with a list l2 is the list l2
-- - the concatenation of a list made of the element h and the list l1 is
-- the list made of the element h and the concatenation of l1 to l2.
cat [] l2 = l2
cat (h : l1) l2 = h : cat l1 l2
-- Evaluation:
-- > cat [1,2,3,4] [5,6,7]
-- > 1 : cat [2,3,4] [5,6,7]
-- > 1 : 2 : cat [3,4] [5,6,7]
-- > 1 : 2 : 3 : cat [4] [5,6,7]
-- > 1 : 2 : 3 : 4 : cat [] [5,6,7]
-- > 1 : 2 : 3 : 4 : [5,6,7]
--
-- Although cat is not tail-recursive, it is what is called
-- "tail-recursive modulo cons", i.e. the last call is a cons.
-- Tail-recursive modulo cons functions can in turn be optimized to
-- use constant stack space.
-- 3.b. Reversing the order of the elements of a list. At first glance,
-- the most intuitive way to implement the function would use append,
-- i.e. for the recursive step we append at the tail of the list. The
-- drawback of this method is that it runs in O(n^2).
--
-- A more natural implementation, however, is the one in which we cons
-- onto an accumulator and return the accumulator in the base case:
-- - The reverse of the empty list is the accumulator
-- - The reverse of a list made of h and l is the same as the reverse
-- of l when the accumulator has h as its first element.
inv l = let invAux acc [] = acc
invAux acc (h : l) = invAux (h : acc) l
in invAux [] l
-- Evaluation:
-- > inv [1,2,3,4]
-- > invAux [] [1,2,3,4]
-- > invAux (1 : []) [2,3,4]
-- > invAux (2 : [1]) [3,4]
-- > invAux (3 : [2,1]) [4]
-- > invAux (4 : [3,2,1]) []
-- > [4,3,2,1]
-- 4. Sorting on lists.
-- 4.a. Merge sort
--
-- We consider two base cases: the empty list and the single-element
-- list. The second case is used to let the recursion stop naturally
-- when the list is split in two.
mergeSort [] = []
mergeSort [x] = [x]
mergeSort l = let untilSplit = length l `div` 2
-- the merge function
merge l1 [] = l1
merge [] l2 = l2
merge (h1 : l1) (h2 : l2) = if h1 < h2
then h1 : merge l1 (h2 : l2)
else h2 : merge (h1 : l1) l2
-- split the list into two halves (depending on
-- the number of elements)
left = take untilSplit l
right = drop untilSplit l
-- sort the partial results and merge them
in merge (mergeSort left) (mergeSort right)
-- 4.b. Insertion sort
--
-- It is somewhat similar to bubble sort, except that instead of swapping
-- it traverses the list and inserts the elements in the desired order.
insertionSort [] = []
insertionSort (h : l) = let insert e [] = [e]
insert e (h : l) = if e < h
-- if the element is
-- smaller than the head
-- of the list, insert it
-- at the head
then e : h : l
-- otherwise find it another place
else h : insert e l
-- insert the element into the sorted list
in insert h (insertionSort l)
-- 4.c. QuickSort
--
-- The idea behind the algorithm:
-- - Pick a pivot element
-- - Split the list into two sublists:
-- + The sublist containing the elements < pivot
-- + The sublist containing the elements >= pivot
-- - Concatenate the resulting lists plus the pivot
quickSort [] = []
quickSort (p : l) = let left = filter (< p) l
right = filter (>= p) l
in quickSort left ++ [p] ++ quickSort right
-- 5. The number of inversions in a list
--
-- We use the same definition as in the lab: given a list l, with l[i]
-- being the element at position i (where i starts at 0 and ends at the
-- length of the list - 1), find the number of elements of the list
-- that satisfy the property l[i] > l[j] and i < j.
--
-- (Or, intuitively, the number of elements that are not "in the
-- position they should be" relative to a sorted list.)
--
-- Intuitively, we must compare every two elements of the list together
-- with their positions, and add 1 to the partial result (aka the
-- "accumulator") when the property holds.
numberOfInversions l =
let -- finished traversing l1, return the result
go [] l2 n1 n2 acc = acc
-- finished traversing l2, traverse it again for the rest of l1
go (h1 : l1) [] n1 n2 acc = go l1 l (n1 + 1) 0 acc
go (h1 : l1) (h2 : l2) n1 n2 acc =
-- if there is an inversion, increment acc, otherwise leave it as is
let acc1 = if h1 > h2 && n1 < n2 then acc + 1 else acc
in go (h1 : l1) l2 n1 (n2 + 1) acc1
in go l l 0 0 0
-- Alternatively, mergeSort can be modified to count the inversions in
-- a list (in the merge step); see the sketch below.
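-- A sketch of that alternative (added for illustration, not part of the
-- original lab): a merge sort whose merge step also counts, for every
-- element taken from the right half, how many elements of the left half
-- it jumps over.
countInversions :: Ord a => [a] -> Int
countInversions = snd . go
  where
    go []  = ([], 0)
    go [x] = ([x], 0)
    go xs  = let (l, r)   = splitAt (length xs `div` 2) xs
                 (l', nl) = go l
                 (r', nr) = go r
                 (m, nm)  = merge l' r'
             in (m, nl + nr + nm)
    merge xs [] = (xs, 0)
    merge [] ys = (ys, 0)
    merge (x:xs) (y:ys)
      | x <= y    = let (m, n) = merge xs (y:ys) in (x:m, n)
      | otherwise = let (m, n) = merge (x:xs) ys in (y:m, n + length (x:xs))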
|
spyked/slides
|
misc-notes/pp-cb-labs/lab-02/lab-02.hs
|
cc0-1.0
| 8,319 | 2 | 16 | 2,359 | 995 | 568 | 427 | 46 | 4 |
-- Project Euler Problem 30 - digit fifth powers
--
-- sum all numbers that are the sum of the fifth powers of their digits
--
-- (does not include 1)
--
import Data.List
import Data.Char
digits x = [ digitToInt y | y <- (show x)]
p5dsum x = sum [ y^5 | y <- digits x]
max_possible = (9^5)*10 -- assuming no more than ten digits, which is safe since 10*9^5 = 590490 has only 6 digits (bound not tight)
main = do
print ( sum [ x | x<-[2..max_possible], x==(p5dsum x)] )
|
yunwilliamyu/programming-exercises
|
project_euler/p030_digit_fifth_powers.hs
|
cc0-1.0
| 468 | 2 | 14 | 106 | 140 | 75 | 65 | 7 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module Brainfuck.Machine where
-- $Id$
import Machine.Class
import Machine.Akzeptieren
import qualified Challenger as C
import qualified Machine.Acceptor.Type as A
import Brainfuck.Syntax.Data
import qualified Brainfuck.Env as E
--import Brainfuck.Nachfolger ( folgekonfigurationen )
import Autolib.Reporter
import Autolib.Reporter.Set
import qualified Autolib.Reporter.Checker
-- import qualified Autolib.Subset
import Autolib.Set
import Autolib.Size
import Autolib.ToDoc
instance Compute Statement E.Env where
next st env = mkSet [ E.next env | not $ E.accepting env ] -- successor environments
accepting st env = E.accepting env -- finished?
depth st env = fromIntegral $ E.depth env -- number of steps
instance In Statement [ Integer ] E.Env where
input_reporter st input = do
return $ E.newEnv input [st]
instance Out Statement [ Integer ] E.Env where
output_reporter st env = return $ E.output env
instance Encode [Integer] where
-- unary
encode xs = xs
instance Decode [Integer] where
decode xs = case xs of
x : _ -> x
[] -> 0
{-
instance BrainfuckC y z =>
C.Partial A.Acceptor ( A.Type ( Brainfuck y z ) [y] )
( Brainfuck y z )
where
describe p i = vcat
[ text "Gesucht ist eine Maschine/ein Programm,"
, nest 4 $ A.machine_info i
, text "das diese Sprache akzeptiert:"
, nest 4 $ A.data_info i
, text "diese Eingaben sollen akzeptiert werden:"
, nest 4 $ toDoc $ A.yeah i
, text "diese Eingaben sollen nicht akzeptiert werden:"
, nest 4 $ toDoc $ A.noh i
]
initial p i = A.start i
partial p i b = Autolib.Reporter.Checker.run ( A.check i ) b
total p i b = do
positiv_liste (A.cut i) b $ A.yeah i
negativ_liste (A.cut i) b $ A.noh i
return () -- größe der maschine (hier) ignorieren
-}
|
Erdwolf/autotool-bonn
|
src/Brainfuck/Machine.hs
|
gpl-2.0
| 1,896 | 0 | 11 | 446 | 307 | 171 | 136 | 29 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Toy.Backend.Classify where
import AI.SVM.Base
import AI.SVM.Simple
import Data.Aeson
import Data.Aeson.TH
import Data.Text (pack)
data Classify = Classify FilePath Double deriving Show
deriveJSON defaultOptions ''Classify
classify :: Classify -> Int -> Bool -> Bool -> IO Bool
classify (Classify fp threshold) age smile gender = do
svm <- loadSVM fp
let b2d :: Bool -> Double
b2d = \x -> if x then 1 else -1
prediction = predict svm [fromIntegral age / 100, b2d gender, b2d smile]
return $ prediction > threshold
|
Qinka/reimagined-pancake
|
toy-backend/toy-backend-classify/src/svm/Toy/Backend/Classify.hs
|
gpl-3.0
| 630 | 0 | 13 | 164 | 197 | 105 | 92 | 16 | 2 |
-- Haskell Practical 4 Code - Domain Functions
-- By James Cowgill
module Prac4.DomFunc where
-- Domain function lists
-- Contain a list with the domain and a function to map the domain to values
type DomFunc n v = ([n], n -> v)
-- The empty list
empty :: DomFunc n v
empty = ([], undefined)
-- Return list of names in the list
names :: DomFunc n v -> [n]
names = fst
-- Return true if first argument is in the list
inAssoc :: Eq n => n -> DomFunc n v -> Bool
inAssoc val df = elem val (names df)
-- Fetch item from list or raise an error if it doesn't exist
fetch :: (Eq n, Show n) => n -> DomFunc n v -> v
fetch n df@(d, f) | inAssoc n df = f n
| otherwise = error ("key `" ++ (show n) ++ "` does not exist")
-- Updates / adds an item in the list
update :: Eq n => n -> v -> DomFunc n v -> DomFunc n v
update k v df@(d, f) = (newD, newF)
where
newD | inAssoc k df = d
| otherwise = [k] ++ d
newF n | n == k = v
| otherwise = f n
-- Conversion to association lists
toAssocList :: DomFunc n v -> [(n, v)]
toAssocList (d, f) = [(n, f n) | n <- d]
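-- A small usage sketch (added for illustration, not part of the original
-- practical): build a two-entry domain function and query it.
demoDomFunc :: (Bool, Int, [(String, Int)])
demoDomFunc = (inAssoc "x" df, fetch "y" df, toAssocList df)
  where df = update "y" 2 (update "x" 1 empty)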
|
jcowgill/cs-work
|
syac/compilers/Prac4/DomFunc.hs
|
gpl-3.0
| 1,145 | 0 | 11 | 340 | 405 | 215 | 190 | 19 | 1 |
{-
----------------------------------------------------------------------------------
- Copyright (C) 2010-2011 Massachusetts Institute of Technology
- Copyright (C) 2010-2011 Yuan Tang <[email protected]>
- Charles E. Leiserson <[email protected]>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
- Suggestsions: [email protected]
- Bugs: [email protected]
-
--------------------------------------------------------------------------------
-}
module Main where
import System
import IO hiding (try) -- "try" is also defined in Parsec
import Data.List
import System.Directory
import System.Cmd (rawSystem)
import Data.Char (isSpace)
import qualified Data.Map as Map
import Text.ParserCombinators.Parsec (runParser)
import PData
import PMainParser
main :: IO ()
main = do args <- getArgs
whilst (null args) $ do
printUsage
exitFailure
let (inFiles, inDirs, mode, debug, showFile, userArgs)
= parseArgs ([], [], PDefault, False, True, []) args
whilst (mode == PHelp) $ do
printOptions
exitFailure
whilst (mode /= PNoPP) $ do
ppopp (mode, debug, showFile, userArgs) (zip inFiles inDirs)
-- pass everything to icc after preprocessing and Pochoir optimization
let iccArgs = userArgs
putStrLn (icc ++ " " ++ intercalate " " iccArgs)
rawSystem icc iccArgs
whilst (showFile == False) $ do
let outFiles = map (rename "_pochoir") inFiles
removeFile $ intercalate " " outFiles
whilst :: Bool -> IO () -> IO ()
whilst True action = action
whilst False action = return ()
ppopp :: (PMode, Bool, Bool, [String]) -> [(String, String)] -> IO ()
ppopp (_, _, _, _) [] = return ()
ppopp (mode, debug, showFile, userArgs) ((inFile, inDir):files) =
do putStrLn ("pochoir called with mode =" ++ show mode)
pochoirLibPath <- catch (getEnv "POCHOIR_LIB_PATH")(\e -> return "EnvError")
whilst (pochoirLibPath == "EnvError") $ do
putStrLn ("Pochoir environment variable not set:")
putStrLn ("POCHOIR_LIB_PATH")
exitFailure
{-
cilkStubPath <- catch (getEnv "CILK_HEADER_PATH")(\e -> return "EnvError")
whilst (cilkStubPath == "EnvError") $ do
putStrLn ("Environment variable CILK_HEADER_PATH is NOT set")
exitFailure
let envPath = ["-I" ++ cilkStubPath] ++ ["-I" ++ pochoirLibPath]
-}
let envPath = ["-I" ++ pochoirLibPath]
let iccPPFile = inDir ++ getPPFile inFile
let iccPPArgs = if debug == False
then iccPPFlags ++ envPath ++ [inFile]
else iccDebugPPFlags ++ envPath ++ [inFile]
-- a pass of icc preprocessing
putStrLn (icc ++ " " ++ intercalate " " iccPPArgs)
rawSystem icc iccPPArgs
-- a pass of pochoir compilation
whilst (mode /= PDebug) $ do
let outFile = rename "_pochoir" inFile
inh <- openFile iccPPFile ReadMode
outh <- openFile outFile WriteMode
putStrLn ("pochoir " ++ show mode ++ " " ++ iccPPFile)
pProcess mode inh outh
hClose inh
hClose outh
whilst (mode == PDebug) $ do
let midFile = getMidFile inFile
let outFile = rename "_pochoir" midFile
putStrLn ("mv " ++ midFile ++ " " ++ outFile)
renameFile midFile outFile
ppopp (mode, debug, showFile, userArgs) files
getMidFile :: String -> String
getMidFile a
| isSuffixOf ".cpp" a || isSuffixOf ".cxx" a = take (length a - 4) a ++ ".i"
| otherwise = a
rename :: String -> String -> String
rename pSuffix fname = name ++ pSuffix ++ ".cpp"
where (name, suffix) = break ('.' ==) fname
getPPFile :: String -> String
getPPFile fname = name ++ ".i"
where (name, suffix) = break ('.' ==) fname
{-
getObjFile :: String -> String -> [String]
getObjFile dir fname = ["-o"] ++ [dir++name]
where (name, suffix) = break ('.' ==) fname
-}
pInitState = ParserState { pMode = PCaching, pState = Unrelated, pMacro = Map.empty, pArray = Map.empty, pStencil = Map.empty, pShape = Map.empty, pRange = Map.empty, pKernel = Map.empty}
icc = "icpc"
iccFlags = ["-O3", "-DNDEBUG", "-std=c++0x", "-Wall", "-Werror", "-ipo"]
iccPPFlags = ["-P", "-C", "-DNCHECK_SHAPE", "-DNDEBUG", "-std=c++0x", "-Wall", "-Werror", "-ipo"]
-- iccDebugFlags = ["-DDEBUG", "-O0", "-g3", "-std=c++0x", "-include", "cilk_stub.h"]
iccDebugFlags = ["-DDEBUG", "-O0", "-g3", "-std=c++0x"]
-- iccDebugPPFlags = ["-P", "-C", "-DCHECK_SHAPE", "-DDEBUG", "-g3", "-std=c++0x", "-include", "cilk_stub.h"]
iccDebugPPFlags = ["-P", "-C", "-DCHECK_SHAPE", "-DDEBUG", "-g3", "-std=c++0x"]
parseArgs :: ([String], [String], PMode, Bool, Bool, [String]) -> [String] -> ([String], [String], PMode, Bool, Bool, [String])
parseArgs (inFiles, inDirs, mode, debug, showFile, userArgs) aL
| elem "--help" aL =
let l_mode = PHelp
aL' = delete "--help" aL
in (inFiles, inDirs, l_mode, debug, showFile, aL')
| elem "-h" aL =
let l_mode = PHelp
aL' = delete "-h" aL
in (inFiles, inDirs, l_mode, debug, showFile, aL')
| elem "-auto-optimize" aL =
let l_mode = PDefault
aL' = delete "-auto-optimize" aL
in (inFiles, inDirs, l_mode, debug, showFile, aL')
| elem "-split-caching" aL =
let l_mode = PCaching
aL' = delete "-split-caching" aL
in parseArgs (inFiles, inDirs, l_mode, debug, showFile, aL') aL'
| elem "-split-c-pointer" aL =
let l_mode = PCPointer
aL' = delete "-split-c-pointer" aL
in parseArgs (inFiles, inDirs, l_mode, debug, showFile, aL') aL'
| elem "-split-opt-pointer" aL =
let l_mode = POptPointer
aL' = delete "-split-opt-pointer" aL
in parseArgs (inFiles, inDirs, l_mode, debug, showFile, aL') aL'
| elem "-split-pointer" aL =
let l_mode = PPointer
aL' = delete "-split-pointer" aL
in parseArgs (inFiles, inDirs, l_mode, debug, showFile, aL') aL'
| elem "-split-macro-shadow" aL =
let l_mode = PMacroShadow
aL' = delete "-split-macro-shadow" aL
in parseArgs (inFiles, inDirs, l_mode, debug, showFile, aL') aL'
| elem "-showFile" aL =
let l_showFile = True
aL' = delete "-showFile" aL
in parseArgs (inFiles, inDirs, mode, debug, l_showFile, aL') aL'
| elem "-debug" aL =
let l_debug = True
l_mode = PDebug
aL' = delete "-debug" aL
in parseArgs (inFiles, inDirs, l_mode, l_debug, showFile, aL') aL'
| null aL == False =
let (l_files, l_dirs, l_mode, aL') = findCPP aL ([], [], mode, aL)
in (l_files, l_dirs, l_mode, debug, showFile, aL')
| otherwise =
let l_mode = PNoPP
in (inFiles, inDirs, l_mode, debug, showFile, aL)
findCPP :: [String] -> ([String], [String], PMode, [String]) -> ([String], [String], PMode, [String])
findCPP [] (l_files, l_dirs, l_mode, l_al) =
let l_mode' =
if null l_files == True || null l_dirs == True then PNoPP else l_mode
in (l_files, l_dirs, l_mode', l_al)
findCPP (a:as) (l_files, l_dirs, l_mode, l_al)
| isSuffixOf ".cpp" a || isSuffixOf ".cxx" a =
let l_file = drop (1 + (pLast $ findIndices (== '/') a)) a
l_dir = take (1 + (pLast $ findIndices (== '/') a)) a
l_files' = l_files ++ [l_file]
l_dirs' = l_dirs ++ [l_dir]
pLast [] = -1
pLast aL@(a:as) = last aL
l_pochoir_file = rename "_pochoir" l_file
(prefix, suffix) = break (a == ) l_al
l_al' = prefix ++ [l_pochoir_file] ++ tail suffix
in findCPP as (l_files', l_dirs', l_mode, l_al')
| otherwise = findCPP as (l_files, l_dirs, l_mode, l_al)
printUsage :: IO ()
printUsage =
do putStrLn ("Usage: pochoir [OPTION] [filename]")
putStrLn ("Try `pochoir --help' for more options.")
printOptions :: IO ()
printOptions =
do putStrLn ("Usage: pochoir [OPTION] [filename]")
putStrLn ("Run the Pochoir stencil compiler on [filename].")
putStrLn ("-auto-optimize : " ++ breakline ++ "Let the Pochoir compiler automatically choose the best optimizing level for you! (default)")
putStrLn ("-split-macro-shadow $filename : " ++ breakline ++
"using macro tricks to split the interior and boundary regions")
putStrLn ("-split-pointer $filename : " ++ breakline ++
"Default Mode : split the interior and boundary region, and using C-style pointer to optimize the base case")
pProcess :: PMode -> Handle -> Handle -> IO ()
pProcess mode inh outh =
do ls <- hGetContents inh
let pRevInitState = pInitState { pMode = mode }
case runParser pParser pRevInitState "" $ stripWhite ls of
Left err -> print err
Right str -> hPutStrLn outh str
|
rrnewton/pochoir-first-history-fix-attempt
|
PMain.hs
|
gpl-3.0
| 9,712 | 0 | 16 | 2,620 | 2,608 | 1,378 | 1,230 | 165 | 3 |
-- P21 Insert an element at a given position into a list.
-- Using "splitAt"
f1 :: Int -> a -> [a] -> [a]
f1 n x ys = let (a, b) = splitAt (pred n) ys in a ++ x : b
-- By index (inefficient)
f2 :: Int -> a -> [a] -> [a]
f2 n x ys = take (pred n) ys ++ x : drop (pred n) ys
-- Recursion
f3 :: Int -> a -> [a] -> [a]
f3 1 x ys = x : ys
f3 n x (y:ys) = y : f3 (pred n) x ys
-- Tail recursion
f4 :: Int -> a -> [a] -> [a]
f4 = f []
where f acc 1 x ys = reverse acc ++ x : ys
f acc n x (y:ys) = f (y:acc) (pred n) x ys
|
pavelfatin/ninety-nine
|
haskell/p21.hs
|
gpl-3.0
| 531 | 1 | 11 | 161 | 341 | 171 | 170 | 11 | 2 |
{-
Guess
Copyright (C) 2017
Jonathan Lamothe <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Main where
import Control.Monad
import System.Exit
import Test.HUnit
main = do
counts <- runTestTT tests
when (failures counts > 0 || errors counts > 0)
exitFailure
tests :: Test
tests = TestList []
-- jl
|
jlamothe/guess
|
tests.hs
|
gpl-3.0
| 911 | 0 | 12 | 162 | 80 | 42 | 38 | 10 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, TemplateHaskell, QuasiQuotes #-}
-- | Compile Lamdu vals to Javascript
module Lamdu.Eval.JS.Compiler
( Actions(..)
, ValId(..)
, compileRepl, Mode(..), MemoDefs(..), loggingEnabled
) where
import qualified Control.Lens as Lens
import Control.Monad.State (MonadState)
import Control.Monad.Trans.FastRWS (RWST, runRWST)
import Control.Monad.Writer (MonadWriter(..), censor)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base16 as Hex
import qualified Data.Char as Char
import Data.Default () -- instances
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import Data.Text.Encoding (decodeUtf8)
import Data.UUID.Types (UUID)
import qualified Data.UUID.Utils as UUIDUtils
import Hyper
import Hyper.Syntax.Nominal (ToNom(..))
import Hyper.Syntax.Row (RowExtend(..))
import Hyper.Type.Prune (Prune)
import qualified Lamdu.Builtins.Anchors as Builtins
import qualified Lamdu.Builtins.PrimVal as PrimVal
import Lamdu.Calc.Definition (depsGlobalTypes)
import Lamdu.Calc.Identifier (identHex)
import Lamdu.Calc.Infer (alphaEq)
import qualified Lamdu.Calc.Lens as ExprLens
import Lamdu.Calc.Term (Val)
import qualified Lamdu.Calc.Term as V
import qualified Lamdu.Calc.Term.Utils as Flatten
import qualified Lamdu.Calc.Type as T
import Lamdu.Data.Anchors (anonTag)
import qualified Lamdu.Data.Definition as Definition
import qualified Lamdu.Eval.Results as ER
import qualified Lamdu.Expr.UniqueId as UniqueId
import qualified Language.ECMAScript3.PrettyPrint as JSPP
import qualified Language.ECMAScript3.Syntax as JSS
import qualified Language.ECMAScript3.Syntax.CodeGen as JS
import Language.ECMAScript3.Syntax.QuasiQuote (jsstmt)
import Numeric.Lens (hex)
import qualified Text.PrettyPrint.ANSI.Leijen as Pretty
import Lamdu.Prelude
newtype ValId = ValId UUID
data IsTailCall = TailCall | NotTailCall
data MemoDefs
= MemoDefs -- Compatible with SlowLogging mode and used when running from Lamdu
| ReleaseDontMemoDefs
deriving Show
data Mode = Fast MemoDefs | SlowLogging LoggingInfo
deriving Show
data Actions m = Actions
{ readAssocName :: T.Tag -> m Text
, readAssocTag :: UUID -> m T.Tag
, readGlobal :: V.Var -> m (Definition.Definition (Val ValId) ())
, readGlobalType :: V.Var -> m (Pure # T.Scheme)
, output :: String -> m ()
, loggingMode :: Mode
}
type LocalVarName = JSS.Id ()
type GlobalVarName = JSS.Id ()
newtype LoggingInfo = LoggingInfo
{ _liScopeDepth :: Int
} deriving stock Show
Lens.makeLenses ''LoggingInfo
data Env m = Env
{ _envActions :: Actions m
, _envLocals :: Map V.Var LocalVarName
, _envMode :: Mode
, _envExpectedTypes :: Map V.Var (Pure # T.Scheme)
, _envCurrentGlobal :: ER.WhichGlobal
}
Lens.makeLenses ''Env
data NameKind
= VarName -- name of Javascript variable (never exposed outside)
| TagName -- name of record field or inject-tag identifier
deriving (Eq, Ord, Show)
data State = State
{ _freshId :: Int
, _names :: Map (NameKind, Text) (Map UUID Text)
, _globalVarNames :: Map V.Var GlobalVarName
, _globalTypes :: Map V.Var (Pure # T.Scheme)
}
Lens.makeLenses ''State
data LogUsed
= LogUnused
| LogUsed
deriving (Eq, Ord, Show)
instance Semigroup LogUsed where
LogUsed <> _ = LogUsed
_ <> LogUsed = LogUsed
_ <> _ = LogUnused
instance Monoid LogUsed where mempty = LogUnused
newtype M m a = M { unM :: RWST (Env m) LogUsed State m a }
deriving newtype
( Functor, Applicative, Monad
, MonadReader (Env m)
, MonadWriter LogUsed
, MonadState State
)
infixl 4 $.
($.) :: JSS.Expression () -> JSS.Id () -> JSS.Expression ()
($.) = JS.dot
infixl 3 $$
($$) :: JSS.Expression () -> JSS.Expression () -> JSS.Expression ()
f $$ x = f `JS.call` [x]
pp :: JSS.Statement () -> String
pp = (`Pretty.displayS`"") . Pretty.renderPretty 1.0 90 . JSPP.prettyPrint
performAction :: Monad m => (Actions m -> m a) -> M m a
performAction f = Lens.view envActions <&> f >>= lift & M
ppOut :: Monad m => JSS.Statement () -> M m ()
ppOut stmt = performAction (`output` pp stmt)
-- Declaring multiple vars with a single "var" is badly formatted and generally
-- less readable than a separate vardecl for each:
varinit :: JSS.Id () -> JSS.Expression () -> JSS.Statement ()
varinit ident expr = JS.vardecls [JS.varinit ident expr]
scopeIdent :: Int -> JSS.Id ()
scopeIdent depth = "scopeId_" ++ show depth & JS.ident
rts :: JSS.Id () -> JSS.Expression ()
rts = (JS.var "rts" $.)
declLog :: Int -> JSS.Statement ()
declLog depth =
varinit "log" $
JS.lambda ["exprId", "result"]
[ rts "logResult" `JS.call`
[ JS.var (scopeIdent depth)
, JS.var "exprId"
, JS.var "result"
] & JS.returns
]
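-- Illustration (editor's note, not in the original source): for a scope depth
-- of 2 the statement built above pretty-prints to roughly
--   var log = function (exprId, result) { return rts.logResult(scopeId_2, exprId, result); };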
-- | Taken from http://www.ecma-international.org/ecma-262/6.0/#sec-keywords
jsReservedKeywords :: Set Text
jsReservedKeywords =
Set.fromList
[ "break" , "do" , "in" , "typeof"
, "case" , "else" , "instanceof", "var"
, "catch" , "export" , "new" , "void"
, "class" , "extends" , "return" , "while"
, "const" , "finally" , "super" , "with"
, "continue" , "for" , "switch" , "yield"
, "debugger" , "function" , "this" , "default"
, "if" , "throw" , "delete" , "import"
, "try" , "let" , "static" , "enum"
, "await" , "implements", "package" , "protected"
, "interface", "private" , "public"
]
jsReservedNamespace :: Set Text
jsReservedNamespace =
Set.fromList
[ "x", "repl"
, "Object", "console", "repl"
, "log", "scopeCounter", "rts"
, "tag", "data", "array", "bytes", "func", "cacheId", "number"
, "trampolineTo"
]
jsAllReserved :: Set Text
jsAllReserved = jsReservedNamespace <> jsReservedKeywords
isReservedName :: Text -> Bool
isReservedName name =
jsAllReserved ^. Lens.contains name
|| any (`Text.isPrefixOf` name)
[ "global_"
, "local_"
, "scopeId_"
]
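-- Illustration (editor's note, not in the original source):
--   isReservedName "for"       == True   -- ECMAScript keyword
--   isReservedName "log"       == True   -- part of the runtime namespace
--   isReservedName "scopeId_7" == True   -- reserved prefix
--   isReservedName "myVar"     == False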
topLevelDecls :: Mode -> [JSS.Statement ()]
topLevelDecls mode =
( [ [jsstmt|"use strict";|]
, [jsstmt|var rts = require('rts.js');|]
] <&> void
) ++
case mode of
Fast{} -> []
SlowLogging{} ->
( [ [jsstmt|var scopeId_0 = 0;|]
, [jsstmt|var scopeCounter = 1;|]
] <&> void
) ++
[ declLog 0
]
loggingEnabled :: Mode
loggingEnabled = SlowLogging LoggingInfo { _liScopeDepth = 0 }
compileRepl :: Monad m => Actions m -> Definition.Expr (Val ValId) -> m ()
compileRepl actions defExpr =
runRWST
( traverse_ ppOut (topLevelDecls (loggingMode actions))
>> compileDefExpr defExpr
<&> codeGenExpression <&> scaffold >>= traverse_ ppOut & unM
) (initialEnv actions) initialState <&> (^. _1)
-- | Top-level wrapper for the code (catch exceptions in repl
-- execution, log it, and export the module symbols)
scaffold :: JSS.Expression () -> [JSS.Statement ()]
scaffold replExpr =
[ JS.trycatch
( JS.block
[ varinit "repl" replExpr
, void [jsstmt|rts.logRepl(repl);|]
]
)
( JS.catch "err"
(JS.block [ void [jsstmt|rts.logReplErr(err);|] ])
)
Nothing
    , -- This form avoids outputting repl's value in interactive mode
void [jsstmt|(function() { module.exports = repl; })();|]
]
initialEnv :: Actions m -> Env m
initialEnv actions =
Env
{ _envActions = actions
, _envLocals = mempty
, _envMode = loggingMode actions
, _envExpectedTypes = mempty
, _envCurrentGlobal = ER.GlobalRepl
}
initialState :: State
initialState =
State
{ _freshId = 0
, _names = mempty
, _globalVarNames = mempty
, _globalTypes = mempty
}
-- | Reset reader/writer components of RWS for a new global compilation context
withGlobal :: Monad m => ER.WhichGlobal -> M m a -> M m a
withGlobal whichGlobal act =
act
& censor (const LogUnused)
& local (envLocals .~ mempty)
& local (\x -> x & envMode .~ loggingMode (x ^. envActions))
& local (envCurrentGlobal .~ whichGlobal)
freshName :: Monad m => Text -> M m Text
freshName prefix =
freshId <+= 1
<&> show
<&> Text.pack
<&> (prefix <>)
& M
avoidReservedNames :: Text -> Text
avoidReservedNames name
| isReservedName name = "_" <> name
| otherwise = name
escapeName :: Text -> Text
escapeName name =
case Text.unpack name of
(d:xs) | Char.isDigit d -> '_' : d : replaceSpecialChars xs
xs -> replaceSpecialChars xs
& Text.pack
replaceSpecialChars :: String -> String
replaceSpecialChars = concatMap replaceSpecial
where
replaceSpecial x
| Char.isAlphaNum x = [x]
| x == '_' = "__"
| otherwise = '_' : ((hex #) . Char.ord) x ++ "_"
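-- Illustration (editor's note, not in the original source):
--   escapeName "foo" == "foo"
--   escapeName "2nd" == "_2nd"     -- a leading digit gets an underscore prefix
--   escapeName "a_b" == "a__b"     -- '_' is doubled
--   escapeName "a-b" == "a_2d_b"   -- other characters become _<hex of char code>_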
readName :: (UniqueId.ToUUID a, Monad m) => a -> M m Text -> M m Text
readName g act =
do
tag <- performAction (`readAssocTag` uuid)
(if tag == anonTag then act else readTagName tag act)
>>= generatedName VarName uuid
where
uuid = UniqueId.toUUID g
generatedName :: Monad m => NameKind -> UUID -> Text -> M m Text
generatedName kind uuid name =
names . Lens.at (kind, name) %%=
\case
Nothing -> (name, Just (Map.singleton uuid name))
Just uuidMap ->
uuidMap
& Lens.at uuid %%~
\case
Nothing -> (newName, Just newName)
where
newName = name <> Text.pack (show (Map.size uuidMap))
Just oldName -> (oldName, Just oldName)
<&> Just
readTagName :: Monad m => T.Tag -> M m Text -> M m Text
readTagName tag act =
performAction (`readAssocName` tag)
<&> avoidReservedNames
<&> escapeName
>>=
\case
"" -> act
name -> pure name
freshStoredName :: (Monad m, UniqueId.ToUUID a) => a -> Text -> M m Text
freshStoredName g prefix = readName g (freshName prefix)
tagString :: Monad m => T.Tag -> M m Text
tagString tag@(T.Tag ident) =
"tag" ++ identHex ident & Text.pack & pure
& readTagName tag
>>= generatedName TagName (UniqueId.toUUID tag)
tagIdent :: Monad m => T.Tag -> M m (JSS.Id ())
tagIdent = fmap (JS.ident . Text.unpack) . tagString
withLocalVar :: Monad m => V.Var -> M m a -> M m (LocalVarName, a)
withLocalVar v act =
do
varName <- freshStoredName v "local_" <&> Text.unpack <&> JS.ident
res <- local (envLocals . Lens.at v ?~ varName) act
pure (varName, res)
compileDefExpr :: Monad m => Definition.Expr (Val ValId) -> M m CodeGen
compileDefExpr (Definition.Expr x frozenDeps) =
compileVal NotTailCall x & local (envExpectedTypes .~ frozenDeps ^. depsGlobalTypes)
compileGlobal :: Monad m => V.Var -> M m CodeGen
compileGlobal globalId =
do
def <- performAction (`readGlobal` globalId)
globalTypes . Lens.at globalId ?= def ^. Definition.defType
case def ^. Definition.defBody of
Definition.BodyBuiltin ffiName -> ffiCompile ffiName & codeGenFromExpr & pure
Definition.BodyExpr defExpr -> compileDefExpr defExpr
& withGlobal (ER.GlobalDef globalId)
throwErr :: Monad m => ValId -> ER.CompiledErrorType -> M m CodeGen
throwErr valId err =
Lens.view envCurrentGlobal
<&> \curGlobal ->
[ (rts "exceptions" $. JS.ident (ER.encodeCompiledError err))
`JS.call`
[ JS.string (ER.encodeWhichGlobal curGlobal)
, jsValId valId
] & JS.throw
] & codeGenFromLamStmts
useGlobal :: Mode -> JSS.Id () -> JSS.Expression ()
useGlobal (Fast ReleaseDontMemoDefs) x = JS.var x
useGlobal _ x = JS.var x `JS.call` []
wrapGlobalDef :: Mode -> CodeGen -> JSS.Expression ()
wrapGlobalDef (Fast ReleaseDontMemoDefs) = codeGenExpression
wrapGlobalDef _ = (rts "memo" `JS.call`) . (: []) . JS.lambda [] . codeGenLamStmts
compileGlobalVar :: Monad m => ValId -> V.Var -> M m CodeGen
compileGlobalVar valId var =
do
mode <- Lens.view envMode
let newGlobal =
do
varName <- freshStoredName var "global_" <&> Text.unpack <&> JS.ident
globalVarNames . Lens.at var ?= varName
compileGlobal var
<&> wrapGlobalDef mode
<&> varinit varName
>>= ppOut
pure varName
let loadGlobal =
Lens.use (globalVarNames . Lens.at var)
>>= maybe newGlobal pure
<&> useGlobal mode
<&> codeGenFromExpr
let verifyType expectedType =
do
scheme <-
Lens.use (globalTypes . Lens.at var)
>>= maybe newGlobalType pure
if alphaEq scheme expectedType
then loadGlobal
else throwErr valId ER.DependencyTypeOutOfDate
Lens.view (envExpectedTypes . Lens.at var)
>>= maybe loadGlobal verifyType
where
newGlobalType =
do
scheme <- performAction (`readGlobalType` var)
globalTypes . Lens.at var ?= scheme
pure scheme
compileLocalVar :: JSS.Id () -> CodeGen
compileLocalVar = codeGenFromExpr . JS.var
compileVar :: Monad m => ValId -> V.Var -> M m CodeGen
compileVar valId v =
Lens.view (envLocals . Lens.at v)
>>= maybe (compileGlobalVar valId v) (pure . compileLocalVar)
data CodeGen = CodeGen
{ codeGenLamStmts :: [JSS.Statement ()]
, codeGenExpression :: JSS.Expression ()
}
unitRedex :: [JSS.Statement ()] -> JSS.Expression ()
unitRedex stmts = JS.lambda [] stmts `JS.call` []
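-- Illustration (editor's note, not in the original source): wrapping statements
-- in a unit redex produces JS of the shape
--   function () { <stmts> }()
-- i.e. an immediately-invoked function expression.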
codeGenFromLamStmts :: [JSS.Statement ()] -> CodeGen
codeGenFromLamStmts stmts =
CodeGen
{ codeGenLamStmts = stmts
, codeGenExpression = unitRedex stmts
}
codeGenFromExpr :: JSS.Expression () -> CodeGen
codeGenFromExpr expr =
CodeGen
{ codeGenLamStmts = [JS.returns expr]
, codeGenExpression = expr
}
lam ::
Monad m => Text ->
(JSS.Expression () -> M m [JSS.Statement ()]) ->
M m (JSS.Expression ())
lam prefix code =
do
var <- freshName prefix <&> Text.unpack <&> JS.ident
code (JS.var var) <&> JS.lambda [var]
inject :: JSS.Expression () -> JSS.Expression () -> JSS.Expression ()
inject tagStr dat' =
JS.object
[ (JS.propId "tag", tagStr)
, (JS.propId "data", dat')
]
ffiCompile :: Definition.FFIName -> JSS.Expression ()
ffiCompile (Definition.FFIName modul funcName) =
foldl ($.) (rts "builtins") (modul <&> Text.unpack <&> JS.ident)
`JS.brack` JS.string (Text.unpack funcName)
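-- Illustration (editor's note, not in the original source): a hypothetical
-- FFIName ["Data", "List"] "sort" compiles to the JS expression
--   rts.builtins.Data.List["sort"]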
compileLiteral :: V.PrimVal -> CodeGen
compileLiteral literal =
case PrimVal.toKnown literal of
PrimVal.Bytes bytes ->
rts "bytes" $$ JS.array ints & codeGenFromExpr
where
ints = [JS.int (fromIntegral byte) | byte <- BS.unpack bytes]
PrimVal.Float num -> JS.number num & codeGenFromExpr
PrimVal.Char c -> Char.ord c & fromIntegral & JS.number & codeGenFromExpr
compileRecExtend :: Monad m => RowExtend T.Tag V.Term V.Term # Annotated ValId -> M m CodeGen
compileRecExtend x =
do
Flatten.Composite tags mRest <-
Flatten.recExtend x & Lens.traverse (compileVal NotTailCall)
extends <-
tags ^@.. Lens.itraversed
<&> _2 %~ codeGenExpression
& (traverse . _1) (fmap JS.propId . tagIdent)
<&> JS.object
case mRest of
Nothing -> codeGenFromExpr extends
Just rest ->
codeGenFromLamStmts
[ varinit "x"
((JS.var "Object" $. "assign") `JS.call` [extends, codeGenExpression rest])
, JS.expr (JS.delete (JS.var "x" $. "cacheId"))
, JS.returns (JS.var "x")
]
& pure
compileInject :: Monad m => T.Tag -> M m CodeGen
compileInject tag =
do
var <- freshName "content" <&> Text.unpack <&> JS.ident
tagStr <- tagString tag <&> Text.unpack <&> JS.string
JS.lambda [var] [JS.returns (inject tagStr (JS.var var))] & codeGenFromExpr & pure
compileCase :: Monad m => ValId -> RowExtend T.Tag V.Term V.Term # Annotated ValId -> M m CodeGen
compileCase valId =
-- we're generating a lambda here, which will be called via
-- rts.rerun, so its body is as much a tail-call position as any
-- lambda
fmap codeGenFromExpr . lam "x" . compileCaseOnVar TailCall valId
compileCaseOnVar ::
Monad m =>
IsTailCall -> ValId -> RowExtend T.Tag V.Term V.Term # Annotated ValId ->
JSS.Expression () -> M m [JSS.Statement ()]
compileCaseOnVar isTail valId x scrutineeVar =
do
tagsStr <- tags ^@.. Lens.itraversed & (traverse . _1) tagString
cases <- traverse makeCase tagsStr
defaultCase <-
case mRestHandler of
Nothing -> throwErr valId ER.UnhandledCase
Just restHandler ->
compileAppliedFunc isTail valId restHandler scrutineeVar
<&> codeGenLamStmts
<&> JS.defaultc
pure [JS.switch (scrutineeVar $. "tag") (cases ++ [defaultCase])]
where
Flatten.Composite tags mRestHandler = Flatten.case_ x
makeCase (tagStr, handler) =
compileAppliedFunc isTail valId handler (scrutineeVar $. "data")
<&> codeGenLamStmts
<&> JS.casee (JS.string (Text.unpack tagStr))
compileGetField :: Monad m => T.Tag -> M m CodeGen
compileGetField tag =
do
var <- freshName "record" <&> Text.unpack <&> JS.ident
tagId <- tagIdent tag
JS.lambda [var] [JS.returns (JS.var var `JS.dot` tagId)] & codeGenFromExpr & pure
declMyScopeDepth :: Int -> JSS.Statement ()
declMyScopeDepth depth =
varinit (scopeIdent depth) $
JS.uassign JSS.PostfixInc "scopeCounter"
jsValId :: ValId -> JSS.Expression ()
jsValId (ValId uuid) = (JS.string . Text.unpack . decodeUtf8 . Hex.encode . UUIDUtils.toSBS16) uuid
callLogNewScope :: Int -> Int -> ValId -> JSS.Expression () -> JSS.Statement ()
callLogNewScope parentDepth myDepth lamValId argVal =
rts "logNewScope" `JS.call`
[ JS.var (scopeIdent parentDepth)
, JS.var (scopeIdent myDepth)
, jsValId lamValId
, argVal
] & JS.expr
slowLoggingLambdaPrefix ::
LogUsed -> Int -> ValId -> JSS.Expression () -> [JSS.Statement ()]
slowLoggingLambdaPrefix logUsed parentScopeDepth lamValId argVal =
[ declMyScopeDepth myScopeDepth
, callLogNewScope parentScopeDepth myScopeDepth lamValId argVal
] ++
[ declLog myScopeDepth | LogUsed <- [logUsed] ]
where
myScopeDepth = parentScopeDepth + 1
listenNoTellLogUsed :: Monad m => M m a -> M m (a, LogUsed)
listenNoTellLogUsed = censor (const LogUnused) . listen
compileLambda ::
Monad m =>
ValId ->
V.TypedLam V.Var (HCompose Prune T.Type) V.Term # Annotated ValId ->
M m CodeGen
compileLambda valId (V.TypedLam v _paramTyp res) =
Lens.view envMode
>>= \case
Fast{} -> compileRes <&> mkLambda
SlowLogging loggingInfo ->
do
((varName, lamStmts), logUsed) <-
compileRes
& local
(envMode .~ SlowLogging (loggingInfo & liScopeDepth .~ 1 + parentScopeDepth))
& listenNoTellLogUsed
let stmts =
slowLoggingLambdaPrefix logUsed parentScopeDepth valId
(JS.var varName)
fastLam <- compileRes & local (envMode .~ Fast MemoDefs) <&> mkLambda
rts "wrap" `JS.call`
[fastLam, JS.lambda [varName] (stmts ++ lamStmts)] & pure
where
parentScopeDepth = loggingInfo ^. liScopeDepth
>>= optimizeExpr
<&> codeGenFromExpr
where
mkLambda (varId, lamStmts) = JS.lambda [varId] lamStmts
compileRes = compileVal TailCall res <&> codeGenLamStmts & withLocalVar v
compileApply ::
Monad m => IsTailCall -> ValId -> V.App V.Term # Annotated ValId -> M m CodeGen
compileApply isTail valId (V.App func arg) =
do
arg' <- compileVal NotTailCall arg <&> codeGenExpression
compileAppliedFunc isTail valId func arg'
logSubexprResult :: Monad m => ValId -> CodeGen -> M m CodeGen
logSubexprResult valId codeGen =
codeGenFromExpr
(JS.var "log" `JS.call` [jsValId valId, codeGenExpression codeGen])
<$ tell LogUsed
maybeLogSubexprResult :: Monad m => ValId -> CodeGen -> M m CodeGen
maybeLogSubexprResult valId codeGen =
Lens.view envMode
>>= \case
Fast{} -> pure codeGen
SlowLogging{} -> runTrampoline codeGen & logSubexprResult valId
runTrampoline :: CodeGen -> CodeGen
runTrampoline application =
rts "rerun" $$ codeGenExpression application & codeGenFromExpr
compileAppliedFunc :: Monad m => IsTailCall -> ValId -> Val ValId -> JSS.Expression () -> M m CodeGen
compileAppliedFunc isTail valId func arg' =
do
mode <- Lens.view envMode
case (mode, func ^. hVal) of
        -- in slow mode we want the results associated with the
        -- intermediate vars to be reported as-is, for simplicity
        -- on the GUI side:
(Fast {}, V.BCase case_) ->
compileCaseOnVar isTail valId case_ (JS.var "x")
<&> (varinit "x" arg' :)
<&> codeGenFromLamStmts
>>= maybeLogSubexprResult valId
(Fast {}, V.BLam (V.TypedLam v _paramTyp res)) ->
do
(vId, lamStmts) <-
compileVal isTail res <&> codeGenLamStmts
& withLocalVar v
CodeGen
{ codeGenLamStmts = varinit vId arg' : lamStmts
, codeGenExpression =
-- Can't really optimize a redex in expr
-- context, as at least 1 redex must be paid
JS.lambda [vId] lamStmts $$ arg'
}
& maybeLogSubexprResult valId
(_, V.BLeaf V.LHole) ->
throwErr valId ER.ReachedHole
<&> \holeExc ->
JS.expr arg' : codeGenLamStmts holeExc
& codeGenFromLamStmts
(_, V.BLeaf V.LFromNom {}) -> codeGenFromExpr arg' & pure
_ ->
compileVal NotTailCall func
<&> codeGenExpression
<&> ($$ arg')
>>= optimizeExpr
<&> codeGenFromExpr
>>= case isTail of
NotTailCall -> maybeLogSubexprResult valId . runTrampoline
TailCall -> fmap codeGenFromExpr . trampoline
where
trampoline application =
maybeLogSubexprResult valId application
<&> \loggedApp ->
JS.object
[ ( JS.propId "trampolineTo"
, codeGenLamStmts loggedApp & JS.lambda []
)
]
optimizeExpr :: Monad m => JSS.Expression () -> M m (JSS.Expression ())
optimizeExpr x@(JSS.CallExpr () func [arg]) =
do
def <- Lens.view envMode <&> useGlobal
let arrayLit (JSS.CallExpr () cons [JSS.ObjectLit ()
[(k0, v0), (k1, JSS.FuncExpr () Nothing [_] [JSS.ReturnStmt () (Just v1)])]
])
| cons == def "_3a__3a_" && k0 == key "infixl" && k1 == key "infixr" =
arrayLit v1 <&> (v0 :)
| otherwise = Nothing
arrayLit (JSS.ObjectLit () [(k0, JSS.StringLit () "empty"), (k1, JSS.ObjectLit () [])])
| k0 == key "tag" && k1 == key "data" =
Just []
arrayLit _ = Nothing
let r
| func == def "toArray" =
arrayLit arg
& maybe x (JSS.ArrayLit ())
| func == def "map" =
-- Check mapping with "id" (when unwrapping nominals..)
case arg of
JSS.ObjectLit ()
[ (_, str)
, (k, JSS.FuncExpr () Nothing [lamParam] [JSS.ReturnStmt () (Just (JSS.VarRef () lamRet))])
] | k == key "mapping" && lamParam == lamRet
-> str
_ -> x
| otherwise = x
pure r
where
key n = JSS.PropId () (JSS.Id () n)
optimizeExpr (JSS.FuncExpr () Nothing [param] [JSS.ReturnStmt () (Just (JSS.CallExpr () func [JSS.VarRef () var]))])
-- eta reduce: \x -> f x =~~> f
| param == var = pure func
optimizeExpr x = pure x
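-- Illustration (editor's note, not in the original source): the eta-reduction
-- clause above rewrites generated JS of the form
--   function (x) { return f(x); }
-- to plain  f  whenever the bound parameter and the applied variable coincide.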
compileLeaf :: Monad m => ValId -> V.Leaf -> M m CodeGen
compileLeaf valId x =
case x of
V.LHole -> throwErr valId ER.ReachedHole
V.LRecEmpty -> JS.object [] & codeGenFromExpr & pure
V.LAbsurd -> throwErr valId ER.UnhandledCase
V.LVar var -> compileVar valId var >>= maybeLogSubexprResult valId
V.LLiteral literal -> compileLiteral literal & pure
V.LFromNom {} ->
lam "x" (pure . (:[]) . JSS.ReturnStmt () . Just) <&> codeGenFromExpr
V.LGetField t -> compileGetField t
V.LInject t -> compileInject t
compileToNom ::
Monad m =>
IsTailCall -> ToNom T.NominalId V.Term # Annotated ValId -> M m CodeGen
compileToNom isTail (ToNom tId x) =
case x ^? ExprLens.valLiteral <&> PrimVal.toKnown of
Just (PrimVal.Bytes bytes)
| tId == Builtins.textTid
&& all (< 128) (BS.unpack bytes) ->
-- The JS is more readable with string constants
rts "bytesFromAscii" $$ JS.string (Text.unpack (decodeUtf8 bytes))
& codeGenFromExpr & pure
_ -> compileVal isTail x
compileVal :: Monad m => IsTailCall -> Val ValId -> M m CodeGen
compileVal isTail (Ann (Const valId) body) =
case body of
V.BLeaf x -> compileLeaf valId x
V.BApp x -> compileApply isTail valId x
V.BLam x -> compileLambda valId x
V.BRecExtend x -> compileRecExtend x
V.BCase x -> compileCase valId x
V.BToNom x -> compileToNom isTail x
| lamdu/lamdu | src/Lamdu/Eval/JS/Compiler.hs | gpl-3.0 | 26,635 | 0 | 24 | 7,884 | 8,222 | 4,225 | 3,997 | -1 | -1 |
module Examples where
import Music (Name(..), Accidental(..), Scale(..), Tuning(..), Timing(..), Metronome(..),
AbstractInt1(..), AbstractPitch1(..), AbstractDur1(..),
AbstractInt2(..), AbstractPitch2(..), AbstractDur2(..),
AbstractInt3(..), AbstractPitch3(..), AbstractDur3(..),
Name(..), Number(..), Accidental(..), Quality(..),
Pitch(..), Interval(..),
Transpose(..),
AbstractNote(..), Note1, Note2, Note3,
AbstractPhrase(..),
Degree(..), Ficta(..), noteToSound,
apPitch, chord,
mapPhrase, absolute, normalise, faInt, faPitch, Music(..), mapMusic)
import Tuning
import FiveLimit (JustTuning(..), JustPitch(..), JustInt(..), ForceJustTuning(..))
import Scales (minor, major, HarmonicMinor(..), Minor, Major,
harmonicminor, infiniteScale, chromaticScale)
import Util (allPairs, interleave)
import Shortcuts
-- import LilyPrint
import Output
-- example notes:
fsharp = pitch F (Sh Na)
gsharp = pitch G (Sh Na)
gsharp' = pitch (Up G) (Sh Na)
n1 :: Note2
n1 = AbstractPitch fsharp quaver
n2 :: Note2
n2 = AbstractInt d5 quaver
n3 :: Note2
n3 = Rest quaver
somefreq = AbstractPitch3 4.52
somelength = AbstractDur3 4.56
n4 :: Note3
n4 = AbstractPitch somefreq somelength
-- example scales:
cmajor = major (pitch C Na)
bflatmajor = major (pitch B (Fl Na))
dminor = minor (pitch D Na)
csharpminor = minor (pitch C (Sh Na))
dsharpminor = harmonicminor (pitch D (Sh Na))
longquavercmajscale = phrase $ zipWith note (take 22 $ infiniteScale cmajor) (repeat quaver)
play_longquavercmajscale = playCsound $ mapPhrase (noteToSound et me) longquavercmajscale
-- example tuning systems:
p = pythagorean (pitch A Na, AbstractPitch3 440.0)
et = equal (pitch A Na, AbstractPitch3 440.0)
qc = qcmeantone (pitch A Na, AbstractPitch3 440.0)
me = Metronome 240
-- tuning some scales:
frequencies = map (tune et) (scale cmajor)
frequencies' = map (tune qc) (scale csharpminor)
---- Construct a tune from scale degrees (this is pretty unwieldy)
notes = [AbstractPitch1 TO Neutral,
AbstractPitch1 DO Neutral,
AbstractPitch1 ME Neutral,
AbstractPitch1 TO Neutral,
AbstractPitch1 (DDown LN) Neutral,
AbstractPitch1 TO Neutral,
AbstractPitch1 ST Neutral,
AbstractPitch1 ME Neutral,
AbstractPitch1 SD Neutral,
AbstractPitch1 ME Neutral,
AbstractPitch1 ST Neutral,
AbstractPitch1 TO Neutral]
durs = [minim, minim, minim, minim, minim, crotchet, crotchet, tie minim quaver, quaver, quaver, quaver, crotchet]
notes1 = AbstractPhrase $ zipWith AbstractPitch (map (applyScale cmajor) notes) durs
notes2 = AbstractPhrase $ zipWith AbstractPitch (map (applyScale (harmonicminor (pitch D Na))) notes) durs
---- Some simple polyphony
cnotes1 = [g, a, b, c, c, c, b, c]
cnotes2 = [e, f, d, e, d, e, d, e]
cnotes3 = map (.-^ _P8) [c, f, gis, a, fis, g, g, c]
chords = Voices $ map (\p -> phrase $ zipWith note p (repeat minim)) [cnotes1, cnotes2, cnotes3]
-- note the arguments to noteToSound: 'et' is a tuning system, 'me' is a timing.
chordsounds = mapMusic (mapPhrase (noteToSound et me)) chords
-- and now to hear it through your speakers
playchordsounds = playCsounds chordsounds
----------------
pos = [0..]
neg = map (*(-1)) [1..]
ints = interleave pos neg
pairs = allPairs ints ints
intervals = map (\(a,d) -> a *^ _A1 ^+^ d *^ d2) pairs
| ejlilley/AbstractMusic | Examples.hs | gpl-3.0 | 3,504 | 0 | 14 | 752 | 1,220 | 707 | 513 | 72 | 1 |
{-# OPTIONS -Wall -fsimpl-tick-factor=1024 #-}
{-# LANGUAGE OverloadedStrings #-}
import qualified Web.Twitter.Conduit as WTC
import qualified Web.Authenticate.OAuth as WAO
import qualified Control.Monad.Logger as CML
import qualified System.Environment as SE
import qualified Control.Lens as CL
import qualified Control.Monad.IO.Class as CMIC
import qualified Data.Conduit as DC
import qualified Data.Conduit.List as DCL
import qualified Data.ByteString.Char8 as DBC
import qualified Data.Text as DT
import qualified Data.Monoid as DM
tokens :: String -> String -> WAO.OAuth
tokens ck cs = WTC.twitterOAuth
{
WAO.oauthConsumerKey = DBC.pack ck ,
WAO.oauthConsumerSecret = DBC.pack cs
}
credential :: String -> String -> WAO.Credential
credential t ts = WAO.Credential
[
( "oauth_token" , DBC.pack t ) ,
( "oauth_token_secret" , DBC.pack ts )
]
twInfo :: String -> String -> String -> String -> WTC.TWInfo
twInfo ck cs t ts = WTC.setCredential ( tokens ck cs ) ( credential t ts ) WAO.def
mirroring sn ( WTC.SStatus status ) = do
if status CL.^. WTC.statusUser CL.^. WTC.userScreenName == DT.pack sn
then do
_ <- WTC.call $ WTC.update $ status CL.^. WTC.statusText
return ( )
else return ( )
mirroring _ _ = return ( )
main :: IO ( )
main = do
ck <- SE.getEnv "YOUR_CONSUMER_KEY"
cs <- SE.getEnv "YOUR_CONSUMER_SECRET"
at <- SE.getEnv "YOUR_ACCESS_TOKEN"
ats <- SE.getEnv "YOUR_ACCESS_TOKEN_SECRET"
sn <- SE.getEnv "MIRRORING_ACCOUNT_SCREEN_NAME"
CML.runNoLoggingT . WTC.runTW ( twInfo ck cs at ats ) $ do
userId <- WTC.call $ WTC.usersShow $ WTC.ScreenNameParam sn
src <- WTC.stream $ WTC.statusesFilterByFollow [ userId CL.^. WTC.userId ]
src DC.$$+- DCL.mapM_ ( CL.^! CL.act ( mirroring sn ) )
| minamiyama1994/mirror-tweet | Main.hs | gpl-3.0 | 1,774 | 0 | 15 | 331 | 576 | 307 | 269 | 42 | 2 |
module Data.Dns.Types (
DnsType(..)
, DnsClass(..)
, DnsRecord(..)
, DomainName(..)
, HinfoCPU(..)
, HinfoOS(..)
, SOASerial(..)
, SOAExpire(..)
, SOARefresh(..)
, SOARetry(..)
, SOAMinimum(..)
, SOAMName(..)
, SOARName(..)
) where
import Data.ByteString
import Data.Word
-- my naming scheme is *glorious*
newtype DomainName = DomainName {unDN :: ByteString} deriving (Show, Eq)
newtype HinfoCPU = HinfoCPU {unHinfoCPU :: ByteString} deriving (Show, Eq)
newtype HinfoOS = HinfoOS {unHinfoOS :: ByteString} deriving (Show, Eq)
newtype SOASerial = SOASerial {unSerial :: Word32} deriving (Show, Eq)
newtype SOARefresh = SOARefresh {unRefresh :: Word32} deriving (Show, Eq)
newtype SOAExpire = SOAExpire {unExpire :: Word32} deriving (Show, Eq)
newtype SOARetry = SOARetry {unRetry :: Word32} deriving (Show, Eq)
newtype SOAMinimum = SOAMinimum {unMinimum :: Word32} deriving (Show, Eq)
newtype SOAMName = SOAMName DomainName deriving (Show, Eq)
newtype SOARName = SOARName DomainName deriving (Show, Eq)
data DnsType = ARecord Word32
| NSRecord DomainName
| CNAMERecord DomainName
| SOARecord SOAMName SOARName SOASerial SOARefresh SOARetry SOAExpire SOAMinimum
| PTRRecord DomainName
| HINFORecord HinfoCPU HinfoOS
| MXRecord Word16 DomainName
| TXTRecord ByteString
| UnknownRecord deriving (Show, Eq)
data DnsClass = INClass
| UnknownClass deriving (Show, Eq)
data DnsRecord = DnsRecord { dnsName :: ByteString
, dnsType :: DnsType
, dnsClass :: DnsClass
, dnsTTL :: Word32
, dnsRdLength :: Word16
, dnsRdData :: ByteString
} deriving (Show, Eq)
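-- Illustrative example (editor's note, not part of the original module; kept as
-- a comment because the module does not enable OverloadedStrings): an A record
-- for "localhost" pointing at 127.0.0.1, whose 32-bit big-endian value is 0x7F000001.
--
--   DnsRecord { dnsName     = Data.ByteString.Char8.pack "localhost"
--             , dnsType     = ARecord 0x7F000001
--             , dnsClass    = INClass
--             , dnsTTL      = 300
--             , dnsRdLength = 4
--             , dnsRdData   = Data.ByteString.pack [0x7F, 0x00, 0x00, 0x01]
--             }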
| cabrera/haskell-parsing-tutorial | src/Data/Dns/Types.hs | gpl-3.0 | 1,860 | 0 | 8 | 529 | 492 | 309 | 183 | 44 | 0 |
module HSubst where
import Data.Void
import Data.Bifunctor
import Language
import Context
--------------------
--- Hereditary Substitution
--------------------
-- Hereditary Substitution is mostly functor-preserving
-- (except for `Ne`utral terms which are turned into
-- Normal ones) so we introduce this type alias.
type HSubst f a b c d =
Subst a b c d -> Renaming a b c d -> f a b -> f c d
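-- Illustration (editor's note, not in the original source): specialised to 'Nf',
-- the alias above unfolds to
--   Subst a b c d -> Renaming a b c d -> Nf a b -> Nf c d
-- which is exactly the shape of 'hSubstNf' below; 'hSubstNe' is the one
-- exception, taking 'Ne a b' but returning 'Nf c d'.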
-- Now we can give a (rather) compact type to the combinator
-- lifting hereditary substitution to scopes
hSubstScopeDa :: (Eq a, Eq b, Eq c, Eq d) =>
HSubst f (Maybe a) b (Maybe c) d -> HSubst (ScopeDa f) a b c d
hSubstScopeDa hS vu ren (ScopeDa sc) = ScopeDa $ hS vu' ren' sc
where vu' = wkSubstDa vu
ren' = KeepItDa ren
hSubstScopeTm :: (Eq a, Eq b, Eq c, Eq d) =>
HSubst f a (Maybe b) c (Maybe d) -> HSubst (ScopeTm f) a b c d
hSubstScopeTm hS vu ren (ScopeTm sc) = ScopeTm $ hS vu' ren' sc
where vu' = wkSubstTm vu
ren' = KeepItTm ren
hSubstTy :: (Eq a, Eq b, Eq c, Eq d) => HSubst (Ty Ne) a b c d
hSubstTy _ _ TySet = TySet
hSubstTy _ _ TyDat = TyDat
hSubstTy _ _ TyZro = TyZro
hSubstTy _ _ TyOne = TyOne
hSubstTy _ _ TyTwo = TyTwo
hSubstTy vu ren (TySig s t) = TySig s' t'
where s' = hSubstTy vu ren s
t' = hSubstScopeTm hSubstTy vu ren t
hSubstTy vu ren (TyAbs s t) = TyAbs s' t'
where s' = hSubstTy vu ren s
t' = hSubstScopeTm hSubstTy vu ren t
hSubstTy vu ren (TyVar w) = hSubstRec vu ren w
hSubstTy vu ren (TyRec d) = TyRec $ hSubstScopeDa hSubstTy vu ren d
hSubstTy vu ren (TyElt t) = tyElt $ hSubstNe vu ren t
hSubstNf :: (Eq a, Eq b, Eq c, Eq d) => HSubst Nf a b c d
hSubstNf vu ren (NfAbs b) = NfAbs $ hSubstScopeTm hSubstNf vu ren b
hSubstNf vu ren (NfNeu ne) = hSubstNe vu ren ne
hSubstNf vu ren (NfTyp ty) = NfTyp $ hSubstTy vu ren ty
hSubstNf vu ren (NfInM t) = NfInM $ hSubstNf vu ren t
hSubstNf _ _ NfOne = NfOne
hSubstNf _ _ NfTru = NfTru
hSubstNf _ _ NfFls = NfFls
hSubstNf vu ren (NfSig a b) = NfSig a' b'
where a' = hSubstNf vu ren a
b' = hSubstNf vu ren b
hSubstNe :: (Eq a, Eq b, Eq c, Eq d) =>
Subst a b c d -> Renaming a b c d -> Ne a b -> Nf c d
hSubstNe wu ren (Ne v sp) = v' `hApp` sp'
where v' = hSubstVar wu ren v
sp' = hSubstSp wu ren sp
hSubstSp :: (Eq a, Eq b, Eq c, Eq d) => HSubst (Sp Nf Ne) a b c d
hSubstSp vu ren (Sp sp) = Sp $ fmap (hSubstElim vu ren) sp
hSubstElim :: (Eq a, Eq b, Eq c, Eq d) => HSubst (Elim Nf Ne) a b c d
hSubstElim vu ren (ElimApp t) = ElimApp $ hSubstNf vu ren t
hSubstElim _ _ ElimPr1 = ElimPr1
hSubstElim _ _ ElimPr2 = ElimPr2
hSubstElim vu ren (ElimBot ty) = ElimBot $ hSubstTy vu ren ty
hSubstElim vu ren (ElimTwo ty t f) = ElimTwo ty' (hNf t) (hNf f)
where hNf = hSubstNf vu ren
ty' = hSubstTy vu ren ty
hSubstElim vu ren (ElimRec d ty a) = ElimRec d' ty' a'
where d' = hSubstScopeDa hSubstTy vu ren d
ty' = hSubstTy vu ren ty
a' = hSubstNf vu ren a
hSubstRec :: (Eq a, Eq b, Eq c, Eq d) =>
Subst a b c d -> Renaming a b c d -> a -> Ty Ne c d
hSubstRec (SubstTm v u) ren w = TyVar $ renameDa ren w
hSubstRec (SubstDa v u) ren w
| v == w = u
| otherwise = TyVar $ renameDa ren w
hSubstVar :: (Eq a, Eq b, Eq c, Eq d) =>
Subst a b c d -> Renaming a b c d -> b -> Nf c d
hSubstVar (SubstDa v u) ren w = varNf $ renameTm ren w
hSubstVar (SubstTm v u) ren w
| v == w = u
| otherwise = varNf $ renameTm ren w
hSubstTm :: (Eq a, Eq b) => ScopeTm Nf a b -> Nf a b -> Nf a b
hSubstTm b u = hSubstNf (SubstTm Nothing u) DropItTm $ outScopeTm b
hSubstDa :: (Eq a, Eq b) =>
ScopeDa (Ty Ne) a b -> Ty Ne a b -> Ty Ne a b
hSubstDa b u = hSubstTy (SubstDa Nothing u) DropItDa $ outScopeDa b
appNe :: Ne a b -> Nf a b -> Ne a b
appNe ne = elimNe ne . ElimApp
appNf :: (Eq a, Eq b) => Nf a b -> Nf a b -> Nf a b
appNf (NfAbs b) u = hSubstTm b u
appTy :: (Eq a, Eq b) => Ty Ne a b -> Nf a b -> Ty Ne a b
appTy (TyAbs _ b) u =
hSubstTy (SubstTm Nothing u) DropItTm $ outScopeTm b
funExt :: (Eq a, Eq b) =>
ScopeDa (Ty Ne) a b -> Ty Ne a b -> Ty Ne a b
funExt d x = hSubstTy (SubstDa Nothing x) DropItDa $ outScopeDa d
elimNe :: Ne a b -> Elim Nf Ne a b -> Ne a b
elimNe (Ne v (Sp sp)) elim = Ne v $ Sp $ sp ++ [elim]
proj1 :: Nf a b -> Nf a b
proj1 (NfSig a _) = a
proj1 (NfNeu ne) = NfNeu $ elimNe ne ElimPr1
proj2 :: Nf a b -> Nf a b
proj2 (NfSig _ b) = b
proj2 (NfNeu ne) = NfNeu $ elimNe ne ElimPr2
ifTE :: Ty Ne a b -> Nf a b -> Nf a b -> Nf a b -> Nf a b
ifTE _ NfTru t _ = t
ifTE _ NfFls _ f = f
ifTE ty (NfNeu ne) t f = NfNeu $ elimNe ne (ElimTwo ty t f)
below :: (Eq a, Eq b) =>
ScopeDa (Ty Ne) a b -> Ty Ne a b -> Ty Ne (Maybe a) b ->
Nf a b -> Nf a b -> Nf a b
below d ty TyOne ih x = NfOne
below d ty pi@(TyAbs a b) ih f =
lamNf $ below d' (wkTy ty) b' (wkNf ih) fa
where d' = ScopeDa $ wkTy $ outScopeDa d
b' = wkTy pi `appTy` varNf Nothing
fa = wkNf f `appNf` varNf Nothing
below d ty (TySig a b) ih p = below d ty b' ih $ proj2 p
where b' = hSubstTy (SubstTm Nothing $ wkNfDa $ proj1 p)
DropItTm $ outScopeTm b
below d ty (TyVar v) ih x = recNf d ty ih x
recNf :: (Eq a, Eq b) =>
ScopeDa (Ty Ne) a b -> Ty Ne a b -> Nf a b -> Nf a b -> Nf a b
recNf d ty ih (NfNeu x) = NfNeu $ x `elimNe` ElimRec d ty ih
recNf d ty ih (NfInM x) =
ih `appNf` x `appNf` below d ty (outScopeDa d) ih x
elimNf :: (Eq a, Eq b) => Nf a b -> Elim Nf Ne a b -> Nf a b
elimNf nf (ElimApp u) = nf `appNf` u
elimNf nf ElimPr1 = proj1 nf
elimNf nf ElimPr2 = proj2 nf
elimNf (NfNeu ne) el@(ElimBot _) = NfNeu $ ne `elimNe` el
elimNf nf (ElimTwo ty t f) = ifTE ty nf t f
elimNf nf (ElimRec d ty alg) = recNf d ty alg nf
hApp :: (Eq a, Eq b) => Nf a b -> Sp Nf Ne a b -> Nf a b
hApp nf (Sp sp) = foldl elimNf nf sp
| gallais/potpourri | haskell/hsubst/HSubst.hs | gpl-3.0 | 5,934 | 0 | 12 | 1,733 | 3,114 | 1,535 | 1,579 | 128 | 1 |
<h3>Example 48 (palettes with ng-repeat)</h3>
<palette ng-repeat="(nm,body) in pals" name="[[nm]]">
[[body]]
</palette>
<plot height=300 aspect=3 stroke-width=2 x="[[seq(0,4*PI,101)]]"
axis-x-label="Time" axis-y-label="sin(x) / cos(x)">
<lines y="[[sin(x)]]" stroke="[[bgr(y)]]"></lines>
<lines y="[[cos(x)]]" stroke="[[gyo(y)]]"></lines>
</plot>
| openbrainsrc/hRadian | examples/Example/defunct/Eg48.hs | mpl-2.0 | 361 | 27 | 16 | 42 | 194 | 91 | 103 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.S3.Types.Product
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.S3.Types.Product where
import Network.AWS.Prelude
import Network.AWS.S3.Internal
import Network.AWS.S3.Types.Sum
-- | /See:/ 'accessControlPolicy' smart constructor.
data AccessControlPolicy = AccessControlPolicy'
{ _acpGrants :: !(Maybe [Grant])
, _acpOwner :: !(Maybe Owner)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'AccessControlPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acpGrants'
--
-- * 'acpOwner'
accessControlPolicy
:: AccessControlPolicy
accessControlPolicy =
AccessControlPolicy'
{ _acpGrants = Nothing
, _acpOwner = Nothing
}
-- | A list of grants.
acpGrants :: Lens' AccessControlPolicy [Grant]
acpGrants = lens _acpGrants (\ s a -> s{_acpGrants = a}) . _Default . _Coerce;
-- | Undocumented member.
acpOwner :: Lens' AccessControlPolicy (Maybe Owner)
acpOwner = lens _acpOwner (\ s a -> s{_acpOwner = a});
instance ToXML AccessControlPolicy where
toXML AccessControlPolicy'{..}
= mconcat
["AccessControlList" @=
toXML (toXMLList "Grant" <$> _acpGrants),
"Owner" @= _acpOwner]
-- | /See:/ 'bucket' smart constructor.
data Bucket = Bucket'
{ _bCreationDate :: !RFC822
, _bName :: !BucketName
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Bucket' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bCreationDate'
--
-- * 'bName'
bucket
:: UTCTime -- ^ 'bCreationDate'
-> BucketName -- ^ 'bName'
-> Bucket
bucket pCreationDate_ pName_ =
Bucket'
{ _bCreationDate = _Time # pCreationDate_
, _bName = pName_
}
-- | Date the bucket was created.
bCreationDate :: Lens' Bucket UTCTime
bCreationDate = lens _bCreationDate (\ s a -> s{_bCreationDate = a}) . _Time;
-- | The name of the bucket.
bName :: Lens' Bucket BucketName
bName = lens _bName (\ s a -> s{_bName = a});
instance FromXML Bucket where
parseXML x
= Bucket' <$> (x .@ "CreationDate") <*> (x .@ "Name")
-- | /See:/ 'bucketLifecycleConfiguration' smart constructor.
newtype BucketLifecycleConfiguration = BucketLifecycleConfiguration'
{ _blcRules :: [LifecycleRule]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'BucketLifecycleConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'blcRules'
bucketLifecycleConfiguration
:: BucketLifecycleConfiguration
bucketLifecycleConfiguration =
BucketLifecycleConfiguration'
{ _blcRules = mempty
}
-- | Undocumented member.
blcRules :: Lens' BucketLifecycleConfiguration [LifecycleRule]
blcRules = lens _blcRules (\ s a -> s{_blcRules = a}) . _Coerce;
instance ToXML BucketLifecycleConfiguration where
toXML BucketLifecycleConfiguration'{..}
= mconcat [toXMLList "Rule" _blcRules]
-- | /See:/ 'bucketLoggingStatus' smart constructor.
newtype BucketLoggingStatus = BucketLoggingStatus'
{ _blsLoggingEnabled :: Maybe LoggingEnabled
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'BucketLoggingStatus' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'blsLoggingEnabled'
bucketLoggingStatus
:: BucketLoggingStatus
bucketLoggingStatus =
BucketLoggingStatus'
{ _blsLoggingEnabled = Nothing
}
-- | Undocumented member.
blsLoggingEnabled :: Lens' BucketLoggingStatus (Maybe LoggingEnabled)
blsLoggingEnabled = lens _blsLoggingEnabled (\ s a -> s{_blsLoggingEnabled = a});
instance ToXML BucketLoggingStatus where
toXML BucketLoggingStatus'{..}
= mconcat ["LoggingEnabled" @= _blsLoggingEnabled]
-- | /See:/ 'corsConfiguration' smart constructor.
newtype CORSConfiguration = CORSConfiguration'
{ _ccCORSRules :: [CORSRule]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CORSConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccCORSRules'
corsConfiguration
:: CORSConfiguration
corsConfiguration =
CORSConfiguration'
{ _ccCORSRules = mempty
}
-- | Undocumented member.
ccCORSRules :: Lens' CORSConfiguration [CORSRule]
ccCORSRules = lens _ccCORSRules (\ s a -> s{_ccCORSRules = a}) . _Coerce;
instance ToXML CORSConfiguration where
toXML CORSConfiguration'{..}
= mconcat [toXMLList "CORSRule" _ccCORSRules]
-- | /See:/ 'corsRule' smart constructor.
data CORSRule = CORSRule'
{ _crMaxAgeSeconds :: !(Maybe Int)
, _crAllowedHeaders :: !(Maybe [Text])
, _crExposeHeaders :: !(Maybe [Text])
, _crAllowedMethods :: ![Text]
, _crAllowedOrigins :: ![Text]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CORSRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'crMaxAgeSeconds'
--
-- * 'crAllowedHeaders'
--
-- * 'crExposeHeaders'
--
-- * 'crAllowedMethods'
--
-- * 'crAllowedOrigins'
corsRule
:: CORSRule
corsRule =
CORSRule'
{ _crMaxAgeSeconds = Nothing
, _crAllowedHeaders = Nothing
, _crExposeHeaders = Nothing
, _crAllowedMethods = mempty
, _crAllowedOrigins = mempty
}
-- | The time in seconds that your browser is to cache the preflight response
-- for the specified resource.
crMaxAgeSeconds :: Lens' CORSRule (Maybe Int)
crMaxAgeSeconds = lens _crMaxAgeSeconds (\ s a -> s{_crMaxAgeSeconds = a});
-- | Specifies which headers are allowed in a pre-flight OPTIONS request.
crAllowedHeaders :: Lens' CORSRule [Text]
crAllowedHeaders = lens _crAllowedHeaders (\ s a -> s{_crAllowedHeaders = a}) . _Default . _Coerce;
-- | One or more headers in the response that you want customers to be able
-- to access from their applications (for example, from a JavaScript
-- XMLHttpRequest object).
crExposeHeaders :: Lens' CORSRule [Text]
crExposeHeaders = lens _crExposeHeaders (\ s a -> s{_crExposeHeaders = a}) . _Default . _Coerce;
-- | Identifies HTTP methods that the domain\/origin specified in the rule is
-- allowed to execute.
crAllowedMethods :: Lens' CORSRule [Text]
crAllowedMethods = lens _crAllowedMethods (\ s a -> s{_crAllowedMethods = a}) . _Coerce;
-- | One or more origins you want customers to be able to access the bucket
-- from.
crAllowedOrigins :: Lens' CORSRule [Text]
crAllowedOrigins = lens _crAllowedOrigins (\ s a -> s{_crAllowedOrigins = a}) . _Coerce;
instance FromXML CORSRule where
parseXML x
= CORSRule' <$>
(x .@? "MaxAgeSeconds") <*>
(may (parseXMLList "AllowedHeader") x)
<*> (may (parseXMLList "ExposeHeader") x)
<*> (parseXMLList "AllowedMethod" x)
<*> (parseXMLList "AllowedOrigin" x)
instance ToXML CORSRule where
toXML CORSRule'{..}
= mconcat
["MaxAgeSeconds" @= _crMaxAgeSeconds,
toXML
(toXMLList "AllowedHeader" <$> _crAllowedHeaders),
toXML
(toXMLList "ExposeHeader" <$> _crExposeHeaders),
toXMLList "AllowedMethod" _crAllowedMethods,
toXMLList "AllowedOrigin" _crAllowedOrigins]
-- | /See:/ 'commonPrefix' smart constructor.
newtype CommonPrefix = CommonPrefix'
{ _cpPrefix :: Maybe Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommonPrefix' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cpPrefix'
commonPrefix
:: CommonPrefix
commonPrefix =
CommonPrefix'
{ _cpPrefix = Nothing
}
-- | Undocumented member.
cpPrefix :: Lens' CommonPrefix (Maybe Text)
cpPrefix = lens _cpPrefix (\ s a -> s{_cpPrefix = a});
instance FromXML CommonPrefix where
parseXML x = CommonPrefix' <$> (x .@? "Prefix")
-- | /See:/ 'completedMultipartUpload' smart constructor.
newtype CompletedMultipartUpload = CompletedMultipartUpload'
{ _cmuParts :: Maybe (List1 CompletedPart)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CompletedMultipartUpload' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cmuParts'
completedMultipartUpload
:: CompletedMultipartUpload
completedMultipartUpload =
CompletedMultipartUpload'
{ _cmuParts = Nothing
}
-- | Undocumented member.
cmuParts :: Lens' CompletedMultipartUpload (Maybe (NonEmpty CompletedPart))
cmuParts = lens _cmuParts (\ s a -> s{_cmuParts = a}) . mapping _List1;
instance ToXML CompletedMultipartUpload where
toXML CompletedMultipartUpload'{..}
= mconcat [toXML (toXMLList "Part" <$> _cmuParts)]
-- | /See:/ 'completedPart' smart constructor.
data CompletedPart = CompletedPart'
{ _cpPartNumber :: !Int
, _cpETag :: !ETag
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CompletedPart' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cpPartNumber'
--
-- * 'cpETag'
completedPart
:: Int -- ^ 'cpPartNumber'
-> ETag -- ^ 'cpETag'
-> CompletedPart
completedPart pPartNumber_ pETag_ =
CompletedPart'
{ _cpPartNumber = pPartNumber_
, _cpETag = pETag_
}
-- | Part number that identifies the part. This is a positive integer between
-- 1 and 10,000.
cpPartNumber :: Lens' CompletedPart Int
cpPartNumber = lens _cpPartNumber (\ s a -> s{_cpPartNumber = a});
-- | Entity tag returned when the part was uploaded.
cpETag :: Lens' CompletedPart ETag
cpETag = lens _cpETag (\ s a -> s{_cpETag = a});
instance ToXML CompletedPart where
toXML CompletedPart'{..}
= mconcat
["PartNumber" @= _cpPartNumber, "ETag" @= _cpETag]
-- | /See:/ 'condition' smart constructor.
data Condition = Condition'
{ _cKeyPrefixEquals :: !(Maybe Text)
, _cHTTPErrorCodeReturnedEquals :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Condition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cKeyPrefixEquals'
--
-- * 'cHTTPErrorCodeReturnedEquals'
condition
:: Condition
condition =
Condition'
{ _cKeyPrefixEquals = Nothing
, _cHTTPErrorCodeReturnedEquals = Nothing
}
-- | The object key name prefix when the redirect is applied. For example, to
-- redirect requests for ExamplePage.html, the key prefix will be
-- ExamplePage.html. To redirect request for all pages with the prefix
-- docs\/, the key prefix will be \/docs, which identifies all objects in
-- the docs\/ folder. Required when the parent element Condition is
-- specified and sibling HttpErrorCodeReturnedEquals is not specified. If
-- both conditions are specified, both must be true for the redirect to be
-- applied.
cKeyPrefixEquals :: Lens' Condition (Maybe Text)
cKeyPrefixEquals = lens _cKeyPrefixEquals (\ s a -> s{_cKeyPrefixEquals = a});
-- | The HTTP error code when the redirect is applied. In the event of an
-- error, if the error code equals this value, then the specified redirect
-- is applied. Required when parent element Condition is specified and
-- sibling KeyPrefixEquals is not specified. If both are specified, then
-- both must be true for the redirect to be applied.
cHTTPErrorCodeReturnedEquals :: Lens' Condition (Maybe Text)
cHTTPErrorCodeReturnedEquals = lens _cHTTPErrorCodeReturnedEquals (\ s a -> s{_cHTTPErrorCodeReturnedEquals = a});
instance FromXML Condition where
parseXML x
= Condition' <$>
(x .@? "KeyPrefixEquals") <*>
(x .@? "HttpErrorCodeReturnedEquals")
instance ToXML Condition where
toXML Condition'{..}
= mconcat
["KeyPrefixEquals" @= _cKeyPrefixEquals,
"HttpErrorCodeReturnedEquals" @=
_cHTTPErrorCodeReturnedEquals]
-- | /See:/ 'copyObjectResult' smart constructor.
data CopyObjectResult = CopyObjectResult'
{ _corETag :: !(Maybe ETag)
, _corLastModified :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CopyObjectResult' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'corETag'
--
-- * 'corLastModified'
copyObjectResult
:: CopyObjectResult
copyObjectResult =
CopyObjectResult'
{ _corETag = Nothing
, _corLastModified = Nothing
}
-- | Undocumented member.
corETag :: Lens' CopyObjectResult (Maybe ETag)
corETag = lens _corETag (\ s a -> s{_corETag = a});
-- | Undocumented member.
corLastModified :: Lens' CopyObjectResult (Maybe UTCTime)
corLastModified = lens _corLastModified (\ s a -> s{_corLastModified = a}) . mapping _Time;
instance FromXML CopyObjectResult where
parseXML x
= CopyObjectResult' <$>
(x .@? "ETag") <*> (x .@? "LastModified")
-- | /See:/ 'copyPartResult' smart constructor.
data CopyPartResult = CopyPartResult'
{ _cprETag :: !(Maybe ETag)
, _cprLastModified :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CopyPartResult' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cprETag'
--
-- * 'cprLastModified'
copyPartResult
:: CopyPartResult
copyPartResult =
CopyPartResult'
{ _cprETag = Nothing
, _cprLastModified = Nothing
}
-- | Entity tag of the object.
cprETag :: Lens' CopyPartResult (Maybe ETag)
cprETag = lens _cprETag (\ s a -> s{_cprETag = a});
-- | Date and time at which the object was uploaded.
cprLastModified :: Lens' CopyPartResult (Maybe UTCTime)
cprLastModified = lens _cprLastModified (\ s a -> s{_cprLastModified = a}) . mapping _Time;
instance FromXML CopyPartResult where
parseXML x
= CopyPartResult' <$>
(x .@? "ETag") <*> (x .@? "LastModified")
-- | /See:/ 'createBucketConfiguration' smart constructor.
newtype CreateBucketConfiguration = CreateBucketConfiguration'
{ _cbcLocationConstraint :: Maybe Region
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateBucketConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cbcLocationConstraint'
createBucketConfiguration
:: CreateBucketConfiguration
createBucketConfiguration =
CreateBucketConfiguration'
{ _cbcLocationConstraint = Nothing
}
-- | Specifies the region where the bucket will be created. If you don\'t
-- specify a region, the bucket will be created in US Standard.
cbcLocationConstraint :: Lens' CreateBucketConfiguration (Maybe Region)
cbcLocationConstraint = lens _cbcLocationConstraint (\ s a -> s{_cbcLocationConstraint = a});
instance ToXML CreateBucketConfiguration where
toXML CreateBucketConfiguration'{..}
= mconcat
["LocationConstraint" @= _cbcLocationConstraint]
-- | /See:/ 'delete'' smart constructor.
data Delete = Delete'
{ _dQuiet :: !(Maybe Bool)
, _dObjects :: ![ObjectIdentifier]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Delete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dQuiet'
--
-- * 'dObjects'
delete'
:: Delete
delete' =
Delete'
{ _dQuiet = Nothing
, _dObjects = mempty
}
-- | Element to enable quiet mode for the request. When you add this element,
-- you must set its value to true.
dQuiet :: Lens' Delete (Maybe Bool)
dQuiet = lens _dQuiet (\ s a -> s{_dQuiet = a});
-- | Undocumented member.
dObjects :: Lens' Delete [ObjectIdentifier]
dObjects = lens _dObjects (\ s a -> s{_dObjects = a}) . _Coerce;
instance ToXML Delete where
toXML Delete'{..}
= mconcat
["Quiet" @= _dQuiet, toXMLList "Object" _dObjects]
-- | /See:/ 'deleteMarkerEntry' smart constructor.
data DeleteMarkerEntry = DeleteMarkerEntry'
{ _dmeVersionId :: !(Maybe ObjectVersionId)
, _dmeIsLatest :: !(Maybe Bool)
, _dmeOwner :: !(Maybe Owner)
, _dmeKey :: !(Maybe ObjectKey)
, _dmeLastModified :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteMarkerEntry' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dmeVersionId'
--
-- * 'dmeIsLatest'
--
-- * 'dmeOwner'
--
-- * 'dmeKey'
--
-- * 'dmeLastModified'
deleteMarkerEntry
:: DeleteMarkerEntry
deleteMarkerEntry =
DeleteMarkerEntry'
{ _dmeVersionId = Nothing
, _dmeIsLatest = Nothing
, _dmeOwner = Nothing
, _dmeKey = Nothing
, _dmeLastModified = Nothing
}
-- | Version ID of an object.
dmeVersionId :: Lens' DeleteMarkerEntry (Maybe ObjectVersionId)
dmeVersionId = lens _dmeVersionId (\ s a -> s{_dmeVersionId = a});
-- | Specifies whether the object is (true) or is not (false) the latest
-- version of an object.
dmeIsLatest :: Lens' DeleteMarkerEntry (Maybe Bool)
dmeIsLatest = lens _dmeIsLatest (\ s a -> s{_dmeIsLatest = a});
-- | Undocumented member.
dmeOwner :: Lens' DeleteMarkerEntry (Maybe Owner)
dmeOwner = lens _dmeOwner (\ s a -> s{_dmeOwner = a});
-- | The object key.
dmeKey :: Lens' DeleteMarkerEntry (Maybe ObjectKey)
dmeKey = lens _dmeKey (\ s a -> s{_dmeKey = a});
-- | Date and time the object was last modified.
dmeLastModified :: Lens' DeleteMarkerEntry (Maybe UTCTime)
dmeLastModified = lens _dmeLastModified (\ s a -> s{_dmeLastModified = a}) . mapping _Time;
instance FromXML DeleteMarkerEntry where
parseXML x
= DeleteMarkerEntry' <$>
(x .@? "VersionId") <*> (x .@? "IsLatest") <*>
(x .@? "Owner")
<*> (x .@? "Key")
<*> (x .@? "LastModified")
-- | /See:/ 'deletedObject' smart constructor.
data DeletedObject = DeletedObject'
{ _dVersionId :: !(Maybe ObjectVersionId)
, _dDeleteMarker :: !(Maybe Bool)
, _dDeleteMarkerVersionId :: !(Maybe Text)
, _dKey :: !(Maybe ObjectKey)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeletedObject' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dVersionId'
--
-- * 'dDeleteMarker'
--
-- * 'dDeleteMarkerVersionId'
--
-- * 'dKey'
deletedObject
:: DeletedObject
deletedObject =
DeletedObject'
{ _dVersionId = Nothing
, _dDeleteMarker = Nothing
, _dDeleteMarkerVersionId = Nothing
, _dKey = Nothing
}
-- | Undocumented member.
dVersionId :: Lens' DeletedObject (Maybe ObjectVersionId)
dVersionId = lens _dVersionId (\ s a -> s{_dVersionId = a});
-- | Undocumented member.
dDeleteMarker :: Lens' DeletedObject (Maybe Bool)
dDeleteMarker = lens _dDeleteMarker (\ s a -> s{_dDeleteMarker = a});
-- | Undocumented member.
dDeleteMarkerVersionId :: Lens' DeletedObject (Maybe Text)
dDeleteMarkerVersionId = lens _dDeleteMarkerVersionId (\ s a -> s{_dDeleteMarkerVersionId = a});
-- | Undocumented member.
dKey :: Lens' DeletedObject (Maybe ObjectKey)
dKey = lens _dKey (\ s a -> s{_dKey = a});
instance FromXML DeletedObject where
parseXML x
= DeletedObject' <$>
(x .@? "VersionId") <*> (x .@? "DeleteMarker") <*>
(x .@? "DeleteMarkerVersionId")
<*> (x .@? "Key")
-- | /See:/ 'destination' smart constructor.
data Destination = Destination'
{ _dStorageClass :: !(Maybe StorageClass)
, _dBucket :: !BucketName
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Destination' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dStorageClass'
--
-- * 'dBucket'
destination
:: BucketName -- ^ 'dBucket'
-> Destination
destination pBucket_ =
Destination'
{ _dStorageClass = Nothing
, _dBucket = pBucket_
}
-- | The class of storage used to store the object.
dStorageClass :: Lens' Destination (Maybe StorageClass)
dStorageClass = lens _dStorageClass (\ s a -> s{_dStorageClass = a});
-- | Amazon resource name (ARN) of the bucket where you want Amazon S3 to
-- store replicas of the object identified by the rule.
dBucket :: Lens' Destination BucketName
dBucket = lens _dBucket (\ s a -> s{_dBucket = a});
instance FromXML Destination where
parseXML x
= Destination' <$>
(x .@? "StorageClass") <*> (x .@ "Bucket")
instance ToXML Destination where
toXML Destination'{..}
= mconcat
["StorageClass" @= _dStorageClass,
"Bucket" @= _dBucket]
-- | /See:/ 'errorDocument' smart constructor.
newtype ErrorDocument = ErrorDocument'
{ _edKey :: ObjectKey
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ErrorDocument' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'edKey'
errorDocument
:: ObjectKey -- ^ 'edKey'
-> ErrorDocument
errorDocument pKey_ =
ErrorDocument'
{ _edKey = pKey_
}
-- | The object key name to use when a 4XX class error occurs.
edKey :: Lens' ErrorDocument ObjectKey
edKey = lens _edKey (\ s a -> s{_edKey = a});
instance FromXML ErrorDocument where
parseXML x = ErrorDocument' <$> (x .@ "Key")
instance ToXML ErrorDocument where
toXML ErrorDocument'{..} = mconcat ["Key" @= _edKey]
-- | Container for key value pair that defines the criteria for the filter
-- rule.
--
-- /See:/ 'filterRule' smart constructor.
data FilterRule = FilterRule'
{ _frValue :: !(Maybe Text)
, _frName :: !(Maybe FilterRuleName)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'FilterRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'frValue'
--
-- * 'frName'
filterRule
:: FilterRule
filterRule =
FilterRule'
{ _frValue = Nothing
, _frName = Nothing
}
-- | Undocumented member.
frValue :: Lens' FilterRule (Maybe Text)
frValue = lens _frValue (\ s a -> s{_frValue = a});
-- | Object key name prefix or suffix identifying one or more objects to
-- which the filtering rule applies. Maximum prefix length can be up to
-- 1,024 characters. Overlapping prefixes and suffixes are not supported.
-- For more information, go to
-- <http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html Configuring Event Notifications>
-- in the Amazon Simple Storage Service Developer Guide.
frName :: Lens' FilterRule (Maybe FilterRuleName)
frName = lens _frName (\ s a -> s{_frName = a});
instance FromXML FilterRule where
parseXML x
= FilterRule' <$> (x .@? "Value") <*> (x .@? "Name")
instance ToXML FilterRule where
toXML FilterRule'{..}
= mconcat ["Value" @= _frValue, "Name" @= _frName]
-- | /See:/ 'grant' smart constructor.
data Grant = Grant'
{ _gPermission :: !(Maybe Permission)
, _gGrantee :: !(Maybe Grantee)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Grant' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gPermission'
--
-- * 'gGrantee'
grant
:: Grant
grant =
Grant'
{ _gPermission = Nothing
, _gGrantee = Nothing
}
-- | Specifies the permission given to the grantee.
gPermission :: Lens' Grant (Maybe Permission)
gPermission = lens _gPermission (\ s a -> s{_gPermission = a});
-- | Undocumented member.
gGrantee :: Lens' Grant (Maybe Grantee)
gGrantee = lens _gGrantee (\ s a -> s{_gGrantee = a});
instance FromXML Grant where
parseXML x
= Grant' <$>
(x .@? "Permission") <*> (x .@? "Grantee")
instance ToXML Grant where
toXML Grant'{..}
= mconcat
["Permission" @= _gPermission,
"Grantee" @= _gGrantee]
-- | /See:/ 'grantee' smart constructor.
data Grantee = Grantee'
{ _gURI :: !(Maybe Text)
, _gEmailAddress :: !(Maybe Text)
, _gDisplayName :: !(Maybe Text)
, _gId :: !(Maybe Text)
, _gType :: !Type
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Grantee' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gURI'
--
-- * 'gEmailAddress'
--
-- * 'gDisplayName'
--
-- * 'gId'
--
-- * 'gType'
grantee
:: Type -- ^ 'gType'
-> Grantee
grantee pType_ =
Grantee'
{ _gURI = Nothing
, _gEmailAddress = Nothing
, _gDisplayName = Nothing
, _gId = Nothing
, _gType = pType_
}
-- | URI of the grantee group.
gURI :: Lens' Grantee (Maybe Text)
gURI = lens _gURI (\ s a -> s{_gURI = a});
-- | Email address of the grantee.
gEmailAddress :: Lens' Grantee (Maybe Text)
gEmailAddress = lens _gEmailAddress (\ s a -> s{_gEmailAddress = a});
-- | Screen name of the grantee.
gDisplayName :: Lens' Grantee (Maybe Text)
gDisplayName = lens _gDisplayName (\ s a -> s{_gDisplayName = a});
-- | The canonical user ID of the grantee.
gId :: Lens' Grantee (Maybe Text)
gId = lens _gId (\ s a -> s{_gId = a});
-- | Type of grantee
gType :: Lens' Grantee Type
gType = lens _gType (\ s a -> s{_gType = a});
instance FromXML Grantee where
parseXML x
= Grantee' <$>
(x .@? "URI") <*> (x .@? "EmailAddress") <*>
(x .@? "DisplayName")
<*> (x .@? "ID")
<*> (x .@ "xsi:type")
instance ToXML Grantee where
toXML Grantee'{..}
= mconcat
["URI" @= _gURI, "EmailAddress" @= _gEmailAddress,
"DisplayName" @= _gDisplayName, "ID" @= _gId,
"xsi:type" @= _gType]
-- | /See:/ 'indexDocument' smart constructor.
newtype IndexDocument = IndexDocument'
{ _idSuffix :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'IndexDocument' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'idSuffix'
indexDocument
:: Text -- ^ 'idSuffix'
-> IndexDocument
indexDocument pSuffix_ =
IndexDocument'
{ _idSuffix = pSuffix_
}
-- | A suffix that is appended to a request that is for a directory on the
-- website endpoint (e.g. if the suffix is index.html and you make a
-- request to samplebucket\/images\/ the data that is returned will be for
-- the object with the key name images\/index.html) The suffix must not be
-- empty and must not include a slash character.
idSuffix :: Lens' IndexDocument Text
idSuffix = lens _idSuffix (\ s a -> s{_idSuffix = a});
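-- A minimal usage sketch (not part of the original source; assumes
-- OverloadedStrings for the Text literal):
--
-- > indexDocument "index.html"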
instance FromXML IndexDocument where
parseXML x = IndexDocument' <$> (x .@ "Suffix")
instance ToXML IndexDocument where
toXML IndexDocument'{..}
= mconcat ["Suffix" @= _idSuffix]
-- | /See:/ 'initiator' smart constructor.
data Initiator = Initiator'
{ _iDisplayName :: !(Maybe Text)
, _iId :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Initiator' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'iDisplayName'
--
-- * 'iId'
initiator
:: Initiator
initiator =
Initiator'
{ _iDisplayName = Nothing
, _iId = Nothing
}
-- | Name of the Principal.
iDisplayName :: Lens' Initiator (Maybe Text)
iDisplayName = lens _iDisplayName (\ s a -> s{_iDisplayName = a});
-- | If the principal is an AWS account, it provides the Canonical User ID.
-- If the principal is an IAM User, it provides a user ARN value.
iId :: Lens' Initiator (Maybe Text)
iId = lens _iId (\ s a -> s{_iId = a});
instance FromXML Initiator where
parseXML x
= Initiator' <$>
(x .@? "DisplayName") <*> (x .@? "ID")
-- | Container for specifying the AWS Lambda notification configuration.
--
-- /See:/ 'lambdaFunctionConfiguration' smart constructor.
data LambdaFunctionConfiguration = LambdaFunctionConfiguration'
{ _lfcId :: !(Maybe Text)
, _lfcFilter :: !(Maybe NotificationConfigurationFilter)
, _lfcLambdaFunctionARN :: !Text
, _lfcEvents :: ![Event]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LambdaFunctionConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lfcId'
--
-- * 'lfcFilter'
--
-- * 'lfcLambdaFunctionARN'
--
-- * 'lfcEvents'
lambdaFunctionConfiguration
:: Text -- ^ 'lfcLambdaFunctionARN'
-> LambdaFunctionConfiguration
lambdaFunctionConfiguration pLambdaFunctionARN_ =
LambdaFunctionConfiguration'
{ _lfcId = Nothing
, _lfcFilter = Nothing
, _lfcLambdaFunctionARN = pLambdaFunctionARN_
, _lfcEvents = mempty
}
-- | Undocumented member.
lfcId :: Lens' LambdaFunctionConfiguration (Maybe Text)
lfcId = lens _lfcId (\ s a -> s{_lfcId = a});
-- | Undocumented member.
lfcFilter :: Lens' LambdaFunctionConfiguration (Maybe NotificationConfigurationFilter)
lfcFilter = lens _lfcFilter (\ s a -> s{_lfcFilter = a});
-- | Lambda cloud function ARN that Amazon S3 can invoke when it detects
-- events of the specified type.
lfcLambdaFunctionARN :: Lens' LambdaFunctionConfiguration Text
lfcLambdaFunctionARN = lens _lfcLambdaFunctionARN (\ s a -> s{_lfcLambdaFunctionARN = a});
-- | Undocumented member.
lfcEvents :: Lens' LambdaFunctionConfiguration [Event]
lfcEvents = lens _lfcEvents (\ s a -> s{_lfcEvents = a}) . _Coerce;
instance FromXML LambdaFunctionConfiguration where
parseXML x
= LambdaFunctionConfiguration' <$>
(x .@? "Id") <*> (x .@? "Filter") <*>
(x .@ "CloudFunction")
<*> (parseXMLList "Event" x)
instance ToXML LambdaFunctionConfiguration where
toXML LambdaFunctionConfiguration'{..}
= mconcat
["Id" @= _lfcId, "Filter" @= _lfcFilter,
"CloudFunction" @= _lfcLambdaFunctionARN,
toXMLList "Event" _lfcEvents]
-- | /See:/ 'lifecycleExpiration' smart constructor.
data LifecycleExpiration = LifecycleExpiration'
{ _leDays :: !(Maybe Int)
, _leDate :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LifecycleExpiration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'leDays'
--
-- * 'leDate'
lifecycleExpiration
:: LifecycleExpiration
lifecycleExpiration =
LifecycleExpiration'
{ _leDays = Nothing
, _leDate = Nothing
}
-- | Indicates the lifetime, in days, of the objects that are subject to the
-- rule. The value must be a non-zero positive integer.
leDays :: Lens' LifecycleExpiration (Maybe Int)
leDays = lens _leDays (\ s a -> s{_leDays = a});
-- | Indicates at what date the object is to be moved or deleted. Should be
-- in GMT ISO 8601 Format.
leDate :: Lens' LifecycleExpiration (Maybe UTCTime)
leDate = lens _leDate (\ s a -> s{_leDate = a}) . mapping _Time;
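-- A minimal usage sketch (not part of the original source): expire affected
-- objects 30 days after creation. Assumes the lens operators (&) and (?~)
-- are in scope.
--
-- > lifecycleExpiration & leDays ?~ 30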
instance FromXML LifecycleExpiration where
parseXML x
= LifecycleExpiration' <$>
(x .@? "Days") <*> (x .@? "Date")
instance ToXML LifecycleExpiration where
toXML LifecycleExpiration'{..}
= mconcat ["Days" @= _leDays, "Date" @= _leDate]
-- | /See:/ 'lifecycleRule' smart constructor.
data LifecycleRule = LifecycleRule'
{ _lrTransitions :: !(Maybe [Transition])
, _lrNoncurrentVersionExpiration :: !(Maybe NoncurrentVersionExpiration)
, _lrNoncurrentVersionTransitions :: !(Maybe [NoncurrentVersionTransition])
, _lrExpiration :: !(Maybe LifecycleExpiration)
, _lrId :: !(Maybe Text)
, _lrPrefix :: !Text
, _lrStatus :: !ExpirationStatus
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LifecycleRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lrTransitions'
--
-- * 'lrNoncurrentVersionExpiration'
--
-- * 'lrNoncurrentVersionTransitions'
--
-- * 'lrExpiration'
--
-- * 'lrId'
--
-- * 'lrPrefix'
--
-- * 'lrStatus'
lifecycleRule
:: Text -- ^ 'lrPrefix'
-> ExpirationStatus -- ^ 'lrStatus'
-> LifecycleRule
lifecycleRule pPrefix_ pStatus_ =
LifecycleRule'
{ _lrTransitions = Nothing
, _lrNoncurrentVersionExpiration = Nothing
, _lrNoncurrentVersionTransitions = Nothing
, _lrExpiration = Nothing
, _lrId = Nothing
, _lrPrefix = pPrefix_
, _lrStatus = pStatus_
}
-- | Undocumented member.
lrTransitions :: Lens' LifecycleRule [Transition]
lrTransitions = lens _lrTransitions (\ s a -> s{_lrTransitions = a}) . _Default . _Coerce;
-- | Undocumented member.
lrNoncurrentVersionExpiration :: Lens' LifecycleRule (Maybe NoncurrentVersionExpiration)
lrNoncurrentVersionExpiration = lens _lrNoncurrentVersionExpiration (\ s a -> s{_lrNoncurrentVersionExpiration = a});
-- | Undocumented member.
lrNoncurrentVersionTransitions :: Lens' LifecycleRule [NoncurrentVersionTransition]
lrNoncurrentVersionTransitions = lens _lrNoncurrentVersionTransitions (\ s a -> s{_lrNoncurrentVersionTransitions = a}) . _Default . _Coerce;
-- | Undocumented member.
lrExpiration :: Lens' LifecycleRule (Maybe LifecycleExpiration)
lrExpiration = lens _lrExpiration (\ s a -> s{_lrExpiration = a});
-- | Unique identifier for the rule. The value cannot be longer than 255
-- characters.
lrId :: Lens' LifecycleRule (Maybe Text)
lrId = lens _lrId (\ s a -> s{_lrId = a});
-- | Prefix identifying one or more objects to which the rule applies.
lrPrefix :: Lens' LifecycleRule Text
lrPrefix = lens _lrPrefix (\ s a -> s{_lrPrefix = a});
-- | If \'Enabled\', the rule is currently being applied. If \'Disabled\',
-- the rule is not currently being applied.
lrStatus :: Lens' LifecycleRule ExpirationStatus
lrStatus = lens _lrStatus (\ s a -> s{_lrStatus = a});
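-- A minimal usage sketch (not part of the original source). The 'ESEnabled'
-- constructor name is an assumption about the ExpirationStatus type defined
-- elsewhere in this package; the prefix and ID are example values, and the
-- lens operators (&) and (?~) are assumed in scope.
--
-- > lifecycleRule "logs/" ESEnabled & lrId ?~ "expire-old-logs"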
instance FromXML LifecycleRule where
parseXML x
= LifecycleRule' <$>
(may (parseXMLList "Transition") x) <*>
(x .@? "NoncurrentVersionExpiration")
<*>
(may (parseXMLList "NoncurrentVersionTransition") x)
<*> (x .@? "Expiration")
<*> (x .@? "ID")
<*> (x .@ "Prefix")
<*> (x .@ "Status")
instance ToXML LifecycleRule where
toXML LifecycleRule'{..}
= mconcat
[toXML (toXMLList "Transition" <$> _lrTransitions),
"NoncurrentVersionExpiration" @=
_lrNoncurrentVersionExpiration,
toXML
(toXMLList "NoncurrentVersionTransition" <$>
_lrNoncurrentVersionTransitions),
"Expiration" @= _lrExpiration, "ID" @= _lrId,
"Prefix" @= _lrPrefix, "Status" @= _lrStatus]
-- | /See:/ 'loggingEnabled' smart constructor.
data LoggingEnabled = LoggingEnabled'
{ _leTargetBucket :: !(Maybe Text)
, _leTargetGrants :: !(Maybe [TargetGrant])
, _leTargetPrefix :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LoggingEnabled' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'leTargetBucket'
--
-- * 'leTargetGrants'
--
-- * 'leTargetPrefix'
loggingEnabled
:: LoggingEnabled
loggingEnabled =
LoggingEnabled'
{ _leTargetBucket = Nothing
, _leTargetGrants = Nothing
, _leTargetPrefix = Nothing
}
-- | Specifies the bucket where you want Amazon S3 to store server access
-- logs. You can have your logs delivered to any bucket that you own,
-- including the same bucket that is being logged. You can also configure
-- multiple buckets to deliver their logs to the same target bucket. In
-- this case you should choose a different TargetPrefix for each source
-- bucket so that the delivered log files can be distinguished by key.
leTargetBucket :: Lens' LoggingEnabled (Maybe Text)
leTargetBucket = lens _leTargetBucket (\ s a -> s{_leTargetBucket = a});
-- | Undocumented member.
leTargetGrants :: Lens' LoggingEnabled [TargetGrant]
leTargetGrants = lens _leTargetGrants (\ s a -> s{_leTargetGrants = a}) . _Default . _Coerce;
-- | This element lets you specify a prefix for the keys that the log files
-- will be stored under.
leTargetPrefix :: Lens' LoggingEnabled (Maybe Text)
leTargetPrefix = lens _leTargetPrefix (\ s a -> s{_leTargetPrefix = a});
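-- A minimal usage sketch (not part of the original source; the bucket and
-- prefix are example values, and the lens operators (&) and (?~) are assumed
-- in scope):
--
-- > loggingEnabled & leTargetBucket ?~ "my-log-bucket"
-- >                & leTargetPrefix ?~ "access-logs/"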
instance FromXML LoggingEnabled where
parseXML x
= LoggingEnabled' <$>
(x .@? "TargetBucket") <*>
(x .@? "TargetGrants" .!@ mempty >>=
may (parseXMLList "Grant"))
<*> (x .@? "TargetPrefix")
instance ToXML LoggingEnabled where
toXML LoggingEnabled'{..}
= mconcat
["TargetBucket" @= _leTargetBucket,
"TargetGrants" @=
toXML (toXMLList "Grant" <$> _leTargetGrants),
"TargetPrefix" @= _leTargetPrefix]
-- | /See:/ 'multipartUpload' smart constructor.
data MultipartUpload = MultipartUpload'
{ _muInitiated :: !(Maybe RFC822)
, _muInitiator :: !(Maybe Initiator)
, _muOwner :: !(Maybe Owner)
, _muKey :: !(Maybe ObjectKey)
, _muStorageClass :: !(Maybe StorageClass)
, _muUploadId :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'MultipartUpload' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'muInitiated'
--
-- * 'muInitiator'
--
-- * 'muOwner'
--
-- * 'muKey'
--
-- * 'muStorageClass'
--
-- * 'muUploadId'
multipartUpload
:: MultipartUpload
multipartUpload =
MultipartUpload'
{ _muInitiated = Nothing
, _muInitiator = Nothing
, _muOwner = Nothing
, _muKey = Nothing
, _muStorageClass = Nothing
, _muUploadId = Nothing
}
-- | Date and time at which the multipart upload was initiated.
muInitiated :: Lens' MultipartUpload (Maybe UTCTime)
muInitiated = lens _muInitiated (\ s a -> s{_muInitiated = a}) . mapping _Time;
-- | Identifies who initiated the multipart upload.
muInitiator :: Lens' MultipartUpload (Maybe Initiator)
muInitiator = lens _muInitiator (\ s a -> s{_muInitiator = a});
-- | Undocumented member.
muOwner :: Lens' MultipartUpload (Maybe Owner)
muOwner = lens _muOwner (\ s a -> s{_muOwner = a});
-- | Key of the object for which the multipart upload was initiated.
muKey :: Lens' MultipartUpload (Maybe ObjectKey)
muKey = lens _muKey (\ s a -> s{_muKey = a});
-- | The class of storage used to store the object.
muStorageClass :: Lens' MultipartUpload (Maybe StorageClass)
muStorageClass = lens _muStorageClass (\ s a -> s{_muStorageClass = a});
-- | Upload ID that identifies the multipart upload.
muUploadId :: Lens' MultipartUpload (Maybe Text)
muUploadId = lens _muUploadId (\ s a -> s{_muUploadId = a});
instance FromXML MultipartUpload where
parseXML x
= MultipartUpload' <$>
(x .@? "Initiated") <*> (x .@? "Initiator") <*>
(x .@? "Owner")
<*> (x .@? "Key")
<*> (x .@? "StorageClass")
<*> (x .@? "UploadId")
-- | Specifies when noncurrent object versions expire. Upon expiration,
-- Amazon S3 permanently deletes the noncurrent object versions. You set
-- this lifecycle configuration action on a bucket that has versioning
-- enabled (or suspended) to request that Amazon S3 delete noncurrent
-- object versions at a specific period in the object\'s lifetime.
--
-- /See:/ 'noncurrentVersionExpiration' smart constructor.
newtype NoncurrentVersionExpiration = NoncurrentVersionExpiration'
{ _nveNoncurrentDays :: Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'NoncurrentVersionExpiration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'nveNoncurrentDays'
noncurrentVersionExpiration
:: Int -- ^ 'nveNoncurrentDays'
-> NoncurrentVersionExpiration
noncurrentVersionExpiration pNoncurrentDays_ =
NoncurrentVersionExpiration'
{ _nveNoncurrentDays = pNoncurrentDays_
}
-- | Specifies the number of days an object is noncurrent before Amazon S3
-- can perform the associated action. For information about the noncurrent
-- days calculations, see
-- </AmazonS3/latest/dev/s3-access-control.html How Amazon S3 Calculates When an Object Became Noncurrent>
-- in the Amazon Simple Storage Service Developer Guide.
nveNoncurrentDays :: Lens' NoncurrentVersionExpiration Int
nveNoncurrentDays = lens _nveNoncurrentDays (\ s a -> s{_nveNoncurrentDays = a});
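-- A minimal usage sketch (not part of the original source): delete noncurrent
-- versions 30 days after they become noncurrent.
--
-- > noncurrentVersionExpiration 30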
instance FromXML NoncurrentVersionExpiration where
parseXML x
= NoncurrentVersionExpiration' <$>
(x .@ "NoncurrentDays")
instance ToXML NoncurrentVersionExpiration where
toXML NoncurrentVersionExpiration'{..}
= mconcat ["NoncurrentDays" @= _nveNoncurrentDays]
-- | Container for the transition rule that describes when noncurrent objects
-- transition to the STANDARD_IA or GLACIER storage class. If your bucket
-- is versioning-enabled (or versioning is suspended), you can set this
-- action to request that Amazon S3 transition noncurrent object versions
-- to the STANDARD_IA or GLACIER storage class at a specific period in the
-- object\'s lifetime.
--
-- /See:/ 'noncurrentVersionTransition' smart constructor.
data NoncurrentVersionTransition = NoncurrentVersionTransition'
{ _nvtNoncurrentDays :: !Int
, _nvtStorageClass :: !TransitionStorageClass
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'NoncurrentVersionTransition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'nvtNoncurrentDays'
--
-- * 'nvtStorageClass'
noncurrentVersionTransition
:: Int -- ^ 'nvtNoncurrentDays'
-> TransitionStorageClass -- ^ 'nvtStorageClass'
-> NoncurrentVersionTransition
noncurrentVersionTransition pNoncurrentDays_ pStorageClass_ =
NoncurrentVersionTransition'
{ _nvtNoncurrentDays = pNoncurrentDays_
, _nvtStorageClass = pStorageClass_
}
-- | Specifies the number of days an object is noncurrent before Amazon S3
-- can perform the associated action. For information about the noncurrent
-- days calculations, see
-- </AmazonS3/latest/dev/s3-access-control.html How Amazon S3 Calculates When an Object Became Noncurrent>
-- in the Amazon Simple Storage Service Developer Guide.
nvtNoncurrentDays :: Lens' NoncurrentVersionTransition Int
nvtNoncurrentDays = lens _nvtNoncurrentDays (\ s a -> s{_nvtNoncurrentDays = a});
-- | The class of storage used to store the object.
nvtStorageClass :: Lens' NoncurrentVersionTransition TransitionStorageClass
nvtStorageClass = lens _nvtStorageClass (\ s a -> s{_nvtStorageClass = a});
instance FromXML NoncurrentVersionTransition where
parseXML x
= NoncurrentVersionTransition' <$>
(x .@ "NoncurrentDays") <*> (x .@ "StorageClass")
instance ToXML NoncurrentVersionTransition where
toXML NoncurrentVersionTransition'{..}
= mconcat
["NoncurrentDays" @= _nvtNoncurrentDays,
"StorageClass" @= _nvtStorageClass]
-- | Container for specifying the notification configuration of the bucket.
-- If this element is empty, notifications are turned off on the bucket.
--
-- /See:/ 'notificationConfiguration' smart constructor.
data NotificationConfiguration = NotificationConfiguration'
{ _ncQueueConfigurations :: !(Maybe [QueueConfiguration])
, _ncTopicConfigurations :: !(Maybe [TopicConfiguration])
, _ncLambdaFunctionConfigurations :: !(Maybe [LambdaFunctionConfiguration])
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'NotificationConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ncQueueConfigurations'
--
-- * 'ncTopicConfigurations'
--
-- * 'ncLambdaFunctionConfigurations'
notificationConfiguration
:: NotificationConfiguration
notificationConfiguration =
NotificationConfiguration'
{ _ncQueueConfigurations = Nothing
, _ncTopicConfigurations = Nothing
, _ncLambdaFunctionConfigurations = Nothing
}
-- | Undocumented member.
ncQueueConfigurations :: Lens' NotificationConfiguration [QueueConfiguration]
ncQueueConfigurations = lens _ncQueueConfigurations (\ s a -> s{_ncQueueConfigurations = a}) . _Default . _Coerce;
-- | Undocumented member.
ncTopicConfigurations :: Lens' NotificationConfiguration [TopicConfiguration]
ncTopicConfigurations = lens _ncTopicConfigurations (\ s a -> s{_ncTopicConfigurations = a}) . _Default . _Coerce;
-- | Undocumented member.
ncLambdaFunctionConfigurations :: Lens' NotificationConfiguration [LambdaFunctionConfiguration]
ncLambdaFunctionConfigurations = lens _ncLambdaFunctionConfigurations (\ s a -> s{_ncLambdaFunctionConfigurations = a}) . _Default . _Coerce;
instance FromXML NotificationConfiguration where
parseXML x
= NotificationConfiguration' <$>
(may (parseXMLList "QueueConfiguration") x) <*>
(may (parseXMLList "TopicConfiguration") x)
<*>
(may (parseXMLList "CloudFunctionConfiguration") x)
instance ToXML NotificationConfiguration where
toXML NotificationConfiguration'{..}
= mconcat
[toXML
(toXMLList "QueueConfiguration" <$>
_ncQueueConfigurations),
toXML
(toXMLList "TopicConfiguration" <$>
_ncTopicConfigurations),
toXML
(toXMLList "CloudFunctionConfiguration" <$>
_ncLambdaFunctionConfigurations)]
-- | Container for object key name filtering rules. For information about key
-- name filtering, go to
-- <http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html Configuring Event Notifications>
-- in the Amazon Simple Storage Service Developer Guide.
--
-- /See:/ 'notificationConfigurationFilter' smart constructor.
newtype NotificationConfigurationFilter = NotificationConfigurationFilter'
{ _ncfKey :: Maybe S3KeyFilter
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'NotificationConfigurationFilter' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ncfKey'
notificationConfigurationFilter
:: NotificationConfigurationFilter
notificationConfigurationFilter =
NotificationConfigurationFilter'
{ _ncfKey = Nothing
}
-- | Undocumented member.
ncfKey :: Lens' NotificationConfigurationFilter (Maybe S3KeyFilter)
ncfKey = lens _ncfKey (\ s a -> s{_ncfKey = a});
instance FromXML NotificationConfigurationFilter
where
parseXML x
= NotificationConfigurationFilter' <$>
(x .@? "S3Key")
instance ToXML NotificationConfigurationFilter where
toXML NotificationConfigurationFilter'{..}
= mconcat ["S3Key" @= _ncfKey]
-- | /See:/ 'object'' smart constructor.
data Object = Object'
{ _oOwner :: !(Maybe Owner)
, _oETag :: !ETag
, _oSize :: !Int
, _oKey :: !ObjectKey
, _oStorageClass :: !ObjectStorageClass
, _oLastModified :: !RFC822
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Object' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oOwner'
--
-- * 'oETag'
--
-- * 'oSize'
--
-- * 'oKey'
--
-- * 'oStorageClass'
--
-- * 'oLastModified'
object'
:: ETag -- ^ 'oETag'
-> Int -- ^ 'oSize'
-> ObjectKey -- ^ 'oKey'
-> ObjectStorageClass -- ^ 'oStorageClass'
-> UTCTime -- ^ 'oLastModified'
-> Object
object' pETag_ pSize_ pKey_ pStorageClass_ pLastModified_ =
Object'
{ _oOwner = Nothing
, _oETag = pETag_
, _oSize = pSize_
, _oKey = pKey_
, _oStorageClass = pStorageClass_
, _oLastModified = _Time # pLastModified_
}
-- | Undocumented member.
oOwner :: Lens' Object (Maybe Owner)
oOwner = lens _oOwner (\ s a -> s{_oOwner = a});
-- | Undocumented member.
oETag :: Lens' Object ETag
oETag = lens _oETag (\ s a -> s{_oETag = a});
-- | Undocumented member.
oSize :: Lens' Object Int
oSize = lens _oSize (\ s a -> s{_oSize = a});
-- | Undocumented member.
oKey :: Lens' Object ObjectKey
oKey = lens _oKey (\ s a -> s{_oKey = a});
-- | The class of storage used to store the object.
oStorageClass :: Lens' Object ObjectStorageClass
oStorageClass = lens _oStorageClass (\ s a -> s{_oStorageClass = a});
-- | Undocumented member.
oLastModified :: Lens' Object UTCTime
oLastModified = lens _oLastModified (\ s a -> s{_oLastModified = a}) . _Time;
instance FromXML Object where
parseXML x
= Object' <$>
(x .@? "Owner") <*> (x .@ "ETag") <*> (x .@ "Size")
<*> (x .@ "Key")
<*> (x .@ "StorageClass")
<*> (x .@ "LastModified")
-- | /See:/ 'objectIdentifier' smart constructor.
data ObjectIdentifier = ObjectIdentifier'
{ _oiVersionId :: !(Maybe ObjectVersionId)
, _oiKey :: !ObjectKey
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ObjectIdentifier' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oiVersionId'
--
-- * 'oiKey'
objectIdentifier
:: ObjectKey -- ^ 'oiKey'
-> ObjectIdentifier
objectIdentifier pKey_ =
ObjectIdentifier'
{ _oiVersionId = Nothing
, _oiKey = pKey_
}
-- | VersionId for the specific version of the object to delete.
oiVersionId :: Lens' ObjectIdentifier (Maybe ObjectVersionId)
oiVersionId = lens _oiVersionId (\ s a -> s{_oiVersionId = a});
-- | Key name of the object to delete.
oiKey :: Lens' ObjectIdentifier ObjectKey
oiKey = lens _oiKey (\ s a -> s{_oiKey = a});
instance ToXML ObjectIdentifier where
toXML ObjectIdentifier'{..}
= mconcat
["VersionId" @= _oiVersionId, "Key" @= _oiKey]
-- | /See:/ 'objectVersion' smart constructor.
data ObjectVersion = ObjectVersion'
{ _ovETag :: !(Maybe ETag)
, _ovVersionId :: !(Maybe ObjectVersionId)
, _ovSize :: !(Maybe Int)
, _ovIsLatest :: !(Maybe Bool)
, _ovOwner :: !(Maybe Owner)
, _ovKey :: !(Maybe ObjectKey)
, _ovStorageClass :: !(Maybe ObjectVersionStorageClass)
, _ovLastModified :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ObjectVersion' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ovETag'
--
-- * 'ovVersionId'
--
-- * 'ovSize'
--
-- * 'ovIsLatest'
--
-- * 'ovOwner'
--
-- * 'ovKey'
--
-- * 'ovStorageClass'
--
-- * 'ovLastModified'
objectVersion
:: ObjectVersion
objectVersion =
ObjectVersion'
{ _ovETag = Nothing
, _ovVersionId = Nothing
, _ovSize = Nothing
, _ovIsLatest = Nothing
, _ovOwner = Nothing
, _ovKey = Nothing
, _ovStorageClass = Nothing
, _ovLastModified = Nothing
}
-- | Undocumented member.
ovETag :: Lens' ObjectVersion (Maybe ETag)
ovETag = lens _ovETag (\ s a -> s{_ovETag = a});
-- | Version ID of an object.
ovVersionId :: Lens' ObjectVersion (Maybe ObjectVersionId)
ovVersionId = lens _ovVersionId (\ s a -> s{_ovVersionId = a});
-- | Size in bytes of the object.
ovSize :: Lens' ObjectVersion (Maybe Int)
ovSize = lens _ovSize (\ s a -> s{_ovSize = a});
-- | Specifies whether the object is (true) or is not (false) the latest
-- version of an object.
ovIsLatest :: Lens' ObjectVersion (Maybe Bool)
ovIsLatest = lens _ovIsLatest (\ s a -> s{_ovIsLatest = a});
-- | Undocumented member.
ovOwner :: Lens' ObjectVersion (Maybe Owner)
ovOwner = lens _ovOwner (\ s a -> s{_ovOwner = a});
-- | The object key.
ovKey :: Lens' ObjectVersion (Maybe ObjectKey)
ovKey = lens _ovKey (\ s a -> s{_ovKey = a});
-- | The class of storage used to store the object.
ovStorageClass :: Lens' ObjectVersion (Maybe ObjectVersionStorageClass)
ovStorageClass = lens _ovStorageClass (\ s a -> s{_ovStorageClass = a});
-- | Date and time the object was last modified.
ovLastModified :: Lens' ObjectVersion (Maybe UTCTime)
ovLastModified = lens _ovLastModified (\ s a -> s{_ovLastModified = a}) . mapping _Time;
instance FromXML ObjectVersion where
parseXML x
= ObjectVersion' <$>
(x .@? "ETag") <*> (x .@? "VersionId") <*>
(x .@? "Size")
<*> (x .@? "IsLatest")
<*> (x .@? "Owner")
<*> (x .@? "Key")
<*> (x .@? "StorageClass")
<*> (x .@? "LastModified")
-- | /See:/ 'owner' smart constructor.
data Owner = Owner'
{ _oDisplayName :: !(Maybe Text)
, _oId :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Owner' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oDisplayName'
--
-- * 'oId'
owner
:: Owner
owner =
Owner'
{ _oDisplayName = Nothing
, _oId = Nothing
}
-- | Undocumented member.
oDisplayName :: Lens' Owner (Maybe Text)
oDisplayName = lens _oDisplayName (\ s a -> s{_oDisplayName = a});
-- | Undocumented member.
oId :: Lens' Owner (Maybe Text)
oId = lens _oId (\ s a -> s{_oId = a});
instance FromXML Owner where
parseXML x
= Owner' <$> (x .@? "DisplayName") <*> (x .@? "ID")
instance ToXML Owner where
toXML Owner'{..}
= mconcat
["DisplayName" @= _oDisplayName, "ID" @= _oId]
-- | /See:/ 'part' smart constructor.
data Part = Part'
{ _pETag :: !(Maybe ETag)
, _pSize :: !(Maybe Int)
, _pPartNumber :: !(Maybe Int)
, _pLastModified :: !(Maybe RFC822)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Part' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pETag'
--
-- * 'pSize'
--
-- * 'pPartNumber'
--
-- * 'pLastModified'
part
:: Part
part =
Part'
{ _pETag = Nothing
, _pSize = Nothing
, _pPartNumber = Nothing
, _pLastModified = Nothing
}
-- | Entity tag returned when the part was uploaded.
pETag :: Lens' Part (Maybe ETag)
pETag = lens _pETag (\ s a -> s{_pETag = a});
-- | Size of the uploaded part data.
pSize :: Lens' Part (Maybe Int)
pSize = lens _pSize (\ s a -> s{_pSize = a});
-- | Part number identifying the part. This is a positive integer between 1
-- and 10,000.
pPartNumber :: Lens' Part (Maybe Int)
pPartNumber = lens _pPartNumber (\ s a -> s{_pPartNumber = a});
-- | Date and time at which the part was uploaded.
pLastModified :: Lens' Part (Maybe UTCTime)
pLastModified = lens _pLastModified (\ s a -> s{_pLastModified = a}) . mapping _Time;
instance FromXML Part where
parseXML x
= Part' <$>
(x .@? "ETag") <*> (x .@? "Size") <*>
(x .@? "PartNumber")
<*> (x .@? "LastModified")
-- | Container for specifying a configuration when you want Amazon S3 to
-- publish events to an Amazon Simple Queue Service (Amazon SQS) queue.
--
-- /See:/ 'queueConfiguration' smart constructor.
data QueueConfiguration = QueueConfiguration'
{ _qcId :: !(Maybe Text)
, _qcFilter :: !(Maybe NotificationConfigurationFilter)
, _qcQueueARN :: !Text
, _qcEvents :: ![Event]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'QueueConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'qcId'
--
-- * 'qcFilter'
--
-- * 'qcQueueARN'
--
-- * 'qcEvents'
queueConfiguration
:: Text -- ^ 'qcQueueARN'
-> QueueConfiguration
queueConfiguration pQueueARN_ =
QueueConfiguration'
{ _qcId = Nothing
, _qcFilter = Nothing
, _qcQueueARN = pQueueARN_
, _qcEvents = mempty
}
-- | Undocumented member.
qcId :: Lens' QueueConfiguration (Maybe Text)
qcId = lens _qcId (\ s a -> s{_qcId = a});
-- | Undocumented member.
qcFilter :: Lens' QueueConfiguration (Maybe NotificationConfigurationFilter)
qcFilter = lens _qcFilter (\ s a -> s{_qcFilter = a});
-- | Amazon SQS queue ARN to which Amazon S3 will publish a message when it
-- detects events of the specified type.
qcQueueARN :: Lens' QueueConfiguration Text
qcQueueARN = lens _qcQueueARN (\ s a -> s{_qcQueueARN = a});
-- | Undocumented member.
qcEvents :: Lens' QueueConfiguration [Event]
qcEvents = lens _qcEvents (\ s a -> s{_qcEvents = a}) . _Coerce;
instance FromXML QueueConfiguration where
parseXML x
= QueueConfiguration' <$>
(x .@? "Id") <*> (x .@? "Filter") <*> (x .@ "Queue")
<*> (parseXMLList "Event" x)
instance ToXML QueueConfiguration where
toXML QueueConfiguration'{..}
= mconcat
["Id" @= _qcId, "Filter" @= _qcFilter,
"Queue" @= _qcQueueARN, toXMLList "Event" _qcEvents]
-- | /See:/ 'redirect' smart constructor.
data Redirect = Redirect'
{ _rHostName :: !(Maybe Text)
, _rProtocol :: !(Maybe Protocol)
, _rHTTPRedirectCode :: !(Maybe Text)
, _rReplaceKeyWith :: !(Maybe Text)
, _rReplaceKeyPrefixWith :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Redirect' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rHostName'
--
-- * 'rProtocol'
--
-- * 'rHTTPRedirectCode'
--
-- * 'rReplaceKeyWith'
--
-- * 'rReplaceKeyPrefixWith'
redirect
:: Redirect
redirect =
Redirect'
{ _rHostName = Nothing
, _rProtocol = Nothing
, _rHTTPRedirectCode = Nothing
, _rReplaceKeyWith = Nothing
, _rReplaceKeyPrefixWith = Nothing
}
-- | The host name to use in the redirect request.
rHostName :: Lens' Redirect (Maybe Text)
rHostName = lens _rHostName (\ s a -> s{_rHostName = a});
-- | Protocol to use (http, https) when redirecting requests. The default is
-- the protocol that is used in the original request.
rProtocol :: Lens' Redirect (Maybe Protocol)
rProtocol = lens _rProtocol (\ s a -> s{_rProtocol = a});
-- | The HTTP redirect code to use on the response. Not required if one of
-- the siblings is present.
rHTTPRedirectCode :: Lens' Redirect (Maybe Text)
rHTTPRedirectCode = lens _rHTTPRedirectCode (\ s a -> s{_rHTTPRedirectCode = a});
-- | The specific object key to use in the redirect request. For example,
-- redirect the request to error.html. Not required if one of the siblings is
-- present. Can be present only if ReplaceKeyPrefixWith is not provided.
rReplaceKeyWith :: Lens' Redirect (Maybe Text)
rReplaceKeyWith = lens _rReplaceKeyWith (\ s a -> s{_rReplaceKeyWith = a});
-- | The object key prefix to use in the redirect request. For example, to
-- redirect requests for all pages with prefix docs\/ (objects in the
-- docs\/ folder) to documents\/, you can set a condition block with
-- KeyPrefixEquals set to docs\/ and in the Redirect set
-- ReplaceKeyPrefixWith to \/documents. Not required if one of the siblings
-- is present. Can be present only if ReplaceKeyWith is not provided.
rReplaceKeyPrefixWith :: Lens' Redirect (Maybe Text)
rReplaceKeyPrefixWith = lens _rReplaceKeyPrefixWith (\ s a -> s{_rReplaceKeyPrefixWith = a});
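-- A minimal usage sketch (not part of the original source; the host and
-- prefix are example values, and the lens operators (&) and (?~) are assumed
-- in scope):
--
-- > redirect & rHostName ?~ "example.com"
-- >          & rReplaceKeyPrefixWith ?~ "documents/"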
instance FromXML Redirect where
parseXML x
= Redirect' <$>
(x .@? "HostName") <*> (x .@? "Protocol") <*>
(x .@? "HttpRedirectCode")
<*> (x .@? "ReplaceKeyWith")
<*> (x .@? "ReplaceKeyPrefixWith")
instance ToXML Redirect where
toXML Redirect'{..}
= mconcat
["HostName" @= _rHostName, "Protocol" @= _rProtocol,
"HttpRedirectCode" @= _rHTTPRedirectCode,
"ReplaceKeyWith" @= _rReplaceKeyWith,
"ReplaceKeyPrefixWith" @= _rReplaceKeyPrefixWith]
-- | /See:/ 'redirectAllRequestsTo' smart constructor.
data RedirectAllRequestsTo = RedirectAllRequestsTo'
{ _rartProtocol :: !(Maybe Protocol)
, _rartHostName :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RedirectAllRequestsTo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rartProtocol'
--
-- * 'rartHostName'
redirectAllRequestsTo
:: Text -- ^ 'rartHostName'
-> RedirectAllRequestsTo
redirectAllRequestsTo pHostName_ =
RedirectAllRequestsTo'
{ _rartProtocol = Nothing
, _rartHostName = pHostName_
}
-- | Protocol to use (http, https) when redirecting requests. The default is
-- the protocol that is used in the original request.
rartProtocol :: Lens' RedirectAllRequestsTo (Maybe Protocol)
rartProtocol = lens _rartProtocol (\ s a -> s{_rartProtocol = a});
-- | Name of the host where requests will be redirected.
rartHostName :: Lens' RedirectAllRequestsTo Text
rartHostName = lens _rartHostName (\ s a -> s{_rartHostName = a});
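-- A minimal usage sketch (not part of the original source; the host name is
-- an example value):
--
-- > redirectAllRequestsTo "example.com"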
instance FromXML RedirectAllRequestsTo where
parseXML x
= RedirectAllRequestsTo' <$>
(x .@? "Protocol") <*> (x .@ "HostName")
instance ToXML RedirectAllRequestsTo where
toXML RedirectAllRequestsTo'{..}
= mconcat
["Protocol" @= _rartProtocol,
"HostName" @= _rartHostName]
-- | Container for replication rules. You can add as many as 1,000 rules.
-- Total replication configuration size can be up to 2 MB.
--
-- /See:/ 'replicationConfiguration' smart constructor.
data ReplicationConfiguration = ReplicationConfiguration'
{ _rcRole :: !Text
, _rcRules :: ![ReplicationRule]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReplicationConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rcRole'
--
-- * 'rcRules'
replicationConfiguration
:: Text -- ^ 'rcRole'
-> ReplicationConfiguration
replicationConfiguration pRole_ =
ReplicationConfiguration'
{ _rcRole = pRole_
, _rcRules = mempty
}
-- | Amazon Resource Name (ARN) of an IAM role for Amazon S3 to assume when
-- replicating the objects.
rcRole :: Lens' ReplicationConfiguration Text
rcRole = lens _rcRole (\ s a -> s{_rcRole = a});
-- | Container for information about a particular replication rule.
-- Replication configuration must have at least one rule and can contain up
-- to 1,000 rules.
rcRules :: Lens' ReplicationConfiguration [ReplicationRule]
rcRules = lens _rcRules (\ s a -> s{_rcRules = a}) . _Coerce;
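-- A minimal usage sketch (not part of the original source; the role ARN is a
-- made-up example value, and rules would normally be added afterwards via
-- 'rcRules'):
--
-- > replicationConfiguration "arn:aws:iam::123456789012:role/replication-role"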
instance FromXML ReplicationConfiguration where
parseXML x
= ReplicationConfiguration' <$>
(x .@ "Role") <*> (parseXMLList "Rule" x)
instance ToXML ReplicationConfiguration where
toXML ReplicationConfiguration'{..}
= mconcat
["Role" @= _rcRole, toXMLList "Rule" _rcRules]
-- | /See:/ 'replicationRule' smart constructor.
data ReplicationRule = ReplicationRule'
{ _rrId :: !(Maybe Text)
, _rrPrefix :: !Text
, _rrStatus :: !ReplicationRuleStatus
, _rrDestination :: !Destination
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReplicationRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrId'
--
-- * 'rrPrefix'
--
-- * 'rrStatus'
--
-- * 'rrDestination'
replicationRule
:: Text -- ^ 'rrPrefix'
-> ReplicationRuleStatus -- ^ 'rrStatus'
-> Destination -- ^ 'rrDestination'
-> ReplicationRule
replicationRule pPrefix_ pStatus_ pDestination_ =
ReplicationRule'
{ _rrId = Nothing
, _rrPrefix = pPrefix_
, _rrStatus = pStatus_
, _rrDestination = pDestination_
}
-- | Unique identifier for the rule. The value cannot be longer than 255
-- characters.
rrId :: Lens' ReplicationRule (Maybe Text)
rrId = lens _rrId (\ s a -> s{_rrId = a});
-- | Object key name prefix identifying one or more objects to which the rule
-- applies. Maximum prefix length can be up to 1,024 characters.
-- Overlapping prefixes are not supported.
rrPrefix :: Lens' ReplicationRule Text
rrPrefix = lens _rrPrefix (\ s a -> s{_rrPrefix = a});
-- | The rule is ignored if status is not Enabled.
rrStatus :: Lens' ReplicationRule ReplicationRuleStatus
rrStatus = lens _rrStatus (\ s a -> s{_rrStatus = a});
-- | Undocumented member.
rrDestination :: Lens' ReplicationRule Destination
rrDestination = lens _rrDestination (\ s a -> s{_rrDestination = a});
instance FromXML ReplicationRule where
parseXML x
= ReplicationRule' <$>
(x .@? "ID") <*> (x .@ "Prefix") <*> (x .@ "Status")
<*> (x .@ "Destination")
instance ToXML ReplicationRule where
toXML ReplicationRule'{..}
= mconcat
["ID" @= _rrId, "Prefix" @= _rrPrefix,
"Status" @= _rrStatus,
"Destination" @= _rrDestination]
-- | /See:/ 'requestPaymentConfiguration' smart constructor.
newtype RequestPaymentConfiguration = RequestPaymentConfiguration'
{ _rpcPayer :: Payer
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RequestPaymentConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rpcPayer'
requestPaymentConfiguration
:: Payer -- ^ 'rpcPayer'
-> RequestPaymentConfiguration
requestPaymentConfiguration pPayer_ =
RequestPaymentConfiguration'
{ _rpcPayer = pPayer_
}
-- | Specifies who pays for the download and request fees.
rpcPayer :: Lens' RequestPaymentConfiguration Payer
rpcPayer = lens _rpcPayer (\ s a -> s{_rpcPayer = a});
instance ToXML RequestPaymentConfiguration where
toXML RequestPaymentConfiguration'{..}
= mconcat ["Payer" @= _rpcPayer]
-- | /See:/ 'restoreRequest' smart constructor.
newtype RestoreRequest = RestoreRequest'
{ _rrDays :: Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RestoreRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrDays'
restoreRequest
:: Int -- ^ 'rrDays'
-> RestoreRequest
restoreRequest pDays_ =
RestoreRequest'
{ _rrDays = pDays_
}
-- | Lifetime of the active copy in days
rrDays :: Lens' RestoreRequest Int
rrDays = lens _rrDays (\ s a -> s{_rrDays = a});
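-- A minimal usage sketch (not part of the original source): keep the restored
-- copy available for 7 days.
--
-- > restoreRequest 7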
instance ToXML RestoreRequest where
toXML RestoreRequest'{..}
= mconcat ["Days" @= _rrDays]
-- | /See:/ 'routingRule' smart constructor.
data RoutingRule = RoutingRule'
{ _rrCondition :: !(Maybe Condition)
, _rrRedirect :: !Redirect
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RoutingRule' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rrCondition'
--
-- * 'rrRedirect'
routingRule
:: Redirect -- ^ 'rrRedirect'
-> RoutingRule
routingRule pRedirect_ =
RoutingRule'
{ _rrCondition = Nothing
, _rrRedirect = pRedirect_
}
-- | A container for describing a condition that must be met for the
-- specified redirect to apply. For example: 1. If the request is for pages in
-- the \/docs folder, redirect to the \/documents folder. 2. If the request
-- results in an HTTP 4xx error, redirect the request to another host where
-- you might process the error.
rrCondition :: Lens' RoutingRule (Maybe Condition)
rrCondition = lens _rrCondition (\ s a -> s{_rrCondition = a});
-- | Container for redirect information. You can redirect requests to another
-- host, to another page, or with another protocol. In the event of an
-- error, you can specify a different error code to return.
rrRedirect :: Lens' RoutingRule Redirect
rrRedirect = lens _rrRedirect (\ s a -> s{_rrRedirect = a});
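-- A minimal usage sketch (not part of the original source; the prefix is an
-- example value, and the lens operators (&) and (?~) are assumed in scope):
--
-- > routingRule (redirect & rReplaceKeyPrefixWith ?~ "documents/")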
instance FromXML RoutingRule where
parseXML x
= RoutingRule' <$>
(x .@? "Condition") <*> (x .@ "Redirect")
instance ToXML RoutingRule where
toXML RoutingRule'{..}
= mconcat
["Condition" @= _rrCondition,
"Redirect" @= _rrRedirect]
-- | Container for object key name prefix and suffix filtering rules.
--
-- /See:/ 's3KeyFilter' smart constructor.
newtype S3KeyFilter = S3KeyFilter'
{ _skfFilterRules :: Maybe [FilterRule]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'S3KeyFilter' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'skfFilterRules'
s3KeyFilter
:: S3KeyFilter
s3KeyFilter =
S3KeyFilter'
{ _skfFilterRules = Nothing
}
-- | Undocumented member.
skfFilterRules :: Lens' S3KeyFilter [FilterRule]
skfFilterRules = lens _skfFilterRules (\ s a -> s{_skfFilterRules = a}) . _Default . _Coerce;
instance FromXML S3KeyFilter where
parseXML x
= S3KeyFilter' <$>
(may (parseXMLList "FilterRule") x)
instance ToXML S3KeyFilter where
toXML S3KeyFilter'{..}
= mconcat
[toXML (toXMLList "FilterRule" <$> _skfFilterRules)]
-- | /See:/ 's3ServiceError' smart constructor.
data S3ServiceError = S3ServiceError'
{ _sseVersionId :: !(Maybe ObjectVersionId)
, _sseKey :: !(Maybe ObjectKey)
, _sseCode :: !(Maybe Text)
, _sseMessage :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'S3ServiceError' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sseVersionId'
--
-- * 'sseKey'
--
-- * 'sseCode'
--
-- * 'sseMessage'
s3ServiceError
:: S3ServiceError
s3ServiceError =
S3ServiceError'
{ _sseVersionId = Nothing
, _sseKey = Nothing
, _sseCode = Nothing
, _sseMessage = Nothing
}
-- | Undocumented member.
sseVersionId :: Lens' S3ServiceError (Maybe ObjectVersionId)
sseVersionId = lens _sseVersionId (\ s a -> s{_sseVersionId = a});
-- | Undocumented member.
sseKey :: Lens' S3ServiceError (Maybe ObjectKey)
sseKey = lens _sseKey (\ s a -> s{_sseKey = a});
-- | Undocumented member.
sseCode :: Lens' S3ServiceError (Maybe Text)
sseCode = lens _sseCode (\ s a -> s{_sseCode = a});
-- | Undocumented member.
sseMessage :: Lens' S3ServiceError (Maybe Text)
sseMessage = lens _sseMessage (\ s a -> s{_sseMessage = a});
instance FromXML S3ServiceError where
parseXML x
= S3ServiceError' <$>
(x .@? "VersionId") <*> (x .@? "Key") <*>
(x .@? "Code")
<*> (x .@? "Message")
-- | /See:/ 'tag' smart constructor.
data Tag = Tag'
{ _tagKey :: !ObjectKey
, _tagValue :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Tag' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tagKey'
--
-- * 'tagValue'
tag
:: ObjectKey -- ^ 'tagKey'
-> Text -- ^ 'tagValue'
-> Tag
tag pKey_ pValue_ =
Tag'
{ _tagKey = pKey_
, _tagValue = pValue_
}
-- | Name of the tag.
tagKey :: Lens' Tag ObjectKey
tagKey = lens _tagKey (\ s a -> s{_tagKey = a});
-- | Value of the tag.
tagValue :: Lens' Tag Text
tagValue = lens _tagValue (\ s a -> s{_tagValue = a});
instance FromXML Tag where
parseXML x = Tag' <$> (x .@ "Key") <*> (x .@ "Value")
instance ToXML Tag where
toXML Tag'{..}
= mconcat ["Key" @= _tagKey, "Value" @= _tagValue]
-- | /See:/ 'tagging' smart constructor.
newtype Tagging = Tagging'
{ _tTagSet :: [Tag]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Tagging' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tTagSet'
tagging
:: Tagging
tagging =
Tagging'
{ _tTagSet = mempty
}
-- | Undocumented member.
tTagSet :: Lens' Tagging [Tag]
tTagSet = lens _tTagSet (\ s a -> s{_tTagSet = a}) . _Coerce;
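-- A minimal usage sketch (not part of the original source; assumes the lens
-- operators (&) and (.~) are in scope and that the ObjectKey and Text
-- literals can be written via OverloadedStrings):
--
-- > tagging & tTagSet .~ [tag "project" "blue"]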
instance ToXML Tagging where
toXML Tagging'{..}
= mconcat ["TagSet" @= toXMLList "Tag" _tTagSet]
-- | /See:/ 'targetGrant' smart constructor.
data TargetGrant = TargetGrant'
{ _tgPermission :: !(Maybe BucketLogsPermission)
, _tgGrantee :: !(Maybe Grantee)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'TargetGrant' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tgPermission'
--
-- * 'tgGrantee'
targetGrant
:: TargetGrant
targetGrant =
TargetGrant'
{ _tgPermission = Nothing
, _tgGrantee = Nothing
}
-- | Logging permissions assigned to the Grantee for the bucket.
tgPermission :: Lens' TargetGrant (Maybe BucketLogsPermission)
tgPermission = lens _tgPermission (\ s a -> s{_tgPermission = a});
-- | Undocumented member.
tgGrantee :: Lens' TargetGrant (Maybe Grantee)
tgGrantee = lens _tgGrantee (\ s a -> s{_tgGrantee = a});
instance FromXML TargetGrant where
parseXML x
= TargetGrant' <$>
(x .@? "Permission") <*> (x .@? "Grantee")
instance ToXML TargetGrant where
toXML TargetGrant'{..}
= mconcat
["Permission" @= _tgPermission,
"Grantee" @= _tgGrantee]
-- | Container for specifying the configuration when you want Amazon S3 to
-- publish events to an Amazon Simple Notification Service (Amazon SNS)
-- topic.
--
-- /See:/ 'topicConfiguration' smart constructor.
data TopicConfiguration = TopicConfiguration'
{ _tcId :: !(Maybe Text)
, _tcFilter :: !(Maybe NotificationConfigurationFilter)
, _tcTopicARN :: !Text
, _tcEvents :: ![Event]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'TopicConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tcId'
--
-- * 'tcFilter'
--
-- * 'tcTopicARN'
--
-- * 'tcEvents'
topicConfiguration
:: Text -- ^ 'tcTopicARN'
-> TopicConfiguration
topicConfiguration pTopicARN_ =
TopicConfiguration'
{ _tcId = Nothing
, _tcFilter = Nothing
, _tcTopicARN = pTopicARN_
, _tcEvents = mempty
}
-- | Undocumented member.
tcId :: Lens' TopicConfiguration (Maybe Text)
tcId = lens _tcId (\ s a -> s{_tcId = a});
-- | Undocumented member.
tcFilter :: Lens' TopicConfiguration (Maybe NotificationConfigurationFilter)
tcFilter = lens _tcFilter (\ s a -> s{_tcFilter = a});
-- | Amazon SNS topic ARN to which Amazon S3 will publish a message when it
-- detects events of the specified type.
tcTopicARN :: Lens' TopicConfiguration Text
tcTopicARN = lens _tcTopicARN (\ s a -> s{_tcTopicARN = a});
-- | Undocumented member.
tcEvents :: Lens' TopicConfiguration [Event]
tcEvents = lens _tcEvents (\ s a -> s{_tcEvents = a}) . _Coerce;
instance FromXML TopicConfiguration where
parseXML x
= TopicConfiguration' <$>
(x .@? "Id") <*> (x .@? "Filter") <*> (x .@ "Topic")
<*> (parseXMLList "Event" x)
instance ToXML TopicConfiguration where
toXML TopicConfiguration'{..}
= mconcat
["Id" @= _tcId, "Filter" @= _tcFilter,
"Topic" @= _tcTopicARN, toXMLList "Event" _tcEvents]
-- | /See:/ 'transition' smart constructor.
data Transition = Transition'
{ _tDays :: !(Maybe Int)
, _tDate :: !(Maybe RFC822)
, _tStorageClass :: !(Maybe TransitionStorageClass)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Transition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tDays'
--
-- * 'tDate'
--
-- * 'tStorageClass'
transition
:: Transition
transition =
Transition'
{ _tDays = Nothing
, _tDate = Nothing
, _tStorageClass = Nothing
}
-- | Indicates the lifetime, in days, of the objects that are subject to the
-- rule. The value must be a non-zero positive integer.
tDays :: Lens' Transition (Maybe Int)
tDays = lens _tDays (\ s a -> s{_tDays = a});
-- | Indicates at what date the object is to be moved or deleted. Should be
-- in GMT ISO 8601 Format.
tDate :: Lens' Transition (Maybe UTCTime)
tDate = lens _tDate (\ s a -> s{_tDate = a}) . mapping _Time;
-- | The class of storage used to store the object.
tStorageClass :: Lens' Transition (Maybe TransitionStorageClass)
tStorageClass = lens _tStorageClass (\ s a -> s{_tStorageClass = a});
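-- A minimal usage sketch (not part of the original source): transition
-- affected objects 30 days after creation; the target storage class would be
-- set via 'tStorageClass'. Assumes the lens operators (&) and (?~) are in
-- scope.
--
-- > transition & tDays ?~ 30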
instance FromXML Transition where
parseXML x
= Transition' <$>
(x .@? "Days") <*> (x .@? "Date") <*>
(x .@? "StorageClass")
instance ToXML Transition where
toXML Transition'{..}
= mconcat
["Days" @= _tDays, "Date" @= _tDate,
"StorageClass" @= _tStorageClass]
-- | /See:/ 'versioningConfiguration' smart constructor.
data VersioningConfiguration = VersioningConfiguration'
{ _vcStatus :: !(Maybe BucketVersioningStatus)
, _vcMFADelete :: !(Maybe MFADelete)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'VersioningConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vcStatus'
--
-- * 'vcMFADelete'
versioningConfiguration
:: VersioningConfiguration
versioningConfiguration =
VersioningConfiguration'
{ _vcStatus = Nothing
, _vcMFADelete = Nothing
}
-- | The versioning state of the bucket.
vcStatus :: Lens' VersioningConfiguration (Maybe BucketVersioningStatus)
vcStatus = lens _vcStatus (\ s a -> s{_vcStatus = a});
-- | Specifies whether MFA delete is enabled in the bucket versioning
-- configuration. This element is only returned if the bucket has been
-- configured with MFA delete. If the bucket has never been so configured,
-- this element is not returned.
vcMFADelete :: Lens' VersioningConfiguration (Maybe MFADelete)
vcMFADelete = lens _vcMFADelete (\ s a -> s{_vcMFADelete = a});
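-- A minimal usage sketch (not part of the original source). The 'BVSEnabled'
-- constructor name is an assumption about the BucketVersioningStatus type
-- defined elsewhere in this package.
--
-- > versioningConfiguration & vcStatus ?~ BVSEnabled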
instance ToXML VersioningConfiguration where
toXML VersioningConfiguration'{..}
= mconcat
["Status" @= _vcStatus, "MfaDelete" @= _vcMFADelete]
-- | /See:/ 'websiteConfiguration' smart constructor.
data WebsiteConfiguration = WebsiteConfiguration'
{ _wcRedirectAllRequestsTo :: !(Maybe RedirectAllRequestsTo)
, _wcErrorDocument :: !(Maybe ErrorDocument)
, _wcIndexDocument :: !(Maybe IndexDocument)
, _wcRoutingRules :: !(Maybe [RoutingRule])
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'WebsiteConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'wcRedirectAllRequestsTo'
--
-- * 'wcErrorDocument'
--
-- * 'wcIndexDocument'
--
-- * 'wcRoutingRules'
websiteConfiguration
:: WebsiteConfiguration
websiteConfiguration =
WebsiteConfiguration'
{ _wcRedirectAllRequestsTo = Nothing
, _wcErrorDocument = Nothing
, _wcIndexDocument = Nothing
, _wcRoutingRules = Nothing
}
-- | Undocumented member.
wcRedirectAllRequestsTo :: Lens' WebsiteConfiguration (Maybe RedirectAllRequestsTo)
wcRedirectAllRequestsTo = lens _wcRedirectAllRequestsTo (\ s a -> s{_wcRedirectAllRequestsTo = a});
-- | Undocumented member.
wcErrorDocument :: Lens' WebsiteConfiguration (Maybe ErrorDocument)
wcErrorDocument = lens _wcErrorDocument (\ s a -> s{_wcErrorDocument = a});
-- | Undocumented member.
wcIndexDocument :: Lens' WebsiteConfiguration (Maybe IndexDocument)
wcIndexDocument = lens _wcIndexDocument (\ s a -> s{_wcIndexDocument = a});
-- | Undocumented member.
wcRoutingRules :: Lens' WebsiteConfiguration [RoutingRule]
wcRoutingRules = lens _wcRoutingRules (\ s a -> s{_wcRoutingRules = a}) . _Default . _Coerce;
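-- A minimal usage sketch (not part of the original source; the suffix is an
-- example value, and the lens operators (&) and (?~) are assumed in scope):
--
-- > websiteConfiguration & wcIndexDocument ?~ indexDocument "index.html"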
instance ToXML WebsiteConfiguration where
toXML WebsiteConfiguration'{..}
= mconcat
["RedirectAllRequestsTo" @= _wcRedirectAllRequestsTo,
"ErrorDocument" @= _wcErrorDocument,
"IndexDocument" @= _wcIndexDocument,
"RoutingRules" @=
toXML (toXMLList "RoutingRule" <$> _wcRoutingRules)]
|
olorin/amazonka
|
amazonka-s3/gen/Network/AWS/S3/Types/Product.hs
|
mpl-2.0
| 82,877 | 0 | 16 | 18,089 | 16,510 | 9,447 | 7,063 | 1,605 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Lib where
import Control.Lens ((&), (.~), (^.))
import Data.ByteString.Lazy.Char8 as BSLC8
import Data.List as L
import Data.Text as T
import Data.Text.IO as T
import Data.Thyme.Time
import Network.HTTP.Client.OpenSSL
import Network.Wreq as W
import OpenSSL.Session (context)
import Prelude as P
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.HTML.TagSoup
------------------------------------------------------------------------------
-- constants
baseUrl = "https://www.zillow.com"
page1Url =
baseUrl ++ "/homes/for_sale/84103_rb/?fromHomePage=true&shouldFireSellPageImplicitClaimGA=false&fromHomePageTab=buy"
dataDir = "data/"
------------------------------------------------------------------------------
-- downloads
downloadAll = do
filename <- download page1Url 1
-- let filename = P.concat [dataDir, "2017-02-13", "-p", show 1, ".htm"]
txt <- T.readFile filename
let links = pageLinks txt
filenames <- mapM (\(pagelink,n) -> download (baseUrl ++ T.unpack pagelink) n)
(P.zip links [2 ..])
return (filename : filenames)
download url n = do
date <- getDate
src <- openUrl url
let filename = P.concat [dataDir, date, "-p", show n, ".htm"]
P.writeFile filename src
return filename
openUrl url =
withOpenSSL $ do
r <- getWith opts url
print (r ^. responseStatus)
print (r ^. responseHeaders)
return (BSLC8.unpack (r ^. responseBody))
where
opts = defaults & manager .~ Left (opensslManagerSettings context)
------------------------------------------------------------------------------
-- page links
pageLinks txt =
L.nub (pickOutPageLinks (P.takeWhile (\x -> x /= TagClose "ol")
(P.head (getPageLinks txt))))
getPageLinks :: Text -> [[Tag Text]]
getPageLinks = textParseTags "<ol class=zsg-pagination>"
pickOutPageLinks [] = []
pickOutPageLinks (x:xs) =
case x of
TagOpen tagName attributes | tagName == "a" && not (P.null attributes)
-> snd (P.head attributes) : pickOutPageLinks xs
_ -> pickOutPageLinks xs
------------------------------------------------------------------------------
-- listings (via <article>) in page
data Listing = Listing
{ link :: Text -- also the identity
, photolink :: Maybe Text
, address :: Text
, price :: Int
, priceS :: Text
} deriving (Eq, Read, Show)
-- these two for quick checks
pl = ppp "./data/2017-02-16-p1.htm" listings
pal = do
txt <- T.readFile "./data/2017-02-16-p1.htm"
mapM_ (mapM_ print) (getArticles txt)
-- printTags =<< allListings ["data/2017-02-16-p1.htm", "data/2017-02-16-p2.htm", "data/2017-02-16-p3.htm"]
-- printTags =<< allListings ["data/2017-02-13-p1.htm", "data/2017-02-13-p2.htm", "data/2017-02-13-p3.htm"]
allListings filenames = do
ls <- mapM go filenames
return (P.concat ls)
where
go filename = do
txt <- T.readFile filename
return (listings txt)
listings txt =
sortIt
(listify
(P.filter hasPrice
(P.map (cleanse . pickData . pickTags)
(getArticles txt))))
getArticles :: Text -> [[Tag Text]]
getArticles = textParseTags "<article>"
pickTags :: [Tag Text] -> [Tag Text]
pickTags = myFilter False
where
myFilter _ [] = []
myFilter True (x:xs) = x : myFilter False xs
myFilter False (x:xs)
| x == TagOpen "span" [("itemprop", "streetAddress")] ||
x == TagOpen "span" [("class", "zsg-photo-card-price")]
= myFilter True xs
| x ~== ("<a>" :: String) || x ~== ("<img>" :: String)
= x : myFilter False xs
| otherwise = myFilter False xs
pickData :: [Tag Text] -> [Text]
pickData = P.map f
where
f x = case x of
(TagText t) -> t
(TagOpen _ xs) -> if not (P.null xs) then snd $ P.head xs else ""
cleanse [] = []
cleanse (x:xs)
| T.isPrefixOf "/homedetails/" x || T.isPrefixOf "https://photos" x
= x : cleanse xs
| T.isPrefixOf "/homedetail/AuthRequired.htm" x
|| T.isPrefixOf "/" x
|| T.isPrefixOf "https://dev.virtualearth.net" x
|| T.isPrefixOf "https://sb.scorecardresearch.com" x
|| P.elem x zillowIgnore
= cleanse xs
| otherwise = x : cleanse xs
hasPrice = P.any (T.isPrefixOf "$")
listify :: [[Text]] -> [Listing]
listify = P.map (\(address:price:link:rest) ->
Listing link
(if not (P.null rest) then Just (P.head rest) else Nothing)
address
(n price)
price)
where
n x = read (T.unpack (T.filter (\x -> x /= '$' && x /= ',') x)) :: Int
sortIt :: [Listing] -> [Listing]
sortIt = L.sortBy (\l1 l2 -> price l1 `compare` price l2)
zillowIgnore = ["option","zsg-lightbox-show za-track-event","http://www.zillow.com/local-info/","http://www.facebook.com/Zillow","http://twitter.com/zillow","http://plus.google.com/+Zillow","zsg-notification-bar-close","mapped-result-count","#","#","#","#","#","#","menu-label","#fore-tip-filters","#coming-soon-tip-filters","#pm-tip-filters","#pmf-tip-filters","#pre-foreclosure-tip-filters","#mmm-tip-filters","#pending-tip-filters","price-menu-label","saf-entry-link","#payment","#income","#","saf-close zsg-button","saf-pre-approval-link","beds-menu-label","type-menu-label","menu-label","#hoa-dues-tooltip","http://www.zillow.com/community-pillar/","zsg-button_primary"]
------------------------------------------------------------------------------
-- display
writeDisplayListings = do
al <- allListings ["data/2017-02-16-p1.htm", "data/2017-02-16-p2.htm", "data/2017-02-16-p3.htm"]
-- print al
let dl = displayListings al
let rl = renderHtml dl
-- print rl
return $ BSLC8.writeFile "/tmp/xxx.html" rl
displayListings xs = H.docTypeHtml $
H.head $ do
H.title "84103 listings"
H.body $
mapM_ displayListing xs
displayListing :: Listing -> H.Html
displayListing (Listing pagelink photolink address price priceS) = do
H.hr
case photolink of
Just l -> H.img H.! A.src (H.preEscapedTextValue l) H.! A.alt (H.preEscapedTextValue l)
Nothing -> H.wbr
H.string " "
H.string (T.unpack address)
H.string " "
H.string (T.unpack priceS)
H.string " "
H.a H.! A.href (H.preEscapedStringValue (baseUrl ++ T.unpack pagelink)) $ "details"
renderListings = renderHtml
------------------------------------------------------------------------------
-- util
textParseTags :: String -> Text -> [[Tag Text]]
textParseTags tag t = do
let ts = parseTags t
partitions (~== tag) ts
getDate = do
now <- getCurrentTime
myzone <- getCurrentTimeZone
let x = show (utcToZonedTime myzone now :: ZonedTime)
return $ P.take 10 x
ppp filename f = do
txt <- T.readFile filename
printTags (f txt)
printTags = mapM_ print
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/playpen/zillow/src/Lib.hs
|
unlicense
| 7,346 | 0 | 16 | 1,790 | 2,099 | 1,088 | 1,011 | 151 | 3 |
{-# LANGUAGE TemplateHaskell #-}
module Cache
( Cache(..)
, newCache
, destroyCache
-- , cacheChunks TODO: what, this is never used?
, cacheStars
) where
import Control.Lens
import Data.IORef
-- import qualified Data.Map as M
import qualified SDL as SDL
-- import Coordinate
data Cache = MkCache
{ -- _cacheChunks :: M.Map Coordinate SDL.Texture
_cacheStars :: [SDL.Texture]
}
makeLenses ''Cache
newCache :: IO (IORef Cache)
newCache = newIORef defaultCache
defaultCache :: Cache
defaultCache = MkCache
{ -- _cacheChunks = M.empty
_cacheStars = []
}
destroyCache :: IORef Cache -> IO ()
destroyCache cache = writeIORef cache defaultCache
|
nitrix/lspace
|
legacy/Cache.hs
|
unlicense
| 695 | 0 | 10 | 154 | 144 | 84 | 60 | 19 | 1 |
module Constant where
newtype Constant a b =
Constant { getConstant :: a }
deriving (Eq, Ord, Show)
instance Functor (Constant a) where
fmap _ (Constant a) = Constant a
instance Monoid a => Applicative (Constant a) where
pure b = Constant mempty
(Constant a) <*> (Constant a') = Constant (a `mappend` a')
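-- A quick sketch of the behaviour (added example, not from the original file):
-- the Applicative ignores the phantom second type parameter and mappends the
-- first, e.g.
--   getConstant (Constant "foo" <*> Constant "bar")  == "foobar"
--   getConstant (pure 3 :: Constant String Int)      == ""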
|
thewoolleyman/haskellbook
|
17/05/maor/Constant.hs
|
unlicense
| 318 | 0 | 8 | 66 | 134 | 71 | 63 | 9 | 0 |
module Problem011 where
main = do str <- readFile "problem-011.txt"
let xs = map (read :: String -> Int) $ words str
print $ maximum $ [prod xs is | i <- [0..400], o <- offsets, let is = map (+ i) o, last is < 400]
offsets = [[0, 1, 2, 3]
,[0,20,40,60]
,[0,21,42,63]
,[3,22,41,60]
]
prod xs is = product $ map (xs !!) is
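-- Added annotation (not in the original): the 20x20 grid is flattened
-- row-major, so within 'offsets' a step of 1 walks right, 20 walks down,
-- 21 walks down-right, and the [3,22,41,60] pattern walks down-left starting
-- from the cell three places to the right of the anchor index.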
|
vasily-kartashov/playground
|
euler/problem-011.hs
|
apache-2.0
| 384 | 0 | 14 | 130 | 206 | 114 | 92 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Main where
import Data.Monoid
import Data.String
import Web.Spock.Safe
import Data.Aeson
import GHC.Generics
data User = User { name :: String, height :: Int } deriving Generic
instance ToJSON User
main :: IO ()
main =
  runSpock 8080 $ spockT id $
    do get root $
         Web.Spock.Safe.json (map (User "10") [4,5,6,6])
       get ("hello" <//> var) $ \name ->
         text ("Hello " <> name <> "!")
|
justinholmes/haskell-playground
|
src/Main.hs
|
apache-2.0
| 491 | 0 | 13 | 117 | 171 | 94 | 77 | 17 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Form Handler | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
secdec/zap-extensions
|
addOns/formhandler/src/main/javahelp/org/zaproxy/zap/extension/formhandler/resources/help_ru_RU/helpset_ru_RU.hs
|
apache-2.0
| 973 | 83 | 52 | 159 | 396 | 209 | 187 | -1 | -1 |
module AlecSequences.A271328Spec (main, spec) where
import Test.Hspec
import AlecSequences.A271328 (a271328)
main :: IO ()
main = hspec spec
spec = describe "A271328" $
it "correctly computes the first 20 elements" $
take 20 (map a271328 [1..]) `shouldBe` expectedValue where
expectedValue = [1,5,10,17,28,37,50,65,82,106,122,145,170,197,228,257,294,325,362,406]
|
peterokagey/haskellOEIS
|
test/AlecSequences/A271328Spec.hs
|
apache-2.0
| 377 | 0 | 10 | 56 | 155 | 92 | 63 | 9 | 1 |
module Web.Socdiff.Github.Github where
import Control.Applicative
import qualified Data.Text as T
import Haxl.Core
import Web.Socdiff.Github.DataSource
-- | Fetch a list of followers for the given username
getFollowers :: T.Text -> GenHaxl u [T.Text]
getFollowers u = dataFetch (GetFollowers u)
-- | Fetch a list of repos for the given username
getRepos :: T.Text -> GenHaxl u [T.Text]
getRepos u = dataFetch (GetRepos u)
-- | Fetch a list of stargazers for the given repository
getStargazers :: T.Text -> T.Text -> GenHaxl u (T.Text, [T.Text])
getStargazers u r = (,) r <$> dataFetch (GetStargazers u r)
-- | Fetch a list of watchers for the given repository
getWatchers :: T.Text -> T.Text -> GenHaxl u (T.Text, [T.Text])
getWatchers u r = (,) r <$> dataFetch (GetWatchers u r)
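-- Added usage sketch (not part of the original module): because these are all
-- GenHaxl computations, composing them applicatively lets Haxl batch the
-- underlying requests, e.g.
--
-- followersAndRepos :: T.Text -> GenHaxl u ([T.Text], [T.Text])
-- followersAndRepos u = (,) <$> getFollowers u <*> getRepos u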
|
relrod/socdiff
|
src/Web/Socdiff/Github/Github.hs
|
bsd-2-clause
| 785 | 0 | 10 | 131 | 245 | 133 | 112 | 13 | 1 |
module GTKMainWindow where
import GTKContext
import Control.Monad.Trans
import Control.Monad.Trans.Reader
import qualified GI.Gtk as Gtk
setupMainWindow :: ReaderT GTKContext IO ()
setupMainWindow = do
o <- gtkGetObj Gtk.Window "window1"
_ <- liftIO $ Gtk.widgetShow o
_ <- liftIO $ Gtk.onWidgetDestroy o Gtk.mainQuit
return ()
|
nbrk/ld
|
executable/GTKMainWindow.hs
|
bsd-2-clause
| 342 | 0 | 10 | 57 | 105 | 55 | 50 | 11 | 1 |
import Network.Wai.Handler.Snap
import Controller
main :: IO ()
main = putStrLn "Loaded" >> withLounge (run 3000)
|
fortytools/lounge
|
snap-server.hs
|
bsd-2-clause
| 116 | 0 | 8 | 18 | 42 | 22 | 20 | 4 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module HERMIT.Dictionary.Function
( externals
, appArgM
, buildAppM
, buildAppsM
, buildCompositionT
, buildFixT
, buildIdT
, staticArgR
, staticArgPosR
, staticArgPredR
, staticArgTypesR
) where
import Control.Arrow
import Control.Monad
import Control.Monad.IO.Class
import Data.List (nub, intercalate, intersect, partition, transpose)
import Data.Maybe (isNothing)
import Data.String (fromString)
import HERMIT.Context
import HERMIT.Core
import HERMIT.External
import HERMIT.GHC
import HERMIT.Kure
import HERMIT.Monad
import HERMIT.Name
import HERMIT.Dictionary.Common
externals :: [External]
externals =
[ external "static-arg" (promoteDefR staticArgR :: RewriteH LCore)
[ "perform the static argument transformation on a recursive function." ]
, external "static-arg-types" (promoteDefR staticArgTypesR :: RewriteH LCore)
[ "perform the static argument transformation on a recursive function, only transforming type arguments." ]
, external "static-arg-pos" (promoteDefR . staticArgPosR :: [Int] -> RewriteH LCore)
[ "perform the static argument transformation on a recursive function, only transforming the arguments specified (by index)." ]
]
------------------------------------------------------------------------------------------------------
-- | Traditional Static Argument Transformation
staticArgR :: (AddBindings c, ExtendPath c Crumb, HasEmptyContext c, ReadPath c Crumb, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreDef
staticArgR = staticArgPredR (return . map fst)
-- | Static Argument Transformation that only considers type arguments to be static.
staticArgTypesR :: (AddBindings c, ExtendPath c Crumb, HasEmptyContext c, ReadPath c Crumb, MonadCatch m, MonadUnique m)
=> Rewrite c m CoreDef
staticArgTypesR = staticArgPredR (return . map fst . filter (isTyVar . snd))
-- | Static Argument Transformations which requires that arguments in the given position are static.
staticArgPosR :: (AddBindings c, ExtendPath c Crumb, HasEmptyContext c, ReadPath c Crumb, MonadCatch m, MonadUnique m)
=> [Int] -> Rewrite c m CoreDef
staticArgPosR is' = staticArgPredR $ \ss' -> let is = nub is'
ss = map fst ss'
in if is == (is `intersect` ss)
then return is
else fail $ "args " ++ commas (filter (`notElem` ss) is) ++ " are not static."
-- | Generalized Static Argument Transformation, which allows static arguments to be filtered.
staticArgPredR :: forall c m. (AddBindings c, ExtendPath c Crumb, HasEmptyContext c, ReadPath c Crumb
, MonadCatch m, MonadUnique m)
=> ([(Int, Var)] -> m [Int]) -- ^ given list of static args and positions, decided which to transform
-> Rewrite c m CoreDef
staticArgPredR decide = prefixFailMsg "static-arg failed: " $ do
Def f rhs <- idR
let (bnds, body) = collectBinders rhs
guardMsg (notNull bnds) "rhs is not a function"
contextonlyT $ \ c -> do
let bodyContext = foldl (flip addLambdaBinding) c bnds
callPatsT :: Transform c m CoreExpr [[CoreExpr]]
callPatsT = extractT $ collectPruneT
(promoteExprT $ callPredT (const . (== f)) >>> arr snd :: Transform c m Core [CoreExpr])
callPats <- applyT callPatsT bodyContext body
let argExprs = transpose callPats
numCalls = length callPats
allBinds = zip [0..] bnds
staticBinds = [ (i,b) | ((i,b),exprs) <- zip allBinds $ argExprs ++ repeat []
, length exprs == numCalls && isStatic b exprs ]
-- ensure argument is present in every call (partial applications boo)
isStatic _ [] = True -- all were static
isStatic b ((Var b'):es) | b == b' = isStatic b es
isStatic b ((Type (TyVarTy v)):es) | b == v = isStatic b es
isStatic b ((Coercion (CoVarCo v)):es) | b == v = isStatic b es
isStatic _ _ = False -- not a simple repass, so dynamic
chosen <- decide staticBinds
let choices = map fst staticBinds
guardMsg (notNull chosen) "no arguments selected for transformation."
guardMsg (all (`elem` choices) chosen)
$ "args " ++ commas choices ++ " are static, but " ++ commas chosen ++ " were selected."
let (chosenBinds, dynBinds) = partition ((`elem` chosen) . fst) allBinds
(ps, dbnds) = unzip dynBinds
unboundTys = concat [ [ (i,i') | (i',b') <- dynBinds, i' < i , b' `elem` fvs ]
| (i,b) <- chosenBinds, let fvs = varSetElems (varTypeTyVars b) ]
guardMsg (null unboundTys)
$ "type variables in args " ++ commas (nub $ map fst unboundTys) ++ " would become unbound unless args "
++ commas (nub $ map snd unboundTys) ++ " are included in the transformation."
wkr <- newIdH (unqualifiedName f ++ "'") (exprType (mkCoreLams dbnds body))
let replaceCall :: Monad m => Rewrite c m CoreExpr
replaceCall = do
(_,exprs) <- callPredT (const . (== f))
return $ mkApps (Var wkr) [ e | (p,e) <- zip [0..] exprs, (p::Int) `elem` ps ]
body' <- applyT (extractR $ prunetdR (promoteExprR replaceCall :: Rewrite c m Core)) bodyContext body
return $ Def f $ mkCoreLams bnds $ Let (Rec [(wkr, mkCoreLams dbnds body')])
$ mkApps (Var wkr) (varsToCoreExprs dbnds)
------------------------------------------------------------------------------
-- | Get the nth argument of an application. Arg 0 is the function being applied.
appArgM :: Monad m => Int -> CoreExpr -> m CoreExpr
appArgM n e | n < 0 = fail "appArgM: arg must be non-negative"
| otherwise = let (fn,args) = collectArgs e
l = fn : args
in if n > length args
then fail "appArgM: not enough arguments"
else return $ l !! n
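-- Added illustration (not in the original source): for an application term
-- 'f x y', collectArgs yields (f, [x, y]); appArgM 0 returns f, appArgM 2
-- returns y, and appArgM 3 fails with "not enough arguments".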
-- | Build composition of two functions.
buildCompositionT :: (BoundVars c, HasHermitMEnv m, LiftCoreM m, MonadCatch m, MonadIO m, MonadThings m)
=> CoreExpr -> CoreExpr -> Transform c m x CoreExpr
buildCompositionT f g = do
composeId <- findIdT $ fromString "Data.Function.."
fDot <- prefixFailMsg "building (.) f failed:" $ buildAppM (varToCoreExpr composeId) f
prefixFailMsg "building f . g failed:" $ buildAppM fDot g
buildAppsM :: MonadCatch m => CoreExpr -> [CoreExpr] -> m CoreExpr
buildAppsM = foldM buildAppM
-- | Given expression for f and for x, build f x, figuring out the type arguments.
buildAppM :: MonadCatch m => CoreExpr -> CoreExpr -> m CoreExpr
buildAppM f x = do
(vsF, domF, _) <- splitFunTypeM (exprType f)
let (vsX, xTy) = splitForAllTys (exprType x)
allTvs = vsF ++ vsX
bindFn v = if v `elem` allTvs then BindMe else Skolem
sub <- maybe (fail "buildAppM - domain of f and type of x do not unify")
return
(tcUnifyTys bindFn [domF] [xTy])
f' <- substOrApply f [ (v, Type $ substTyVar sub v) | v <- vsF ]
x' <- substOrApply x [ (v, Type $ substTyVar sub v) | v <- vsX ]
let vs = [ v | v <- vsF ++ vsX, isNothing $ lookupTyVar sub v ] -- things we should stick back on as foralls
-- TODO: make sure vsX don't capture anything in f'
-- and vsF' doesn't capture anything in x'
return $ mkCoreLams vs $ mkCoreApp f' x'
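-- Added illustration (not in the original source): applying an expression of
-- type 'forall a. a -> a' to one of type 'Bool' should let tcUnifyTys solve
-- a := Bool, so the forall binder is instantiated with 'Type Bool' before the
-- application is built, rather than producing an ill-typed App.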
-- | Given expression for f, build fix f.
buildFixT :: (BoundVars c, LiftCoreM m, HasHermitMEnv m, MonadCatch m, MonadIO m, MonadThings m)
=> CoreExpr -> Transform c m x CoreExpr
buildFixT f = do
(tvs, ty) <- endoFunExprTypeM f
fixId <- findIdT $ fromString "Data.Function.fix"
f' <- substOrApply f [ (v, varToCoreExpr v) | v <- tvs ]
return $ mkCoreLams tvs $ mkCoreApps (varToCoreExpr fixId) [Type ty, f']
-- | Build an expression that is the monomorphic id function for given type.
buildIdT :: (BoundVars c, LiftCoreM m, HasHermitMEnv m, MonadCatch m, MonadIO m, MonadThings m)
=> Type -> Transform c m x CoreExpr
buildIdT ty = do
idId <- findIdT $ fromString "Data.Function.id"
return $ mkCoreApp (varToCoreExpr idId) (Type ty)
------------------------------------------------------------------------------
commas :: Show a => [a] -> String
commas = intercalate "," . map show
-- | Like mkCoreApps, but automatically beta-reduces when possible.
substOrApply :: Monad m => CoreExpr -> [(Var,CoreExpr)] -> m CoreExpr
substOrApply e [] = return e
substOrApply (Lam b e) ((v,ty):r) = if b == v
then substOrApply e r >>= return . substCoreExpr b ty
else fail $ "substOrApply: unexpected binder - "
++ unqualifiedName b ++ " - " ++ unqualifiedName v
substOrApply e rest = return $ mkCoreApps e (map snd rest)
------------------------------------------------------------------------------
|
beni55/hermit
|
src/HERMIT/Dictionary/Function.hs
|
bsd-2-clause
| 9,522 | 0 | 23 | 2,746 | 2,641 | 1,353 | 1,288 | 143 | 5 |
module REPL.REPL
(
repl
) where
import System.IO (hFlush, stdout)
import Control.Monad (when)
import System.Exit (exitSuccess)
import Data.Char (toLower)
import Rating
import REPL.Commands
import REPL.Exit
import REPL.List
import REPL.NPC
import REPL.Set
import REPL.Unset
import REPL.Lock
import REPL.Update
import REPL.Stock
import REPL.Suggest
repl :: Rating -> [Int] -> IO ()
repl _ [] = error "empty list given."
repl r (l:ls) = do
putStr $ "AlbanKnights(" ++ show l ++ "): "
hFlush stdout
input <- getLine
when (isExit input) $ do
putExitMessage r
exitSuccess
case words $ map toLower input of
[] -> repl r ls
(command:args) -> case dispatch command args r of
Left str -> do putStrLn str
repl r ls
Right r' -> repl r' ls
dispatch :: String -> [String] -> Rating -> Either String Rating
dispatch cmd args r
| isList cmd = list r
| isSet cmd = set args r
| isUnset cmd = unset args r
| isUpdate cmd = update r
| isLock cmd = lock args r
| isStock cmd = stock args r
| isSuggest cmd = suggest args r
| isNPC cmd = npc cmd args r
| otherwise = Left $ "unknown command: '" ++ cmd ++ "'"
|
sandmark/AlbanKnights
|
src/REPL/REPL.hs
|
bsd-3-clause
| 1,203 | 0 | 15 | 316 | 495 | 237 | 258 | 44 | 3 |
module Purescript.Ide.CodecJSON where
import Purescript.Ide.Externs (ExternDecl(..))
import Data.Aeson
instance ToJSON ExternDecl where
toJSON (FunctionDecl n t) = object ["name" .= n, "type" .= t]
toJSON (ModuleDecl n t) = object ["name" .= n, "type" .= t]
toJSON (DataDecl n t) = object ["name" .= n, "type" .= t]
toJSON (Dependency n names) = object ["module" .= n, "names" .= names]
toJSON (FixityDeclaration f p n) = object ["name" .= n, "fixity" .= show f, "precedence" .= p]
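-- Added example (not in the original source): a declaration such as
-- FunctionDecl "map" "(a -> b) -> [a] -> [b]" encodes to the JSON object
-- {"name":"map","type":"(a -> b) -> [a] -> [b]"}.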
|
passy/psc-ide
|
lib/Purescript/Ide/CodecJSON.hs
|
bsd-3-clause
| 526 | 0 | 9 | 121 | 212 | 113 | 99 | 9 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module HaskellCI.Config.ConstraintSet where
import HaskellCI.Prelude
import qualified Distribution.FieldGrammar as C
import HaskellCI.Newtypes
import HaskellCI.OptionsGrammar
data ConstraintSet = ConstraintSet
{ csName :: String
, csGhcVersions :: VersionRange
, csConstraints :: [String] -- we parse these simply as strings
, csTests :: Bool
, csRunTests :: Bool
, csDocspec :: Bool
, csBenchmarks :: Bool
, csHaddock :: Bool
}
deriving (Show, Generic)
emptyConstraintSet :: String -> ConstraintSet
emptyConstraintSet n = ConstraintSet n anyVersion [] False False False False False
-------------------------------------------------------------------------------
-- Grammar
-------------------------------------------------------------------------------
constraintSetGrammar
:: ( OptionsGrammar c g, Applicative (g ConstraintSet)
)
=> String -> g ConstraintSet ConstraintSet
constraintSetGrammar name = ConstraintSet name
<$> C.optionalFieldDef "ghc" (field @"csGhcVersions") anyVersion
<*> C.monoidalFieldAla "constraints" (C.alaList' C.CommaVCat NoCommas) (field @"csConstraints")
<*> C.booleanFieldDef "tests" (field @"csTests") False
<*> C.booleanFieldDef "run-tests" (field @"csRunTests") False
<*> C.booleanFieldDef "docspec" (field @"csDocspec") False
<*> C.booleanFieldDef "benchmarks" (field @"csBenchmarks") False
<*> C.booleanFieldDef "haddock" (field @"csHaddock") False
|
hvr/multi-ghc-travis
|
src/HaskellCI/Config/ConstraintSet.hs
|
bsd-3-clause
| 1,838 | 0 | 15 | 518 | 351 | 190 | 161 | 31 | 1 |
-- Compiler Toolkit: finite maps
--
-- Author : Manuel M. T. Chakravarty
-- Created: 23 March 95
--
-- Version $Revision: 1.12 $ from $Date: 2003/04/16 11:11:46 $
--
-- Copyright (c) [1995..2000] Manuel M. T. Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This module provides finite maps as an abstract data type. The idea is
-- taken from the GHC module `FiniteMap' and the implementation follows
-- closely the ideas found in ``Efficient sets---a balancing act'' from
-- Stephen Adams in ``Journal of Functional Programming'', 3(4), 1993,
-- drawing also from the longer exposition in ``Implementing Sets Efficiently
-- in a Functional Language'' also from Stephen Adams, CSTR 92-10 in Technical
-- Report Series, University of Southampton, Department of Electronics and
-- Computer Science, U.K.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
-- * This implementation is based in bounded balance binary trees. They
-- achieve good balancing while being simpler to maintain than AVL trees.
--
-- * The implementation design is based on the idea of smart constructors,
-- i.e., constructors that guarantee the compliance of the result with some
-- constraints applied to the construction of the data type.
--
--- TODO ----------------------------------------------------------------------
--
-- * `joinFM' would be a bit more efficient if the ``hedge union'' algorithm
-- of the above mentioned technical report would be implemented.
--
module Text.CTK.FiniteMaps (FiniteMap, zeroFM, unitFM, listToFM, listToCombFM, joinFM,
joinCombFM, sizeFM, addToFM, addToCombFM, delFromFM, diffFM,
intersectFM, intersectCombFM, mapFM, foldFM, filterFM,
lookupFM, lookupDftFM, toListFM, domFM, imageFM)
where
-- finite maps are represented as ordered binary trees; each node represents
-- a key-element pair in the map, its children contain pair with smaller and
-- greater keys respectively (this requires an ordering relation on the keys);
-- all keys in a tree are distinct
--
data (Ord key) =>
FiniteMap key elem = Leaf
| Node key -- this key
elem -- assoc with key
Int -- size >= 1
(FiniteMap key elem) -- smaller keys
(FiniteMap key elem) -- greater keys
-- we define two finite maps to be equal if they range over the same domain
--
--instance Ord k => Eq (FiniteMap k e) where
-- fm1 == fm2 = ((map fst . toListFM) $ fm1) == ((map fst . toListFM) $ fm2)
instance (Ord k, Eq e) => Eq (FiniteMap k e) where
fm1 == fm2 = (toListFM fm1) == (toListFM fm2)
-- we define a total ordering on finite maps by lifting the lexicographical
-- ordering over their domains (which we assume to be sorted)
--
--instance Ord k => Ord (FiniteMap k e) where
-- fm1 <= fm2 = ((map fst . toListFM) $ fm1) <= ((map fst . toListFM) $ fm2)
instance (Ord k, Ord e) => Ord (FiniteMap k e) where
fm1 <= fm2 = (toListFM fm1) <= (toListFM fm2)
instance (Show k, Show e, Ord k) => Show (FiniteMap k e) where
showsPrec = toShowS -- defined below
-- weight ratio is respected by the balanced tree, i.e., no subtree will ever
-- contain `ratio' times more elements than its sister
--
ratio :: Int
ratio = 5
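-- (Added note, not in the original: with ratio = 5 a node is rebalanced by
-- smarterNode as soon as one child holds more than five times as many entries
-- as its sibling, e.g. subtree sizes (1,6) trigger a rotation while (1,5) do
-- not.)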
-- this gives us an empty map
--
zeroFM :: Ord k => FiniteMap k e
zeroFM = Leaf
-- a map with a single element
--
unitFM :: Ord k => k -> e -> FiniteMap k e
unitFM k e = Node k e 1 Leaf Leaf
-- makes a list of key-element pairs into a finite map
--
-- in case of duplicates, the last is taken
--
listToFM :: Ord k => [(k, e)] -> FiniteMap k e
listToFM = listToCombFM const
-- makes a list of key-element pairs into a finite map where collisions are
-- resolved by an explicit combiner fun
--
-- the combiner expects the new element as its first argument
--
listToCombFM :: Ord k => (e -> e -> e) -> [(k, e)] -> FiniteMap k e
listToCombFM c = foldl addOnePair zeroFM
where
addOnePair m (k, e) = addToCombFM c k e m
-- the number of elements in the map
--
sizeFM :: Ord k => FiniteMap k e -> Int
sizeFM Leaf = 0
sizeFM (Node _ _ s _ _) = s
-- builds a node that automagically contains the right size
--
smartNode :: Ord k
=> k -> e -> (FiniteMap k e) -> (FiniteMap k e) -> (FiniteMap k e)
smartNode k e sm gr = Node k e (1 + sizeFM sm + sizeFM gr) sm gr
-- builds a node that automagically balances the tree if necessary and inserts
-- the right size; ONLY ONE of the subtrees is allowed to be off balance and
-- only by ONE element
--
smarterNode :: Ord k
=> k -> e -> (FiniteMap k e) -> (FiniteMap k e) -> (FiniteMap k e)
smarterNode k e sm gr =
let
sm_n = sizeFM sm
gr_n = sizeFM gr
in
if (sm_n + gr_n) < 2 -- very small tree (one part is a leaf)
then
smartNode k e sm gr -- => construct directly
else
if gr_n > (ratio * sm_n) -- child with greater keys is too big
then -- => rotate left
let
Node _ _ _ gr_sm gr_gr = gr
gr_sm_n = sizeFM gr_sm
gr_gr_n = sizeFM gr_gr
in
if gr_sm_n < gr_gr_n then single_L k e sm gr else double_L k e sm gr
else
if sm_n > (ratio * gr_n) -- child with smaller keys is too big
then -- => rotate right
let
Node _ _ _ sm_sm sm_gr = sm
sm_sm_n = sizeFM sm_sm
sm_gr_n = sizeFM sm_gr
in
if sm_gr_n < sm_sm_n then single_R k e sm gr else double_R k e sm gr
else
smartNode k e sm gr -- else nearly balanced => construct directly
where
single_L ka ea x (Node kb eb _ y z) = smartNode kb eb
(smartNode ka ea x y)
z
double_L ka ea x (Node kc ec _ (Node kb eb _ y1 y2) z) =
smartNode kb eb
(smartNode ka ea x y1)
(smartNode kc ec y2 z)
single_R kb eb (Node ka ea _ x y) z = smartNode ka ea
x
(smartNode kb eb y z)
double_R kc ec (Node ka ea _ x (Node kb eb _ y1 y2)) z =
smartNode kb eb
(smartNode ka ea x y1)
(smartNode kc ec y2 z)
-- add the given key-element pair to the map
--
-- overrides previous entries
--
addToFM :: Ord k => k -> e -> FiniteMap k e -> FiniteMap k e
addToFM = addToCombFM const
-- add the given key-element pair to the map where collisions are resolved by
-- an explicit combiner fun
--
-- the combiner expects the new element as its first argument
--
addToCombFM :: Ord k
=> (e -> e -> e) -> k -> e -> FiniteMap k e -> FiniteMap k e
addToCombFM c k e Leaf = unitFM k e
addToCombFM c k e (Node k' e' n sm gr)
| k < k' = smarterNode k' e'
(addToCombFM c k e sm)
gr
| k > k' = smarterNode k' e'
sm
(addToCombFM c k e gr)
| otherwise = Node k (c e e') n sm gr
-- removes the key-element pair specified by the given key from a map
--
-- does not complain if the key is not in the map
--
delFromFM :: Ord k => k -> FiniteMap k e -> FiniteMap k e
delFromFM k Leaf = Leaf
delFromFM k (Node k' e' n sm gr)
| k < k' = smarterNode k' e' (delFromFM k sm) gr
| k > k' = smarterNode k' e' sm (delFromFM k gr)
| otherwise = smartGlue sm gr
-- given two maps where all keys in the left are smaller than those in the
-- right and they are not too far out of balance (within ratio), glue them
-- into one map
--
smartGlue :: Ord k => FiniteMap k e -> FiniteMap k e -> FiniteMap k e
smartGlue Leaf gr = gr
smartGlue sm Leaf = sm
smartGlue sm gr = let
(k, e, gr') = extractMin gr
in
smarterNode k e sm gr'
-- extract the association with the minimal key (i.e., leftmost in the tree)
-- and simultaneously return the map without this association
--
extractMin :: Ord k => FiniteMap k e -> (k, e, FiniteMap k e)
extractMin (Node k e _ Leaf gr) = (k, e, gr)
extractMin (Node k e _ sm gr) = let
(minK, minE, sm') = extractMin sm
in
(minK, minE, smarterNode k e sm' gr)
-- given two maps where all keys in the left are smaller than those in the
-- right, glue them into one map
--
glue :: Ord k => FiniteMap k e -> FiniteMap k e -> FiniteMap k e
glue Leaf gr = gr
glue sm Leaf = sm
glue sm@(Node k_sm e_sm n_sm sm_sm gr_sm)
gr@(Node k_gr e_gr n_gr sm_gr gr_gr)
| (ratio * n_sm) < n_gr
= smarterNode k_gr e_gr (glue sm sm_gr) gr_gr
| (ratio * n_gr) < n_sm
= smarterNode k_sm e_sm sm_sm (glue gr_sm gr)
| otherwise
= let
(k, e, gr') = extractMin gr
in
smarterNode k e sm gr'
-- builds a node that automagically balances the tree if necessary and inserts
-- the right size (just as `smarterNode'), BUT which is only applicable if the
-- two given maps do not overlap (in their key values) and the new, given key
-- lies between the keys in the first and the second map
--
-- its time complexity is proportional to the _difference_ in the height of
-- the two trees representing the given maps
--
smartestNode :: Ord k
=> k -> e -> (FiniteMap k e) -> (FiniteMap k e) -> (FiniteMap k e)
--
-- if any of both trees is too big (with respect to the ratio), we insert
-- into the other; otherwise, a simple creation of a new node is sufficient
--
smartestNode k e Leaf gr = addToFM k e gr
smartestNode k e sm Leaf = addToFM k e sm
smartestNode k e sm@(Node k_sm e_sm n_sm sm_sm gr_sm)
gr@(Node k_gr e_gr n_gr sm_gr gr_gr)
| (ratio * n_sm) < n_gr
= smarterNode k_gr e_gr (smartestNode k e sm sm_gr) gr_gr
| (ratio * n_gr) < n_sm
= smarterNode k_sm e_sm sm_sm (smartestNode k e gr_sm gr)
| otherwise
= smartNode k e sm gr
-- joins two maps
--
-- entries in the left map shadow those in the right
--
joinFM :: Ord k => FiniteMap k e -> FiniteMap k e -> FiniteMap k e
--
-- explicitly coded, instead of using `joinCombFM', to avoid the `lookupFM'
-- for each element in the left map, which is unnecessary in this case
--
joinFM m Leaf = m
joinFM Leaf m = m
joinFM (Node k e _ sm gr) m = smartestNode k e sm' gr'
where
sm' = joinFM sm (smaller k m)
gr' = joinFM gr (greater k m)
-- joins two maps where collisions are resolved by an explicit combiner fun
--
joinCombFM :: Ord k
=> (e -> e -> e) -> FiniteMap k e -> FiniteMap k e -> FiniteMap k e
joinCombFM c m Leaf = m
joinCombFM c Leaf m = m
joinCombFM c (Node k e _ sm gr) m = smartestNode k e' sm' gr'
where
sm' = joinCombFM c sm (smaller k m)
gr' = joinCombFM c gr (greater k m)
e' = case lookupFM m k
of
Just f -> c e f
Nothing -> e
-- cut the part of the tree that is smaller than the given key out of the
-- map
--
smaller :: Ord k
=> k -> FiniteMap k e -> FiniteMap k e
smaller _ Leaf = Leaf
smaller k (Node k' e _ sm gr)
| k < k' = smaller k sm
| k > k' = smartestNode k' e sm (smaller k gr)
| otherwise = sm
-- cut the part of the tree that is greater than the given key out of the
-- map
--
greater :: Ord k
=> k -> FiniteMap k e -> FiniteMap k e
greater _ Leaf = Leaf
greater k (Node k' e _ sm gr)
| k > k' = greater k gr
| k < k' = smartestNode k' e (greater k sm) gr
| otherwise = gr
-- given two finite maps, yields a finite map containg all elements of the
-- first argument except those having a key that is contained in the second
-- map
--
diffFM :: Ord k => FiniteMap k e -> FiniteMap k e' -> FiniteMap k e
diffFM Leaf _ = Leaf
diffFM m Leaf = m
diffFM m (Node k _ _ sm gr) = glue (diffFM sm' sm) (diffFM gr' gr)
where
sm' = smaller k m
gr' = greater k m
-- given two finite maps, yield the map containing only entries of which the
-- keys are in both maps
--
-- the elements are taken from the left map
--
intersectFM :: Ord k => FiniteMap k e -> FiniteMap k e -> FiniteMap k e
intersectFM = intersectCombFM const
-- given two finite maps, yield the map containing only entries of which the
-- keys are in both maps
--
-- the corresponding elements of the two maps are combined using the given,
-- function
--
intersectCombFM :: Ord k
=> (e -> e -> e)
-> FiniteMap k e
-> FiniteMap k e
-> FiniteMap k e
intersectCombFM c _ Leaf = Leaf
intersectCombFM c Leaf _ = Leaf
intersectCombFM c (Node k e _ sm gr) m
| contained = smartestNode k (c e e') sm' gr'
| otherwise = glue sm' gr'
where
sm' = intersectCombFM c sm (smaller k m)
gr' = intersectCombFM c gr (greater k m)
(contained, e') = case lookupFM m k
of
Just f -> (True, f)
Nothing -> (False, undefined)
undefined = error "FiniteMaps: intersectCombFM: Undefined"
-- given a function on a finite maps elements and a finite map, yield the
-- finite map where every element is replaced as specified by the function
--
mapFM :: Ord k
=> (k -> e -> e') -> FiniteMap k e -> FiniteMap k e'
mapFM f Leaf = Leaf
mapFM f (Node k e n sm gr) = Node k (f k e) n (mapFM f sm) (mapFM f gr)
-- folds a finite map according to a given function and _neutral_ value (with
-- respect to the function) that is used for an empty map
--
foldFM :: Ord k
=> (k -> e -> a -> a) -> a -> FiniteMap k e -> a
foldFM f z Leaf = z
foldFM f z (Node k e _ sm gr) = foldFM f (f k e (foldFM f z gr)) sm
-- given a predicate and a finite map, yields the finite map containing all
-- key-element pairs satisfying the predicate
--
filterFM :: Ord k => (k -> e -> Bool) -> FiniteMap k e -> FiniteMap k e
filterFM p Leaf = Leaf
filterFM p (Node k e _ sm gr) | p k e = smartestNode k e sm' gr'
| otherwise = glue sm' gr'
where
sm' = filterFM p sm
gr' = filterFM p gr
-- given a map and a key, returns `Just e' iff the key associates to `e';
-- if the key is not in the map, `Nothing' is returned
--
lookupFM :: Ord k => FiniteMap k e -> k -> Maybe e
lookupFM Leaf _ = Nothing
lookupFM (Node k e _ sm gr) k' | k' == k = Just e
| k' < k = lookupFM sm k'
| k' > k = lookupFM gr k'
-- just as `lookupFM', but instead of returning a `Maybe' type, a default
-- value to be returned in case that the key is not in the map has to be
-- specified
--
lookupDftFM :: Ord k => FiniteMap k e -> e -> k -> e
lookupDftFM map e k = case lookupFM map k
of
Just e' -> e'
Nothing -> e
-- given a finite map, yields a list of the key-element pairs
--
toListFM :: Ord k => FiniteMap k e -> [(k, e)]
toListFM = foldFM (\k e kes -> (k, e):kes) []
-- |Yield the domain of a finite map as a list
--
domFM :: Ord k => FiniteMap k e -> [k]
domFM = map fst . toListFM
-- |Yield the image of a finite map as a list
--
imageFM :: Ord k => FiniteMap k e -> [e]
imageFM = map snd . toListFM
-- pretty print routine (used as a method in FiniteMap's instance of `Show')
--
toShowS :: (Show a, Show b, Ord a) => Int -> FiniteMap a b -> ShowS
toShowS _ fm = format fm 0
where
format Leaf _ = id
format (Node k e n sm gr) indent =
let
this = showString (take indent (repeat ' '))
. shows k . showString " --> " . shows e
. showString " (size: " . shows n
. showString ")\n"
in
this
. format sm (indent + 2)
. format gr (indent + 2)
|
mwotton/ctkl
|
src/Text/CTK/FiniteMaps.hs
|
bsd-3-clause
| 16,301 | 174 | 15 | 4,750 | 4,342 | 2,248 | 2,094 | 219 | 6 |
import GL
import Compile0
example :: GCM ()
example = do
a <- createPort :: GCM (Port Int)
b <- createPort
component $ do
assert $ val a === val b + 1
output a "a"
output b "b"
|
GRACeFUL-project/DSL-WP
|
deliverables/d4.3/test.hs
|
bsd-3-clause
| 194 | 0 | 13 | 56 | 92 | 42 | 50 | 10 | 1 |
{-|
Module : Idris.Core.Evaluate
Description : Evaluate Idris expressions.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE BangPatterns, DeriveGeneric, FlexibleInstances,
MultiParamTypeClasses, PatternGuards #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Core.Evaluate(normalise, normaliseTrace, normaliseC,
normaliseAll, normaliseBlocking, toValue, quoteTerm,
rt_simplify, simplify, specialise, hnf, convEq, convEq',
Def(..), CaseInfo(..), CaseDefs(..),
Accessibility(..), Injectivity, Totality(..), PReason(..), MetaInformation(..),
Context, initContext, ctxtAlist, next_tvar,
addToCtxt, setAccess, setInjective, setTotal, setMetaInformation, addCtxtDef, addTyDecl,
addDatatype, addCasedef, simplifyCasedef, addOperator,
lookupNames, lookupTyName, lookupTyNameExact, lookupTy, lookupTyExact,
lookupP, lookupP_all, lookupDef, lookupNameDef, lookupDefExact, lookupDefAcc, lookupDefAccExact, lookupVal,
mapDefCtxt,
lookupTotal, lookupTotalExact, lookupInjectiveExact,
lookupNameTotal, lookupMetaInformation, lookupTyEnv, isTCDict,
isCanonical, isDConName, canBeDConName, isTConName, isConName, isFnName,
Value(..), Quote(..), initEval, uniqueNameCtxt, uniqueBindersCtxt, definitions,
isUniverse) where
import Idris.Core.CaseTree
import Idris.Core.TT
import Control.Applicative hiding (Const)
import Control.Monad.State
import Data.Binary hiding (get, put)
import qualified Data.Binary as B
import Data.Maybe (listToMaybe)
import Debug.Trace
import GHC.Generics (Generic)
data EvalState = ES { limited :: [(Name, Int)],
nexthole :: Int,
blocking :: Bool }
deriving Show
type Eval a = State EvalState a
data EvalOpt = Spec
| HNF
| Simplify
| AtREPL
| RunTT
deriving (Show, Eq)
initEval = ES [] 0 False
-- VALUES (as HOAS) ---------------------------------------------------------
-- | A HOAS representation of values
data Value = VP NameType Name Value
| VV Int
-- True for Bool indicates safe to reduce
| VBind Bool Name (Binder Value) (Value -> Eval Value)
-- For frozen let bindings when simplifying
| VBLet Int Name Value Value Value
| VApp Value Value
| VType UExp
| VUType Universe
| VErased
| VImpossible
| VConstant Const
| VProj Value Int
-- | VLazy Env [Value] Term
| VTmp Int
instance Show Value where
show x = show $ evalState (quote 100 x) initEval
instance Show (a -> b) where
show x = "<<fn>>"
-- THE EVALUATOR ------------------------------------------------------------
-- The environment is assumed to be "locally named" - i.e., not de Bruijn
-- indexed.
-- i.e. it's an intermediate environment that we have while type checking or
-- while building a proof.
-- | Normalise fully type checked terms (so, assume all names/let bindings resolved)
normaliseC :: Context -> Env -> TT Name -> TT Name
normaliseC ctxt env t
= evalState (do val <- eval False ctxt [] (map finalEntry env) t []
quote 0 val) initEval
-- | Normalise everything, whether abstract, private or public
normaliseAll :: Context -> Env -> TT Name -> TT Name
normaliseAll ctxt env t
= evalState (do val <- eval False ctxt [] (map finalEntry env) t [AtREPL]
quote 0 val) initEval
-- | As normaliseAll, but with an explicit list of names *not* to reduce
normaliseBlocking :: Context -> Env -> [Name] -> TT Name -> TT Name
normaliseBlocking ctxt env blocked t
= evalState (do val <- eval False ctxt (map (\n -> (n, 0)) blocked)
(map finalEntry env) t [AtREPL]
quote 0 val) initEval
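-- (Added note, not in the upstream source: normaliseC is the variant for
-- fully elaborated terms, normaliseAll passes AtREPL so that even
-- abstract/private names reduce, and normaliseBlocking seeds the
-- unfolding-limit list with the blocked names at depth 0 so they are never
-- unfolded.)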
normalise :: Context -> Env -> TT Name -> TT Name
normalise = normaliseTrace False
normaliseTrace :: Bool -> Context -> Env -> TT Name -> TT Name
normaliseTrace tr ctxt env t
= evalState (do val <- eval tr ctxt [] (map finalEntry env) (finalise t) []
quote 0 val) initEval
toValue :: Context -> Env -> TT Name -> Value
toValue ctxt env t
= evalState (eval False ctxt [] (map finalEntry env) t []) initEval
quoteTerm :: Value -> TT Name
quoteTerm val = evalState (quote 0 val) initEval
-- Return a specialised name, and an updated list of reductions available,
-- so that the caller can tell how much specialisation was achieved.
specialise :: Context -> Env -> [(Name, Int)] -> TT Name ->
(TT Name, [(Name, Int)])
specialise ctxt env limits t
= let (tm, st) =
runState (do val <- eval False ctxt []
(map finalEntry env) (finalise t)
[Spec]
quote 0 val) (initEval { limited = limits }) in
(tm, limited st)
-- | Like normalise, but we only reduce functions that are marked as okay to
-- inline (and probably shouldn't reduce lets?)
-- 20130908: now only used to reduce for totality checking. Inlining should
-- be done elsewhere.
simplify :: Context -> Env -> TT Name -> TT Name
simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "force", 0),
(sUN "Force", 0),
(sUN "assert_smaller", 0),
(sUN "assert_total", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "fork", 0)]
(map finalEntry env) (finalise t)
[Simplify]
quote 0 val) initEval
-- | Simplify for run-time (i.e. basic inlining)
rt_simplify :: Context -> Env -> TT Name -> TT Name
rt_simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "force", 0),
(sUN "Force", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "prim_fork", 0)]
(map finalEntry env) (finalise t)
[RunTT]
quote 0 val) initEval
-- | Reduce a term to head normal form
hnf :: Context -> Env -> TT Name -> TT Name
hnf ctxt env t
= evalState (do val <- eval False ctxt []
(map finalEntry env)
(finalise t) [HNF]
quote 0 val) initEval
-- unbindEnv env (quote 0 (eval ctxt (bindEnv env t)))
finalEntry :: (Name, Binder (TT Name)) -> (Name, Binder (TT Name))
finalEntry (n, b) = (n, fmap finalise b)
bindEnv :: EnvTT n -> TT n -> TT n
bindEnv [] tm = tm
bindEnv ((n, Let t v):bs) tm = Bind n (NLet t v) (bindEnv bs tm)
bindEnv ((n, b):bs) tm = Bind n b (bindEnv bs tm)
unbindEnv :: EnvTT n -> TT n -> TT n
unbindEnv [] tm = tm
unbindEnv (_:bs) (Bind n b sc) = unbindEnv bs sc
unbindEnv env tm = error "Impossible case occurred: couldn't unbind env."
usable :: Bool -- specialising
-> Int -- Reduction depth limit (when simplifying/at REPL)
-> Name -> [(Name, Int)] -> Eval (Bool, [(Name, Int)])
-- usable _ _ ns@((MN 0 "STOP", _) : _) = return (False, ns)
usable False depthlimit n [] = return (True, [])
usable True depthlimit n ns
= do ES ls num b <- get
if b then return (False, ns)
else case lookup n ls of
Just 0 -> return (False, ns)
Just i -> return (True, ns)
_ -> return (False, ns)
usable False depthlimit n ns
= case lookup n ns of
Just 0 -> return (False, ns)
Just i -> return $ (True, (n, abs (i-1)) : filter (\ (n', _) -> n/=n') ns)
_ -> return $ (True, (n, depthlimit) : filter (\ (n', _) -> n/=n') ns)
fnCount :: Int -> Name -> Eval ()
fnCount inc n
= do ES ls num b <- get
case lookup n ls of
Just i -> do put $ ES ((n, (i - inc)) :
filter (\ (n', _) -> n/=n') ls) num b
_ -> return ()
setBlock :: Bool -> Eval ()
setBlock b = do ES ls num _ <- get
put (ES ls num b)
deduct = fnCount 1
reinstate = fnCount (-1)
-- | Evaluate in a context of locally named things (i.e. not de Bruijn indexed,
-- such as we might have during construction of a proof)
-- The (Name, Int) pair in the arguments is the maximum depth of unfolding of
-- a name. The corresponding pair in the state is the maximum number of
-- unfoldings overall.
eval :: Bool -> Context -> [(Name, Int)] -> Env -> TT Name ->
[EvalOpt] -> Eval Value
eval traceon ctxt ntimes genv tm opts = ev ntimes [] True [] tm where
spec = Spec `elem` opts
simpl = Simplify `elem` opts
runtime = RunTT `elem` opts
atRepl = AtREPL `elem` opts
hnf = HNF `elem` opts
-- returns 'True' if the function should block
-- normal evaluation should return false
blockSimplify (CaseInfo inl always dict) n stk
| runtime
= if always then False
else not (inl || dict) || elem n stk
| simpl
= (not (inl || dict) || elem n stk)
|| (n == sUN "prim__syntactic_eq")
| otherwise = False
getCases cd | simpl = cases_totcheck cd
| runtime = cases_runtime cd
| otherwise = cases_compiletime cd
ev ntimes stk top env (P _ n ty)
| Just (Let t v) <- lookup n genv = ev ntimes stk top env v
ev ntimes_in stk top env (P Ref n ty)
| not top && hnf = liftM (VP Ref n) (ev ntimes stk top env ty)
| otherwise
= do let limit = if simpl then 100 else 10000
(u, ntimes) <- usable spec limit n ntimes_in
let red = u && (tcReducible n ctxt || spec || atRepl || runtime
|| sUN "assert_total" `elem` stk)
if red then
do let val = lookupDefAcc n (spec || atRepl || runtime) ctxt
case val of
[(Function _ tm, Public)] ->
ev ntimes (n:stk) True env tm
[(TyDecl nt ty, _)] -> do vty <- ev ntimes stk True env ty
return $ VP nt n vty
[(CaseOp ci _ _ _ _ cd, acc)]
| (acc == Public || acc == Hidden || sUN "assert_total" `elem` stk) &&
null (fst (cases_totcheck cd)) -> -- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then liftM (VP Ref n) (ev ntimes stk top env ty)
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns [] tree
case c of
(Nothing, _) -> liftM (VP Ref n) (ev ntimes stk top env ty)
(Just v, _) -> return v
_ -> liftM (VP Ref n) (ev ntimes stk top env ty)
else liftM (VP Ref n) (ev ntimes stk top env ty)
ev ntimes stk top env (P nt n ty)
= liftM (VP nt n) (ev ntimes stk top env ty)
ev ntimes stk top env (V i)
| i < length env && i >= 0 = return $ snd (env !! i)
| otherwise = return $ VV i
ev ntimes stk top env (Bind n (Let t v) sc)
| not runtime || occurrences n sc < 2
= do v' <- ev ntimes stk top env v --(finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
wknV (-1) sc'
| otherwise
= do t' <- ev ntimes stk top env t
v' <- ev ntimes stk top env v --(finalise v)
-- use Tmp as a placeholder, then make it a variable reference
-- again when evaluation finished
hs <- get
let vd = nexthole hs
put (hs { nexthole = vd + 1 })
sc' <- ev ntimes stk top ((n, VP Bound (sMN vd "vlet") VErased) : env) sc
return $ VBLet vd n t' v' sc'
ev ntimes stk top env (Bind n (NLet t v) sc)
= do t' <- ev ntimes stk top env (finalise t)
v' <- ev ntimes stk top env (finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
return $ VBind True n (Let t' v') (\x -> return sc')
ev ntimes stk top env (Bind n b sc)
= do b' <- vbind env b
let n' = uniqueName n (map fst genv ++ map fst env)
return $ VBind True -- (vinstances 0 sc < 2)
n' b' (\x -> ev ntimes stk False ((n', x):env) sc)
where vbind env t
= fmapMB (\tm -> ev ntimes stk top env (finalise tm)) t
-- block reduction immediately under codata (and not forced)
ev ntimes stk top env
(App _ (App _ (App _ d@(P _ (UN dly) _) l@(P _ (UN lco) _)) t) arg)
| dly == txt "Delay" && lco == txt "Infinite" && not simpl
= do let (f, _) = unApply arg
let ntimes' = case f of
P _ fn _ -> (fn, 0) : ntimes
_ -> ntimes
when spec $ setBlock True
d' <- ev ntimes' stk False env d
l' <- ev ntimes' stk False env l
t' <- ev ntimes' stk False env t
arg' <- ev ntimes' stk False env arg
when spec $ setBlock False
evApply ntimes' stk top env [l',t',arg'] d'
-- Treat "assert_total" specially, as long as it's defined!
ev ntimes stk top env (App _ (App _ (P _ n@(UN at) _) _) arg)
| [(CaseOp _ _ _ _ _ _, _)] <- lookupDefAcc n (spec || atRepl || runtime) ctxt,
at == txt "assert_total" && not simpl
= ev ntimes (n : stk) top env arg
ev ntimes stk top env (App _ f a)
= do f' <- ev ntimes stk False env f
a' <- ev ntimes stk False env a
evApply ntimes stk top env [a'] f'
ev ntimes stk top env (Proj t i)
= do -- evaluate dictionaries if it means the projection works
t' <- ev ntimes stk top env t
-- tfull' <- reapply ntimes stk top env t' []
return (doProj t' (getValArgs t'))
where doProj t' (VP (DCon _ _ _) _ _, args)
| i >= 0 && i < length args = args!!i
doProj t' _ = VProj t' i
ev ntimes stk top env (Constant c) = return $ VConstant c
ev ntimes stk top env Erased = return VErased
ev ntimes stk top env Impossible = return VImpossible
ev ntimes stk top env (TType i) = return $ VType i
ev ntimes stk top env (UType u) = return $ VUType u
evApply ntimes stk top env args (VApp f a)
= evApply ntimes stk top env (a:args) f
evApply ntimes stk top env args f
= apply ntimes stk top env f args
reapply ntimes stk top env f@(VP Ref n ty) args
= let val = lookupDefAcc n (spec || atRepl || runtime) ctxt in
case val of
[(CaseOp ci _ _ _ _ cd, acc)] ->
let (ns, tree) = getCases cd in
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
_ -> case args of
(a : as) -> return $ unload env f (a : as)
[] -> return f
reapply ntimes stk top env (VApp f a) args
= reapply ntimes stk top env f (a : args)
reapply ntimes stk top env v args = return v
apply ntimes stk top env (VBind True n (Lam t) sc) (a:as)
= do a' <- sc a
app <- apply ntimes stk top env a' as
wknV 1 app
apply ntimes_in stk top env f@(VP Ref n ty) args
| not top && hnf = case args of
[] -> return f
_ -> return $ unload env f args
| otherwise
= do let limit = if simpl then 100 else 10000
(u, ntimes) <- usable spec limit n ntimes_in
let red = u && (tcReducible n ctxt || spec || atRepl || runtime
|| sUN "assert_total" `elem` stk)
if red then
do let val = lookupDefAcc n (spec || atRepl || runtime) ctxt
case val of
[(CaseOp ci _ _ _ _ cd, acc)]
| acc == Public || acc == Hidden ->
-- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then return $ unload env (VP Ref n ty) args
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
[(Operator _ i op, _)] ->
if (i <= length args)
then case op (take i args) of
Nothing -> return $ unload env (VP Ref n ty) args
Just v -> evApply ntimes stk top env (drop i args) v
else return $ unload env (VP Ref n ty) args
_ -> case args of
[] -> return f
_ -> return $ unload env f args
else case args of
(a : as) -> return $ unload env f (a:as)
[] -> return f
apply ntimes stk top env f (a:as) = return $ unload env f (a:as)
apply ntimes stk top env f [] = return f
-- specApply stk env f@(VP Ref n ty) args
-- = case lookupCtxt n statics of
-- [as] -> if or as
-- then trace (show (n, map fst (filter (\ (_, s) -> s) (zip args as)))) $
-- return $ unload env f args
-- else return $ unload env f args
-- _ -> return $ unload env f args
-- specApply stk env f args = return $ unload env f args
unload :: [(Name, Value)] -> Value -> [Value] -> Value
unload env f [] = f
unload env f (a:as) = unload env (VApp f a) as
evCase ntimes n stk top env ns args tree
| length ns <= length args
= do let args' = take (length ns) args
let rest = drop (length ns) args
when spec $ deduct n
t <- evTree ntimes stk top env (zip ns args') tree
when spec $ case t of
Nothing -> reinstate n -- Blocked, count n again
Just _ -> return ()
-- (zipWith (\n , t) -> (n, t)) ns args') tree
return (t, rest)
| otherwise = return (Nothing, args)
evTree :: [(Name, Int)] -> [Name] -> Bool ->
[(Name, Value)] -> [(Name, Value)] -> SC -> Eval (Maybe Value)
evTree ntimes stk top env amap (UnmatchedCase str) = return Nothing
evTree ntimes stk top env amap (STerm tm)
= do let etm = pToVs (map fst amap) tm
etm' <- ev ntimes stk (not (conHeaded tm))
(amap ++ env) etm
return $ Just etm'
evTree ntimes stk top env amap (ProjCase t alts)
= do t' <- ev ntimes stk top env t
doCase ntimes stk top env amap t' alts
evTree ntimes stk top env amap (Case _ n alts)
= case lookup n amap of
Just v -> doCase ntimes stk top env amap v alts
_ -> return Nothing
evTree ntimes stk top env amap ImpossibleCase = return Nothing
doCase ntimes stk top env amap v alts =
do c <- chooseAlt env v (getValArgs v) alts amap
case c of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> do c' <- chooseAlt' ntimes stk env v (getValArgs v) alts amap
case c' of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> return Nothing
conHeaded tm@(App _ _ _)
| (P (DCon _ _ _) _ _, args) <- unApply tm = True
conHeaded t = False
chooseAlt' ntimes stk env _ (f, args) alts amap
= do f' <- apply ntimes stk True env f args
chooseAlt env f' (getValArgs f')
alts amap
chooseAlt :: [(Name, Value)] -> Value -> (Value, [Value]) -> [CaseAlt] ->
[(Name, Value)] ->
Eval (Maybe ([(Name, Value)], SC))
chooseAlt env _ (VP (DCon i a _) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts = return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP (TCon i a) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts
= return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VConstant c, []) alts amap
| Just v <- findConst c alts = return $ Just (amap, v)
| Just (n', sub, sc) <- findSuc c alts
= return $ Just (updateAmap [(n',sub)] amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP _ n _, args) alts amap
| Just (ns, sc) <- findFn n alts = return $ Just (updateAmap (zip ns args) amap, sc)
chooseAlt env _ (VBind _ _ (Pi i s k) t, []) alts amap
| Just (ns, sc) <- findFn (sUN "->") alts
= do t' <- t (VV 0) -- we know it's not in scope or it's not a pattern
return $ Just (updateAmap (zip ns [s, t']) amap, sc)
chooseAlt _ _ _ alts amap
| Just v <- findDefault alts
= if (any fnCase alts)
then return $ Just (amap, v)
else return Nothing
| otherwise = return Nothing
fnCase (FnCase _ _ _) = True
fnCase _ = False
-- Replace old variable names in the map with new matches
-- (This is possibly unnecessary since we make unique names and don't
-- allow repeated variables...?)
updateAmap newm amap
= newm ++ filter (\ (x, _) -> not (elem x (map fst newm))) amap
findTag i [] = Nothing
findTag i (ConCase n j ns sc : xs) | i == j = Just (ns, sc)
findTag i (_ : xs) = findTag i xs
findFn fn [] = Nothing
findFn fn (FnCase n ns sc : xs) | fn == n = Just (ns, sc)
findFn fn (_ : xs) = findFn fn xs
findDefault [] = Nothing
findDefault (DefaultCase sc : xs) = Just sc
findDefault (_ : xs) = findDefault xs
findSuc c [] = Nothing
findSuc (BI val) (SucCase n sc : _)
| val /= 0 = Just (n, VConstant (BI (val - 1)), sc)
findSuc c (_ : xs) = findSuc c xs
findConst c [] = Nothing
findConst c (ConstCase c' v : xs) | c == c' = Just v
findConst (AType (ATInt ITNative)) (ConCase n 1 [] v : xs) = Just v
findConst (AType ATFloat) (ConCase n 2 [] v : xs) = Just v
findConst (AType (ATInt ITChar)) (ConCase n 3 [] v : xs) = Just v
findConst StrType (ConCase n 4 [] v : xs) = Just v
findConst (AType (ATInt ITBig)) (ConCase n 6 [] v : xs) = Just v
findConst (AType (ATInt (ITFixed ity))) (ConCase n tag [] v : xs)
| tag == 7 + fromEnum ity = Just v
findConst c (_ : xs) = findConst c xs
getValArgs tm = getValArgs' tm []
getValArgs' (VApp f a) as = getValArgs' f (a:as)
getValArgs' f as = (f, as)
-- tmpToV i vd (VLetHole j) | vd == j = return $ VV i
-- tmpToV i vd (VP nt n v) = liftM (VP nt n) (tmpToV i vd v)
-- tmpToV i vd (VBind n b sc) = do b' <- fmapMB (tmpToV i vd) b
-- let sc' = \x -> do x' <- sc x
-- tmpToV (i + 1) vd x'
-- return (VBind n b' sc')
-- tmpToV i vd (VApp f a) = liftM2 VApp (tmpToV i vd f) (tmpToV i vd a)
-- tmpToV i vd x = return x
instance Eq Value where
(==) x y = getTT x == getTT y
where getTT v = evalState (quote 0 v) initEval
class Quote a where
quote :: Int -> a -> Eval (TT Name)
instance Quote Value where
quote i (VP nt n v) = liftM (P nt n) (quote i v)
quote i (VV x) = return $ V x
quote i (VBind _ n b sc) = do sc' <- sc (VTmp i)
b' <- quoteB b
liftM (Bind n b') (quote (i+1) sc')
where quoteB t = fmapMB (quote i) t
quote i (VBLet vd n t v sc)
= do sc' <- quote i sc
t' <- quote i t
v' <- quote i v
let sc'' = pToV (sMN vd "vlet") (addBinder sc')
return (Bind n (Let t' v') sc'')
quote i (VApp f a) = liftM2 (App MaybeHoles) (quote i f) (quote i a)
quote i (VType u) = return (TType u)
quote i (VUType u) = return (UType u)
quote i VErased = return Erased
quote i VImpossible = return Impossible
quote i (VProj v j) = do v' <- quote i v
return (Proj v' j)
quote i (VConstant c) = return $ Constant c
quote i (VTmp x) = return $ V (i - x - 1)
wknV :: Int -> Value -> Eval Value
wknV i (VV x) | x >= i = return $ VV (x - 1)
wknV i (VBind red n b sc) = do b' <- fmapMB (wknV i) b
return $ VBind red n b' (\x -> do x' <- sc x
wknV (i + 1) x')
wknV i (VApp f a) = liftM2 VApp (wknV i f) (wknV i a)
wknV i t = return t
isUniverse :: Term -> Bool
isUniverse (TType _) = True
isUniverse (UType _) = True
isUniverse _ = False
isUsableUniverse :: Term -> Bool
isUsableUniverse (UType NullType) = False
isUsableUniverse x = isUniverse x
convEq' ctxt hs x y = evalStateT (convEq ctxt hs x y) (0, [])
convEq :: Context -> [Name] -> TT Name -> TT Name -> StateT UCs TC Bool
convEq ctxt holes topx topy = ceq [] topx topy where
ceq :: [(Name, Name)] -> TT Name -> TT Name -> StateT UCs TC Bool
ceq ps (P xt x _) (P yt y _)
| x `elem` holes || y `elem` holes = return True
| x == y || (x, y) `elem` ps || (y,x) `elem` ps = return True
| otherwise = sameDefs ps x y
ceq ps x (Bind n (Lam t) (App _ y (V 0)))
= ceq ps x (substV (P Bound n t) y)
ceq ps (Bind n (Lam t) (App _ x (V 0))) y
= ceq ps (substV (P Bound n t) x) y
ceq ps x (Bind n (Lam t) (App _ y (P Bound n' _)))
| n == n' = ceq ps x y
ceq ps (Bind n (Lam t) (App _ x (P Bound n' _))) y
| n == n' = ceq ps x y
ceq ps (Bind n (PVar t) sc) y = ceq ps sc y
ceq ps x (Bind n (PVar t) sc) = ceq ps x sc
ceq ps (Bind n (PVTy t) sc) y = ceq ps sc y
ceq ps x (Bind n (PVTy t) sc) = ceq ps x sc
ceq ps (V x) (V y) = return (x == y)
ceq ps (V x) (P _ y _)
| x >= 0 && length ps > x = return (fst (ps!!x) == y)
| otherwise = return False
ceq ps (P _ x _) (V y)
| y >= 0 && length ps > y = return (x == snd (ps!!y))
| otherwise = return False
ceq ps (Bind n xb xs) (Bind n' yb ys)
= liftM2 (&&) (ceqB ps xb yb) (ceq ((n,n'):ps) xs ys)
where
ceqB ps (Let v t) (Let v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Guess v t) (Guess v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Pi i v t) (Pi i' v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps b b' = ceq ps (binderTy b) (binderTy b')
-- Special case for 'case' blocks - size of scope causes complications,
-- we only want to check the blocks themselves are valid and identical
-- in the current scope. So, just check the bodies, and the additional
-- arguments the case blocks are applied to.
ceq ps x@(App _ _ _) y@(App _ _ _)
| (P _ cx _, xargs) <- unApply x,
(P _ cy _, yargs) <- unApply y,
caseName cx && caseName cy = sameCase ps cx cy xargs yargs
ceq ps (App _ fx ax) (App _ fy ay) = liftM2 (&&) (ceq ps fx fy) (ceq ps ax ay)
ceq ps (Constant x) (Constant y) = return (x == y)
ceq ps (TType x) (TType y) | x == y = return True
ceq ps (TType (UVal 0)) (TType y) = return True
ceq ps (TType x) (TType y) = do (v, cs) <- get
put (v, ULE x y : cs)
return True
ceq ps (UType AllTypes) x = return (isUsableUniverse x)
ceq ps x (UType AllTypes) = return (isUsableUniverse x)
ceq ps (UType u) (UType v) = return (u == v)
ceq ps Erased _ = return True
ceq ps _ Erased = return True
ceq ps x y = return False
caseeq ps (Case _ n cs) (Case _ n' cs') = caseeqA ((n,n'):ps) cs cs'
where
caseeqA ps (ConCase x i as sc : rest) (ConCase x' i' as' sc' : rest')
= do q1 <- caseeq (zip as as' ++ ps) sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && i == i' && q1 && q2
caseeqA ps (ConstCase x sc : rest) (ConstCase x' sc' : rest')
= do q1 <- caseeq ps sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && q1 && q2
caseeqA ps (DefaultCase sc : rest) (DefaultCase sc' : rest')
= liftM2 (&&) (caseeq ps sc sc') (caseeqA ps rest rest')
caseeqA ps [] [] = return True
caseeqA ps _ _ = return False
caseeq ps (STerm x) (STerm y) = ceq ps x y
caseeq ps (UnmatchedCase _) (UnmatchedCase _) = return True
caseeq ps _ _ = return False
sameDefs ps x y = case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (_, xdef) = cases_compiletime xd
(_, ydef) = cases_compiletime yd in
caseeq ((x,y):ps) xdef ydef
_ -> return False
sameCase :: [(Name, Name)] -> Name -> Name -> [Term] -> [Term] ->
StateT UCs TC Bool
sameCase ps x y xargs yargs
= case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (xin, xdef) = cases_compiletime xd
(yin, ydef) = cases_compiletime yd in
do liftM2 (&&)
(do ok <- zipWithM (ceq ps)
(drop (length xin) xargs)
(drop (length yin) yargs)
return (and ok))
(caseeq ((x,y):ps) xdef ydef)
_ -> return False
-- SPECIALISATION -----------------------------------------------------------
-- We need too much control to be able to do this by tweaking the main
-- evaluator
spec :: Context -> Ctxt [Bool] -> Env -> TT Name -> Eval (TT Name)
spec ctxt statics genv tm = error "spec undefined"
-- CONTEXTS -----------------------------------------------------------------
{-| A definition is either a simple function (just an expression with a type),
a constant, which could be a data or type constructor, an axiom or as an
yet undefined function, or an Operator.
An Operator is a function which explains how to reduce.
A CaseOp is a function defined by a simple case tree -}
data Def = Function !Type !Term
| TyDecl NameType !Type
| Operator Type Int ([Value] -> Maybe Value)
| CaseOp CaseInfo
!Type
![(Type, Bool)] -- argument types, whether canonical
![Either Term (Term, Term)] -- original definition
![([Name], Term, Term)] -- simplified for totality check definition
!CaseDefs
deriving Generic
-- [Name] SC -- Compile time case definition
-- [Name] SC -- Run time case definitions
data CaseDefs = CaseDefs {
cases_totcheck :: !([Name], SC),
cases_compiletime :: !([Name], SC),
cases_inlined :: !([Name], SC),
cases_runtime :: !([Name], SC)
}
deriving Generic
data CaseInfo = CaseInfo {
case_inlinable :: Bool, -- decided by machine
case_alwaysinline :: Bool, -- decided by %inline flag
tc_dictionary :: Bool
}
deriving Generic
{-!
deriving instance Binary Def
!-}
{-!
deriving instance Binary CaseInfo
!-}
{-!
deriving instance Binary CaseDefs
!-}
instance Show Def where
show (Function ty tm) = "Function: " ++ show (ty, tm)
show (TyDecl nt ty) = "TyDecl: " ++ show nt ++ " " ++ show ty
show (Operator ty _ _) = "Operator: " ++ show ty
show (CaseOp (CaseInfo inlc inla inlr) ty atys ps_in ps cd)
= let (ns, sc) = cases_compiletime cd
(ns_t, sc_t) = cases_totcheck cd
(ns', sc') = cases_runtime cd in
"Case: " ++ show ty ++ " " ++ show ps ++ "\n" ++
"TOTALITY CHECK TIME:\n\n" ++
show ns_t ++ " " ++ show sc_t ++ "\n\n" ++
"COMPILE TIME:\n\n" ++
show ns ++ " " ++ show sc ++ "\n\n" ++
"RUN TIME:\n\n" ++
show ns' ++ " " ++ show sc' ++ "\n\n" ++
if inlc then "Inlinable" else "Not inlinable" ++
if inla then " Aggressively\n" else "\n"
-------
-- Hidden => Programs can't access the name at all
-- Public => Programs can access the name and use at will
-- Frozen => Programs can access the name, which doesn't reduce
-- Private => Programs can't access the name, doesn't reduce internally
data Accessibility = Hidden | Public | Frozen | Private
deriving (Eq, Ord, Generic)
instance Show Accessibility where
show Public = "public export"
show Frozen = "export"
show Private = "private"
show Hidden = "hidden"
type Injectivity = Bool
-- | The result of totality checking
data Totality = Total [Int] -- ^ well-founded arguments
| Productive -- ^ productive
| Partial PReason
| Unchecked
| Generated
deriving (Eq, Generic)
-- | Reasons why a function may not be total
data PReason = Other [Name] | Itself | NotCovering | NotPositive | UseUndef Name
| ExternalIO | BelieveMe | Mutual [Name] | NotProductive
deriving (Show, Eq, Generic)
instance Show Totality where
show (Total args)= "Total" -- ++ show args ++ " decreasing arguments"
show Productive = "Productive" -- ++ show args ++ " decreasing arguments"
show Unchecked = "not yet checked for totality"
show (Partial Itself) = "possibly not total as it is not well founded"
show (Partial NotCovering) = "not total as there are missing cases"
show (Partial NotPositive) = "not strictly positive"
show (Partial ExternalIO) = "an external IO primitive"
show (Partial NotProductive) = "not productive"
show (Partial BelieveMe) = "not total due to use of believe_me in proof"
show (Partial (Other ns)) = "possibly not total due to: " ++ showSep ", " (map show ns)
show (Partial (Mutual ns)) = "possibly not total due to recursive path " ++
showSep " --> " (map show ns)
show (Partial (UseUndef n)) = "possibly not total because it uses the undefined name " ++ show n
show Generated = "auto-generated"
{-!
deriving instance Binary Accessibility
!-}
{-!
deriving instance Binary Totality
!-}
{-!
deriving instance Binary PReason
!-}
-- Possible attached meta-information for a definition in context
data MetaInformation =
EmptyMI -- ^ No meta-information
| DataMI [Int] -- ^ Meta information for a data declaration with position of parameters
deriving (Eq, Show, Generic)
-- | Contexts used for global definitions and for proof state. They contain
-- universe constraints and existing definitions.
data Context = MkContext {
next_tvar :: Int,
definitions :: Ctxt (Def, Injectivity, Accessibility, Totality, MetaInformation)
} deriving (Show, Generic)
-- | The initial empty context
initContext = MkContext 0 emptyContext
mapDefCtxt :: (Def -> Def) -> Context -> Context
mapDefCtxt f (MkContext t !defs) = MkContext t (mapCtxt f' defs)
  where f' (!d, i, a, t, m) = (f d, i, a, t, m)
-- | Get the definitions from a context
ctxtAlist :: Context -> [(Name, Def)]
ctxtAlist ctxt = map (\(n, (d, i, a, t, m)) -> (n, d)) $ toAlist (definitions ctxt)
veval ctxt env t = evalState (eval False ctxt [] env t []) initEval
addToCtxt :: Name -> Term -> Type -> Context -> Context
addToCtxt n tm ty uctxt
= let ctxt = definitions uctxt
!ctxt' = addDef n (Function ty tm, False, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
setAccess :: Name -> Accessibility -> Context -> Context
setAccess n a uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, i, _, t, m) -> (d, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setInjective :: Name -> Injectivity -> Context -> Context
setInjective n i uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, _, a, t, m) -> (d, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setTotal :: Name -> Totality -> Context -> Context
setTotal n t uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, i, a, _, m) -> (d, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setMetaInformation :: Name -> MetaInformation -> Context -> Context
setMetaInformation n m uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, i, a, t, _) -> (d, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
addCtxtDef :: Name -> Def -> Context -> Context
addCtxtDef n d c = let ctxt = definitions c
!ctxt' = addDef n (d, False, Public, Unchecked, EmptyMI) $! ctxt in
c { definitions = ctxt' }
addTyDecl :: Name -> NameType -> Type -> Context -> Context
addTyDecl n nt ty uctxt
= let ctxt = definitions uctxt
!ctxt' = addDef n (TyDecl nt ty, False, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
addDatatype :: Datatype Name -> Context -> Context
addDatatype (Data n tag ty unique cons) uctxt
= let ctxt = definitions uctxt
ty' = normalise uctxt [] ty
!ctxt' = addCons 0 cons (addDef n
(TyDecl (TCon tag (arity ty')) ty, True, Public, Unchecked, EmptyMI) ctxt) in
uctxt { definitions = ctxt' }
where
addCons tag [] ctxt = ctxt
addCons tag ((n, ty) : cons) ctxt
= let ty' = normalise uctxt [] ty in
addCons (tag+1) cons (addDef n
(TyDecl (DCon tag (arity ty') unique) ty, True, Public, Unchecked, EmptyMI) ctxt)
-- FIXME: Too many arguments! Refactor all these Bools.
--
-- Issue #1724 on the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1724
addCasedef :: Name -> ErasureInfo -> CaseInfo ->
Bool -> SC -> -- default case
Bool -> Bool ->
[(Type, Bool)] -> -- argument types, whether canonical
[Int] -> -- inaccessible arguments
[Either Term (Term, Term)] ->
[([Name], Term, Term)] -> -- totality
[([Name], Term, Term)] -> -- compile time
[([Name], Term, Term)] -> -- inlined
[([Name], Term, Term)] -> -- run time
Type -> Context -> TC Context
addCasedef n ei ci@(CaseInfo inline alwaysInline tcdict)
tcase covering reflect asserted argtys inacc
ps_in ps_tot ps_inl ps_ct ps_rt ty uctxt
= do let ctxt = definitions uctxt
access = case lookupDefAcc n False uctxt of
[(_, acc)] -> acc
_ -> Public
totalityTime <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_tot ei
compileTime <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_ct ei
inlined <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_inl ei
runtime <- simpleCase tcase covering reflect RunTime emptyFC inacc argtys ps_rt ei
ctxt' <- case (totalityTime, compileTime, inlined, runtime) of
(CaseDef args_tot sc_tot _,
CaseDef args_ct sc_ct _,
CaseDef args_inl sc_inl _,
CaseDef args_rt sc_rt _) ->
let inl = alwaysInline -- tcdict
inlc = (inl || small n args_ct sc_ct) && (not asserted)
inlr = inl || small n args_rt sc_rt
cdef = CaseDefs (args_tot, sc_tot)
(args_ct, sc_ct)
(args_inl, sc_inl)
(args_rt, sc_rt)
op = (CaseOp (ci { case_inlinable = inlc })
ty argtys ps_in ps_tot cdef,
False, access, Unchecked, EmptyMI)
in return $ addDef n op ctxt
-- other -> tfail (Msg $ "Error adding case def: " ++ show other)
return uctxt { definitions = ctxt' }
-- simplify a definition for totality checking
simplifyCasedef :: Name -> ErasureInfo -> Context -> TC Context
simplifyCasedef n ei uctxt
= do let ctxt = definitions uctxt
ctxt' <- case lookupCtxt n ctxt of
[(CaseOp ci ty atys [] ps _, inj, acc, tot, metainf)] ->
return ctxt -- nothing to simplify (or already done...)
[(CaseOp ci ty atys ps_in ps cd, inj, acc, tot, metainf)] ->
do let ps_in' = map simpl ps_in
pdef = map debind ps_in'
CaseDef args sc _ <- simpleCase False (STerm Erased) False CompileTime emptyFC [] atys pdef ei
return $ addDef n (CaseOp ci
ty atys ps_in' ps (cd { cases_totcheck = (args, sc) }),
inj, acc, tot, metainf) ctxt
_ -> return ctxt
return uctxt { definitions = ctxt' }
where
depat acc (Bind n (PVar t) sc)
= depat (n : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
simpl (Right (x, y)) = Right (x, simplify uctxt [] y)
simpl t = t
addOperator :: Name -> Type -> Int -> ([Value] -> Maybe Value) ->
Context -> Context
addOperator n ty a op uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (Operator ty a op, False, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
tfst (a, _, _, _, _) = a
lookupNames :: Name -> Context -> [Name]
lookupNames n ctxt
= let ns = lookupCtxtName n (definitions ctxt) in
map fst ns
-- | Get the list of pairs of fully-qualified names and their types that match some name
lookupTyName :: Name -> Context -> [(Name, Type)]
lookupTyName n ctxt = do
(name, def) <- lookupCtxtName n (definitions ctxt)
ty <- case tfst def of
(Function ty _) -> return ty
(TyDecl _ ty) -> return ty
(Operator ty _ _) -> return ty
(CaseOp _ ty _ _ _ _) -> return ty
return (name, ty)
-- | Get the pair of a fully-qualified name and its type, if there is a unique one matching the name used as a key.
lookupTyNameExact :: Name -> Context -> Maybe (Name, Type)
lookupTyNameExact n ctxt = listToMaybe [ (nm, v) | (nm, v) <- lookupTyName n ctxt, nm == n ]
-- | Get the types that match some name
lookupTy :: Name -> Context -> [Type]
lookupTy n ctxt = map snd (lookupTyName n ctxt)
-- | Get the single type that matches some name precisely
lookupTyExact :: Name -> Context -> Maybe Type
lookupTyExact n ctxt = fmap snd (lookupTyNameExact n ctxt)
-- | Return true if the given type is a concrete type family or primitive,
-- False if it's a function to compute a type or a variable
isCanonical :: Type -> Context -> Bool
isCanonical t ctxt
= case unApply t of
(P _ n _, _) -> isConName n ctxt
(Constant _, _) -> True
_ -> False
isConName :: Name -> Context -> Bool
isConName n ctxt = isTConName n ctxt || isDConName n ctxt
isTConName :: Name -> Context -> Bool
isTConName n ctxt
= case lookupDefExact n ctxt of
Just (TyDecl (TCon _ _) _) -> True
_ -> False
-- | Check whether a resolved name is certainly a data constructor
isDConName :: Name -> Context -> Bool
isDConName n ctxt
= case lookupDefExact n ctxt of
Just (TyDecl (DCon _ _ _) _) -> True
_ -> False
-- | Check whether any overloading of a name is a data constructor
canBeDConName :: Name -> Context -> Bool
canBeDConName n ctxt
= or $ do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(TyDecl (DCon _ _ _) _) -> return True
_ -> return False
isFnName :: Name -> Context -> Bool
isFnName n ctxt
= case lookupDefExact n ctxt of
Just (Function _ _) -> True
Just (Operator _ _ _) -> True
Just (CaseOp _ _ _ _ _ _) -> True
_ -> False
isTCDict :: Name -> Context -> Bool
isTCDict n ctxt
= case lookupDefExact n ctxt of
Just (Function _ _) -> False
Just (Operator _ _ _) -> False
Just (CaseOp ci _ _ _ _ _) -> tc_dictionary ci
_ -> False
lookupP :: Name -> Context -> [Term]
lookupP = lookupP_all False False
lookupP_all :: Bool -> Bool -> Name -> Context -> [Term]
lookupP_all all exact n ctxt
= do (n', def) <- names
p <- case def of
(Function ty tm, inj, a, _, _) -> return (P Ref n' ty, a)
(TyDecl nt ty, _, a, _, _) -> return (P nt n' ty, a)
(CaseOp _ ty _ _ _ _, inj, a, _, _) -> return (P Ref n' ty, a)
(Operator ty _ _, inj, a, _, _) -> return (P Ref n' ty, a)
case snd p of
Hidden -> if all then return (fst p) else []
Private -> if all then return (fst p) else []
_ -> return (fst p)
where
names = let ns = lookupCtxtName n (definitions ctxt) in
if exact
then filter (\ (n', d) -> n' == n) ns
else ns
lookupDefExact :: Name -> Context -> Maybe Def
lookupDefExact n ctxt = tfst <$> lookupCtxtExact n (definitions ctxt)
lookupDef :: Name -> Context -> [Def]
lookupDef n ctxt = tfst <$> lookupCtxt n (definitions ctxt)
lookupNameDef :: Name -> Context -> [(Name, Def)]
lookupNameDef n ctxt = mapSnd tfst $ lookupCtxtName n (definitions ctxt)
where mapSnd f [] = []
mapSnd f ((x,y):xys) = (x, f y) : mapSnd f xys
lookupDefAcc :: Name -> Bool -> Context ->
[(Def, Accessibility)]
lookupDefAcc n mkpublic ctxt
= map mkp $ lookupCtxt n (definitions ctxt)
-- io_bind a special case for REPL prettiness
where mkp (d, inj, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_pure"))
then (d, Public) else (d, a)
lookupDefAccExact :: Name -> Bool -> Context ->
Maybe (Def, Accessibility)
lookupDefAccExact n mkpublic ctxt
= fmap mkp $ lookupCtxtExact n (definitions ctxt)
-- io_bind a special case for REPL prettiness
where mkp (d, inj, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_pure"))
then (d, Public) else (d, a)
lookupTotal :: Name -> Context -> [Totality]
lookupTotal n ctxt = map mkt $ lookupCtxt n (definitions ctxt)
where mkt (d, inj, a, t, m) = t
lookupTotalExact :: Name -> Context -> Maybe Totality
lookupTotalExact n ctxt = fmap mkt $ lookupCtxtExact n (definitions ctxt)
where mkt (d, inj, a, t, m) = t
lookupInjectiveExact :: Name -> Context -> Maybe Injectivity
lookupInjectiveExact n ctxt = fmap mkt $ lookupCtxtExact n (definitions ctxt)
where mkt (d, inj, a, t, m) = inj
-- Check if a name is reducible in the type checker. Partial definitions
-- are not reducible (so treated as a constant)
tcReducible :: Name -> Context -> Bool
tcReducible n ctxt = case lookupTotalExact n ctxt of
Nothing -> True
Just (Partial _) -> False
_ -> True
lookupMetaInformation :: Name -> Context -> [MetaInformation]
lookupMetaInformation n ctxt = map mkm $ lookupCtxt n (definitions ctxt)
where mkm (d, inj, a, t, m) = m
lookupNameTotal :: Name -> Context -> [(Name, Totality)]
lookupNameTotal n = map (\(n, (_, _, _, t, _)) -> (n, t)) . lookupCtxtName n . definitions
lookupVal :: Name -> Context -> [Value]
lookupVal n ctxt
= do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function _ htm) -> return (veval ctxt [] htm)
(TyDecl nt ty) -> return (VP nt n (veval ctxt [] ty))
_ -> []
lookupTyEnv :: Name -> Env -> Maybe (Int, Type)
lookupTyEnv n env = li n 0 env where
li n i [] = Nothing
li n i ((x, b): xs)
| n == x = Just (i, binderTy b)
| otherwise = li n (i+1) xs
-- | Create a unique name given context and other existing names
uniqueNameCtxt :: Context -> Name -> [Name] -> Name
uniqueNameCtxt ctxt n hs
| n `elem` hs = uniqueNameCtxt ctxt (nextName n) hs
| [_] <- lookupTy n ctxt = uniqueNameCtxt ctxt (nextName n) hs
| otherwise = n
uniqueBindersCtxt :: Context -> [Name] -> TT Name -> TT Name
uniqueBindersCtxt ctxt ns (Bind n b sc)
= let n' = uniqueNameCtxt ctxt n ns in
Bind n' (fmap (uniqueBindersCtxt ctxt (n':ns)) b) (uniqueBindersCtxt ctxt ns sc)
uniqueBindersCtxt ctxt ns (App s f a) = App s (uniqueBindersCtxt ctxt ns f) (uniqueBindersCtxt ctxt ns a)
uniqueBindersCtxt ctxt ns t = t
|
ben-schulz/Idris-dev
|
src/Idris/Core/Evaluate.hs
|
bsd-3-clause
| 52,168 | 0 | 28 | 19,303 | 18,211 | 9,304 | 8,907 | 927 | 80 |
module Codec.Crypto.DSA(
module Codec.Crypto.DSA.Exceptions
)
where
import Codec.Crypto.DSA.Exceptions
|
GaloisInc/DSA
|
src/Codec/Crypto/DSA.hs
|
bsd-3-clause
| 121 | 0 | 5 | 26 | 24 | 17 | 7 | 3 | 0 |
--
-- Copyright © 2013-2014 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Selecto.NagiosSearch where
import Chevalier.Types (SourceQuery (..))
import Chevalier.Util (buildFuzzyRequestFromPairs,buildFuzzyRequestTag)
import Control.Applicative
import Control.Concurrent hiding (yield)
import Control.Monad.IO.Class
import Data.ByteString.Lazy.Builder (stringUtf8)
import Data.Maybe
import Selecto.Util
import Snap.Core
import System.Timeout (timeout)
nagiosSearch :: MVar SourceQuery -> Snap ()
nagiosSearch chevalier_mvar = do
host <- utf8Or400 =<< fromMaybe "*" <$> getParam "host"
service <- utf8Or400 =<< fromMaybe "*" <$> getParam "service"
origin_alias <- getParam "origin" >>= (\o -> case o of
Just bs -> utf8Or400 bs
Nothing -> writeError 400 $ stringUtf8 "Must specify 'origin'")
address <- utf8Or400 =<< fromMaybe "*" <$> getParam "address"
page <- toInt <$> fromMaybe "0" <$> getParam "page"
page_size <- toInt <$> fromMaybe "64" <$> getParam "page_size"
let origin = case origin_alias of
"SYD1" -> "R82KX1"
"LAX1" -> "LMRH8C"
let query = buildFuzzyRequestTag "host" host --FromPairs [("host", host), ("service",service)]
maybe_response <- liftIO $ do
response_mvar <- newEmptyMVar
putMVar chevalier_mvar $
SourceQuery query address page page_size origin response_mvar
timeout chevalierTimeout $ takeMVar response_mvar
either_response <- maybe timeoutError return maybe_response
either chevalierError writeJSON either_response
where
chevalierTimeout = 10000000 -- 10 seconds
chevalierError e =
writeError 500 $ stringUtf8 ("Exception talking to chevalier backend" ++ show e)
timeoutError = do
let msg = "Timed out talking to chevalier backend"
writeError 500 $ stringUtf8 msg
|
glasnt/selecto
|
src/Selecto/NagiosSearch.hs
|
bsd-3-clause
| 2,187 | 0 | 15 | 477 | 468 | 233 | 235 | 40 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.Async.IO
( ioTestGroup
) where
import Control.Monad (when, void)
import Data.Maybe (isJust, isNothing)
import Control.Concurrent.Lifted
import Control.Exception.Lifted as E
#if MIN_VERSION_monad_control(1, 0, 0)
import Control.Concurrent.Async.Lifted.Safe
#else
import Control.Concurrent.Async.Lifted
#endif
import Test.Async.Common
ioTestGroup :: TestTree
ioTestGroup = $(testGroupGenerator)
case_async_waitCatch :: Assertion
case_async_waitCatch = do
a <- async (return value)
r <- waitCatch a
case r of
Left _ -> assertFailure ""
Right e -> e @?= value
case_async_wait :: Assertion
case_async_wait = do
a <- async (return value)
r <- wait a
assertEqual "async_wait" r value
case_async_exwaitCatch :: Assertion
case_async_exwaitCatch = do
a <- async (throwIO TestException)
r <- waitCatch a
case r of
Left e -> fromException e @?= Just TestException
Right _ -> assertFailure ""
case_async_exwait :: Assertion
case_async_exwait = do
a <- async (throwIO TestException)
(wait a >> assertFailure "") `E.catch` \e -> e @?= TestException
case_withAsync_waitCatch :: Assertion
case_withAsync_waitCatch = do
withAsync (return value) $ \a -> do
r <- waitCatch a
case r of
Left _ -> assertFailure ""
Right e -> e @?= value
case_withAsync_wait2 :: Assertion
case_withAsync_wait2 = do
a <- withAsync (threadDelay 1000000) $ return
r <- waitCatch a
case r of
Left e -> fromException e @?= Just ThreadKilled
Right _ -> assertFailure ""
case_async_cancel :: Assertion
case_async_cancel = sequence_ $ replicate 1000 run
where
run = do
a <- async (return value)
cancelWith a TestException
r <- waitCatch a
case r of
Left e -> fromException e @?= Just TestException
Right r' -> r' @?= value
case_async_poll :: Assertion
case_async_poll = do
a <- async (threadDelay 1000000)
r <- poll a
when (isJust r) $ assertFailure ""
r' <- poll a -- poll twice, just to check we don't deadlock
when (isJust r') $ assertFailure ""
case_async_poll2 :: Assertion
case_async_poll2 = do
a <- async (return value)
void $ wait a
r <- poll a
when (isNothing r) $ assertFailure ""
r' <- poll a -- poll twice, just to check we don't deadlock
when (isNothing r') $ assertFailure ""
|
dmjio/lifted-async
|
tests/Test/Async/IO.hs
|
bsd-3-clause
| 2,374 | 0 | 14 | 504 | 762 | 366 | 396 | 73 | 2 |
module CodeGeneration.CodeGenerator (generateCode) where
import Control.Monad.Reader
import Control.Monad.Writer
import Tree.HtmlTree
import CodeGeneration.JavascriptCode
import qualified Data.Map as Map
type CodeGeneration = WriterT String (Reader JavascriptCode) ()
-- | Generate the JS code to create the nodes
generateCode :: JavascriptCode -> [HtmlNode] -> String
generateCode js nodes = let (_, code) = runReader (runWriterT $ mapM_ (writeNode Nothing) nodes) js
in code
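-- To give a feel for the output (an illustrative sketch, not taken from the
-- original sources; the exact text-node argument depends on the
-- JavascriptCode dialect's stringTemplate), an HtmlElement named "el0" with
-- tag "div", no attributes and one text child named "t0" generates roughly:
--
-- > var el0 = document.createElement("div");
-- > var t0 = document.createTextNode(...);
-- > el0.appendChild(t0);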
writeNode :: Maybe HtmlNode -> HtmlNode -> CodeGeneration
-- | Write a comment node. Writes the comment as a JS comment
writeNode _ (HtmlComment text) = writeLine $ "/*" ++ text ++ " */"
-- | Write a text node. Creates the text node and appends it to it's parent
writeNode parent HtmlText { textVarName = Just varName, text = text'} = do
js <- lift ask
writeLine $ "var " ++ varName ++ " = document.createTextNode(" ++ stringTemplate js text' ++ ");"
appendToParent parent varName
-- | Write an element node
writeNode parent node@HtmlElement { elementVarName = Just varName, tag = tag',
children = children', attributes = attributes' } = do
js <- lift ask
writeLine $ "var " ++ varName ++ " = document.createElement(\"" ++ tag' ++ "\");"
writeAttributes varName attributes'
forM_ children' (writeNode (Just node))
appendToParent parent varName
-- | Write a repeated node
writeNode parent HtmlRepeatedElement { repeater = repeater', variable = variable',
node = node' } = do
js <- lift ask
(_, body) <- lift (runWriterT $ writeNode parent node')
writeLine $ repeater' ++ ".forEach(" ++ writeFunction js [variable'] body ++ ");"
-- | Write the code to append a node to its parent
appendToParent :: Maybe HtmlNode -> String -> CodeGeneration
appendToParent (Just HtmlElement { elementVarName = Just varName }) child =
writeLine $ varName ++ ".appendChild(" ++ child ++ ");"
appendToParent Nothing _ = return ()
-- | Write the code for the attributes of the node
writeAttributes :: String -> Map.Map String String -> CodeGeneration
writeAttributes varName = sequence_ . Map.foldrWithKey
(\k v acc -> (writeLine (varName ++ ".setAttribute(\"" ++ k ++ "\", \"" ++ v ++ "\");"):acc)) []
-- | Append a line to the current code
writeLine :: String -> CodeGeneration
writeLine = tell . (++ "\n")
|
sergioifg94/Hendoman
|
src/CodeGeneration/CodeGenerator.hs
|
bsd-3-clause
| 2,414 | 0 | 17 | 509 | 648 | 336 | 312 | 37 | 1 |
module Horbits.Orbit (module X, module Horbits.Orbit)
where
import Control.Lens
import Horbits.Body
import Horbits.Orbit.Class as X
import Horbits.Orbit.Data as X
import Horbits.Orbit.Geometry as X
import Horbits.Orbit.Position as X
import Horbits.Orbit.Properties as X
import Horbits.Orbit.Velocity as X
parentBodyId :: Fold BodyId BodyId
parentBodyId = bodyOrbit . orbitBodyId
|
chwthewke/horbits
|
src/horbits/Horbits/Orbit.hs
|
bsd-3-clause
| 481 | 0 | 5 | 147 | 95 | 64 | 31 | 11 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Harihara.Options
( parseOptions
, HariharaOptions (..)
, HariharaException (..)
) where
import Control.Exception
import Data.Foldable (foldrM)
import qualified Data.Set as S
import Data.Typeable (Typeable())
import System.Console.GetOpt
import System.Directory
import System.FilePath.Posix
import System.Exit
import Harihara.Log
-- Harihara Exceptions {{{
data HariharaException
= CantFreshDB String
| FileDoesNotExist String
| InvalidPath String
| NoFlagParse String String
| MissingLastfmConfig
deriving (Typeable)
instance Show HariharaException where
show e = case e of
CantFreshDB fp ->
"Can't make a fresh database with path " ++ show fp
FileDoesNotExist fp ->
"File does not exist: " ++ show fp
InvalidPath fp ->
"Not a valid filepath: " ++ show fp
NoFlagParse flag arg ->
"Couldn't parse arg for flag " ++ show flag ++ ": " ++ show arg
MissingLastfmConfig ->
"Can't use Lastfm function, config didn't define API key or Secret"
instance Exception HariharaException
-- }}}
-- HariharaOptions {{{
data HariharaOptions = HariharaOptions
{ optsLogLevel :: LogLevel
, optsFiles :: S.Set FilePath
, optsDBPath :: FilePath
, optsDBFresh :: Bool
} deriving (Show)
defaultOptions :: HariharaOptions
defaultOptions = HariharaOptions
{ optsLogLevel = LogInfo
, optsFiles = S.empty
, optsDBPath = ".harihara.db"
, optsDBFresh = False
}
--------
onOptsLogLevel :: (LogLevel -> LogLevel)
-> OptionsBuilder
onOptsLogLevel f o = return $ o { optsLogLevel = f $ optsLogLevel o }
setOptsLogLevel :: LogLevel -> OptionsBuilder
setOptsLogLevel ll = onOptsLogLevel $ const ll
--------
onOptsFiles :: (S.Set FilePath -> S.Set FilePath)
-> OptionsBuilder
onOptsFiles f o = return $ o { optsFiles = f $ optsFiles o }
--------
onOptsDBPath :: (FilePath -> FilePath) -> OptionsBuilder
onOptsDBPath f o = return $ o { optsDBPath = f $ optsDBPath o }
setOptsDBPath :: FilePath -> OptionsBuilder
setOptsDBPath fp = onOptsDBPath $ const fp
--------
onOptsDBFresh :: (Bool -> Bool) -> OptionsBuilder
onOptsDBFresh f o = return $ o { optsDBFresh = f $ optsDBFresh o }
setOptsDBFresh :: Bool -> OptionsBuilder
setOptsDBFresh b = onOptsDBFresh $ const b
-- }}}
-- GetOpt {{{
type OptionsBuilder = HariharaOptions -> IO HariharaOptions
parseOptions :: [String] -> IO HariharaOptions
parseOptions args =
case getOpt Permute testOpts args of
(fs, ps, []) -> mkOpts fs ps defaultOptions
(_ , _ , es) -> mapM_ putStrLn es >> usage
usage :: IO a
usage = do
putStrLn "Usage:"
putStr $ usageInfo "harihara [FLAGS] file1 file2 ..." testOpts
exitFailure
testOpts :: [OptDescr OptionsBuilder]
testOpts =
[ Option ['l'] ["log"]
(ReqArg logArg "NUM")
"Log level: 0/silent, 1/error, 2/warn, 3/info, 4/debug"
, Option ['d'] ["database"]
(ReqArg dbArg "FILE")
"Path to database"
, Option [] ["fresh-db"]
(NoArg freshArg)
"Drop the current database entirely"
]
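-- A hypothetical invocation, to illustrate how the flags above combine (the
-- file names and database path are made up for the example):
--
-- > harihara --log=debug --database=/tmp/test.db --fresh-db a.mp3 b.mp3
--
-- parses to optsLogLevel = LogDebug, optsDBPath = "/tmp/test.db",
-- optsDBFresh = True, and optsFiles containing both files (provided they
-- actually exist on disk).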
mkOpts :: [OptionsBuilder] -> [FilePath] -> OptionsBuilder
mkOpts fs ps = appBldrs $ fileBldr : fs
where
fileBldr = appBldrs $ map fileArg ps
appBldrs :: [OptionsBuilder] -> OptionsBuilder
appBldrs = flip $ foldrM ($)
freshArg :: OptionsBuilder
freshArg = setOptsDBFresh True
-- | Handle a file argument
fileArg :: String -> OptionsBuilder
fileArg fp o = do
fileExists <- doesFileExist fp
if fileExists
then onOptsFiles (S.insert fp) o
else throwIO $ FileDoesNotExist fp
-- | Handle a log level argument
logArg :: String -> OptionsBuilder
logArg arg o = case arg of
"0" -> setOptsLogLevel LogSilent o
"silent" -> setOptsLogLevel LogSilent o
"1" -> setOptsLogLevel LogError o
"error" -> setOptsLogLevel LogError o
"2" -> setOptsLogLevel LogWarn o
"warn" -> setOptsLogLevel LogWarn o
"3" -> setOptsLogLevel LogInfo o
"info" -> setOptsLogLevel LogInfo o
"4" -> setOptsLogLevel LogDebug o
"debug" -> setOptsLogLevel LogDebug o
_ -> throwIO $ NoFlagParse "LogLevel" arg
-- | Handle a DB path argument
dbArg :: String -> OptionsBuilder
dbArg fp o = if isValid fp
then setOptsDBPath fp o
else throwIO $ InvalidPath fp
-- }}}
|
kylcarte/harihara
|
src/Harihara/Options.hs
|
bsd-3-clause
| 4,324 | 0 | 12 | 929 | 1,156 | 613 | 543 | 115 | 11 |
{-# LANGUAGE
OverloadedStrings
#-}
module Application where
import Web.Routes.Nested
import Network.HTTP.Types
defApp :: Application
defApp _ respond = respond (textOnlyStatus status404 "404 Not Found")
|
athanclark/clark-mining-tech
|
src/Application.hs
|
bsd-3-clause
| 213 | 0 | 7 | 34 | 43 | 25 | 18 | 7 | 1 |
module Test.Collision (testCollision) where
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
import Data.List hiding (insert)
import Collision
import Environment
import Test.ArbitraryInstances
testCollision =
testGroup "Collision" [testProperty "same result as naive" pNaive]
naiveDetection :: AABB -> [AABB] -> [AABB]
naiveDetection aabb aabbs =
filter (intersectAABB aabb) aabbs
pNaive :: AABB -> [AABB] -> Property
pNaive aabb aabbs =
let
qtree = foldr insert (empty worldSize 4) aabbs
naiveResult = sort $ naiveDetection aabb aabbs
qtreeResult =
sort
$ queryIntersecting aabb qtree
showResults = do
putStrLn $ "QuadTree: " ++ show qtree
putStrLn $ "Expected: " ++ show naiveResult ++ ", actual: " ++ show qtreeResult
in
whenFail showResults
$ naiveResult == qtreeResult
|
alexisVallet/haskell-shmup
|
Test/Collision.hs
|
bsd-3-clause
| 872 | 0 | 14 | 168 | 243 | 127 | 116 | 26 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module LogMan.Processor
( run
) where
import Control.Monad.State
import LogMan.Filters
import LogMan.LogEntry
import LogMan.LogFile
import LogMan.Options
import LogMan.Output
processEntries :: (MonadIO m, MonadState Options m) => [String] -> m ()
processEntries n = do
es <- readLogEntries n
applyFilters es >>= writeOutput
run :: [String] -> IO ()
run argv = do
(options, n) <- parseOptions argv
runStateT (processEntries n) options
return ()
|
cwmunn/logman
|
src/LogMan/Processor.hs
|
bsd-3-clause
| 500 | 0 | 9 | 89 | 168 | 87 | 81 | 18 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveFunctor #-}
module Parser241.Parser.ProductionRule.Internal.Manager where
import Parser241.Parser.ProductionRule.Internal.Maker
import Control.Monad.Writer (Writer(..), runWriter, tell, MonadWriter(..))
import Parser241.Parser.ProductionRule.Internal
import Data.Set as S (Set)
import Data.Map as M (Map, fromList)
import qualified Data.Set as S (fromList)
import Control.Monad (mzero)
newtype Manager' a x = Manager {
unManager :: Writer [Maker a] x
} deriving (Functor, Applicative, Monad, MonadWriter [Maker a])
type Manager a = Manager' a ()
getMakers :: Manager a -> [Maker a]
getMakers m = snd $ runWriter $ unManager m
addMakers :: [Maker a] -> Manager a -> Manager a
addMakers ls m = m >> tell ls
empty :: Manager a
empty = Manager $ tell []
singleton :: Maker a -> Manager a
singleton a = addMakers [a] empty
getRules :: (Ord a) => Manager a -> Set a -> [Rule a]
getRules a nts = do
maker <- getMakers a
let (lhs, rhsLs) = unMaker maker
return $ rule lhs [ reverse $ map (`setSym` nts) rhs | rhs <- rhsLs ]
getNTs :: (Ord a) => Manager a -> Set a
getNTs a = S.fromList $ do
make <- getMakers a
case unMaker make of
(NT x, _) -> return x
_ -> mzero
-- | Collects the defined syntax and produces a list of production rules.
rules :: (Ord a) => Manager a -> [Rule a]
rules a = getRules a $ getNTs a
-- | Collects the defined syntax and produces a map of production rules.
--
-- This is equivalent to `Map.fromList . rules`
ruleMap :: (Ord a) => Manager a -> RuleMap a
ruleMap = M.fromList . rules
instance FromMaker Manager' where
fromMaker = singleton
|
YLiLarry/parser241-production-rule
|
src/Parser241/Parser/ProductionRule/Internal/Manager.hs
|
bsd-3-clause
| 1,721 | 0 | 12 | 346 | 594 | 320 | 274 | 40 | 2 |
-- Copyright 2021 Google LLC
--
-- Use of this source code is governed by a BSD-style
-- license that can be found in the LICENSE file or at
-- https://developers.google.com/open-source/licenses/bsd
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ConstraintKinds #-}
module Err (Err (..), Errs (..), ErrType (..), Except (..), ErrCtx (..),
SrcPosCtx, SrcTextCtx, SrcPos,
Fallible (..), Catchable (..), catchErrExcept,
FallibleM (..), HardFailM (..), CtxReader (..),
runFallibleM, runHardFail, throw, throwErr, throwIf,
addContext, addSrcContext, addSrcTextContext,
catchIOExcept, liftExcept, liftMaybe, liftMaybeErr,
assertEq, ignoreExcept, isSuccess, exceptToMaybe,
pprint, docAsStr, asCompilerErr,
FallibleApplicativeWrapper, traverseMergingErrs, liftFallibleM,
SearcherM (..), Searcher (..), runSearcherM) where
import Control.Exception hiding (throw)
import Control.Applicative
import Control.Monad
import Control.Monad.Trans.Maybe
import Control.Monad.Identity
import Control.Monad.Writer.Strict
import Control.Monad.State.Strict
import Control.Monad.Reader
import Data.Text (unpack)
import Data.Text.Prettyprint.Doc.Render.Text
import Data.Text.Prettyprint.Doc
import GHC.Stack
import System.Environment
import System.IO.Unsafe
-- === core API ===
data Err = Err ErrType ErrCtx String deriving (Show, Eq)
newtype Errs = Errs [Err] deriving (Eq, Semigroup, Monoid)
data ErrType = NoErr
| ParseErr
| TypeErr
| KindErr
| LinErr
| VarDefErr
| UnboundVarErr
| AmbiguousVarErr
| RepeatedVarErr
| RepeatedPatVarErr
| InvalidPatternErr
| CompilerErr
| IRVariantErr
| NotImplementedErr
| DataIOErr
| MiscErr
| RuntimeErr
| ZipErr
| EscapedNameErr
| ModuleImportErr
| MonadFailErr
deriving (Show, Eq)
type SrcPosCtx = Maybe SrcPos
type SrcTextCtx = Maybe (Int, String) -- Int is the offset in the source file
data ErrCtx = ErrCtx
{ srcTextCtx :: SrcTextCtx
, srcPosCtx :: SrcPosCtx
, messageCtx :: [String]
, stackCtx :: Maybe [String] }
deriving (Show, Eq)
type SrcPos = (Int, Int)
class MonadFail m => Fallible m where
throwErrs :: Errs -> m a
addErrCtx :: ErrCtx -> m a -> m a
class Fallible m => Catchable m where
catchErr :: m a -> (Errs -> m a) -> m a
catchErrExcept :: Catchable m => m a -> m (Except a)
catchErrExcept m = catchErr (Success <$> m) (\e -> return $ Failure e)
-- We have this in its own class because IO and `Except` can't implement it
-- (but FallibleM can)
class Fallible m => CtxReader m where
getErrCtx :: m ErrCtx
-- We have this in its own class because StateT can't implement it
-- (but FallibleM, Except and IO all can)
class Fallible m => FallibleApplicative m where
mergeErrs :: m a -> m b -> m (a, b)
newtype FallibleM a =
FallibleM { fromFallibleM :: ReaderT ErrCtx Except a }
deriving (Functor, Applicative, Monad)
instance Fallible FallibleM where
throwErrs (Errs errs) = FallibleM $ ReaderT \ambientCtx ->
throwErrs $ Errs [Err errTy (ambientCtx <> ctx) s | Err errTy ctx s <- errs]
addErrCtx ctx (FallibleM m) = FallibleM $ local (<> ctx) m
instance Catchable FallibleM where
FallibleM m `catchErr` handler = FallibleM $ ReaderT \ctx ->
case runReaderT m ctx of
Failure errs -> runReaderT (fromFallibleM $ handler errs) ctx
Success ans -> return ans
instance FallibleApplicative FallibleM where
mergeErrs (FallibleM (ReaderT f1)) (FallibleM (ReaderT f2)) =
FallibleM $ ReaderT \ctx -> mergeErrs (f1 ctx) (f2 ctx)
instance CtxReader FallibleM where
getErrCtx = FallibleM ask
instance Fallible IO where
throwErrs errs = throwIO errs
addErrCtx ctx m = do
result <- catchIOExcept m
liftExcept $ addErrCtx ctx result
instance Catchable IO where
catchErr cont handler =
catchIOExcept cont >>= \case
Success result -> return result
Failure errs -> handler errs
instance FallibleApplicative IO where
mergeErrs m1 m2 = do
result1 <- catchIOExcept m1
result2 <- catchIOExcept m2
liftExcept $ mergeErrs result1 result2
runFallibleM :: FallibleM a -> Except a
runFallibleM m = runReaderT (fromFallibleM m) mempty
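-- A small illustration (added here, not part of the original source):
--
-- > runFallibleM (return 42)                        == Success 42
-- > isSuccess (runFallibleM (throw MiscErr "boom")) == False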
-- === Except type ===
-- Except is isomorphic to `Either Errs` but having a distinct type makes it
-- easier to debug type errors.
data Except a =
Failure Errs
| Success a
deriving (Show, Eq)
instance Functor Except where
fmap = liftM
instance Applicative Except where
pure = return
liftA2 = liftM2
instance Monad Except where
return = Success
Failure errs >>= _ = Failure errs
Success x >>= f = f x
-- === FallibleApplicativeWrapper ===
-- Wraps a Fallible monad, presenting an applicative interface that sequences
-- actions using the error-concatenating `mergeErrs` instead of the default
-- abort-on-failure sequencing.
newtype FallibleApplicativeWrapper m a =
FallibleApplicativeWrapper { fromFallibleApplicativeWrapper :: m a }
deriving (Functor)
instance FallibleApplicative m => Applicative (FallibleApplicativeWrapper m) where
pure x = FallibleApplicativeWrapper $ pure x
liftA2 f (FallibleApplicativeWrapper m1) (FallibleApplicativeWrapper m2) =
FallibleApplicativeWrapper $ fmap (uncurry f) (mergeErrs m1 m2)
-- === HardFail ===
-- Implements Fallible by crashing. Used in type querying when we want to avoid
-- work by trusting decl annotations and skipping the checks.
newtype HardFailM a =
HardFailM { runHardFail' :: Identity a }
deriving (Functor, Applicative, Monad)
runHardFail :: HardFailM a -> a
runHardFail m = runIdentity $ runHardFail' m
instance MonadFail HardFailM where
fail s = error s
instance Fallible HardFailM where
throwErrs errs = error $ pprint errs
addErrCtx _ cont = cont
instance FallibleApplicative HardFailM where
mergeErrs cont1 cont2 = (,) <$> cont1 <*> cont2
-- === convenience layer ===
throw :: Fallible m => ErrType -> String -> m a
throw errTy s = throwErrs $ Errs [addCompilerStackCtx $ Err errTy mempty s]
throwErr :: Fallible m => Err -> m a
throwErr err = throwErrs $ Errs [addCompilerStackCtx err]
addCompilerStackCtx :: Err -> Err
addCompilerStackCtx (Err ty ctx msg) = Err ty ctx{stackCtx = compilerStack} msg
#ifdef DEX_DEBUG
where compilerStack = Just $! reverse $ unsafePerformIO currentCallStack
#else
where compilerStack = stackCtx ctx
#endif
throwIf :: Fallible m => Bool -> ErrType -> String -> m ()
throwIf True e s = throw e s
throwIf False _ _ = return ()
addContext :: Fallible m => String -> m a -> m a
addContext s m = addErrCtx (mempty {messageCtx = [s]}) m
addSrcContext :: Fallible m => SrcPosCtx -> m a -> m a
addSrcContext ctx m = addErrCtx (mempty {srcPosCtx = ctx}) m
addSrcTextContext :: Fallible m => Int -> String -> m a -> m a
addSrcTextContext offset text m =
addErrCtx (mempty {srcTextCtx = Just (offset, text)}) m
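-- As an illustrative sketch of how these combinators nest (the message and
-- condition are made up):
--
-- > addContext "while checking the main function" $
-- >   throwIf (n < 0) MiscErr "negative arity"
--
-- attaches the surrounding message to any error raised by the inner action.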
catchIOExcept :: MonadIO m => IO a -> m (Except a)
catchIOExcept m = liftIO $ (liftM Success m) `catches`
[ Handler \(e::Errs) -> return $ Failure e
, Handler \(e::IOError) -> return $ Failure $ Errs [Err DataIOErr mempty $ show e]
, Handler \(e::SomeException) -> return $ Failure $ Errs [Err CompilerErr mempty $ show e]
]
liftMaybe :: MonadFail m => Maybe a -> m a
liftMaybe Nothing = fail ""
liftMaybe (Just x) = return x
liftMaybeErr :: Fallible m => ErrType -> String -> Maybe a -> m a
liftMaybeErr err s Nothing = throw err s
liftMaybeErr _ _ (Just x) = return x
liftExcept :: Fallible m => Except a -> m a
liftExcept (Failure errs) = throwErrs errs
liftExcept (Success ans) = return ans
liftFallibleM :: Fallible m => FallibleM a -> m a
liftFallibleM m = liftExcept $ runFallibleM m
ignoreExcept :: HasCallStack => Except a -> a
ignoreExcept (Failure e) = error $ pprint e
ignoreExcept (Success x) = x
isSuccess :: Except a -> Bool
isSuccess (Success _) = True
isSuccess (Failure _) = False
exceptToMaybe :: Except a -> Maybe a
exceptToMaybe (Success a) = Just a
exceptToMaybe (Failure _) = Nothing
assertEq :: (HasCallStack, Fallible m, Show a, Pretty a, Eq a) => a -> a -> String -> m ()
assertEq x y s = if x == y then return ()
else throw CompilerErr msg
where msg = "assertion failure (" ++ s ++ "):\n"
++ pprint x ++ " != " ++ pprint y ++ "\n\n"
++ prettyCallStack callStack ++ "\n"
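-- For example (illustrative only), @assertEq (length xs) n "arity check"@
-- succeeds silently when the two values agree and otherwise throws a
-- CompilerErr showing both of them.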
-- TODO: think about the best way to handle these. This is just a
-- backwards-compatibility shim.
asCompilerErr :: Fallible m => m a -> m a
asCompilerErr cont = addContext "(This is a compiler error!)" cont
-- === search monad ===
infix 0 <!>
class (Monad m, Alternative m) => Searcher m where
-- Runs the second computation when the first yields an empty set of results.
-- This is just `<|>` for greedy searchers like `Maybe`, but in other cases,
-- like the list monad, it matters that the second computation isn't run if
-- the first succeeds.
(<!>) :: m a -> m a -> m a
-- Adds an extra error case to `FallibleM` so we can give it an Alternative
-- instance with an identity element.
newtype SearcherM a = SearcherM { runSearcherM' :: MaybeT FallibleM a }
deriving (Functor, Applicative, Monad)
runSearcherM :: SearcherM a -> Except (Maybe a)
runSearcherM m = runFallibleM $ runMaybeT (runSearcherM' m)
instance MonadFail SearcherM where
fail _ = SearcherM $ MaybeT $ return Nothing
instance Fallible SearcherM where
throwErrs e = SearcherM $ lift $ throwErrs e
addErrCtx ctx (SearcherM (MaybeT m)) = SearcherM $ MaybeT $
addErrCtx ctx $ m
instance Alternative SearcherM where
empty = SearcherM $ MaybeT $ return Nothing
SearcherM (MaybeT m1) <|> SearcherM (MaybeT m2) = SearcherM $ MaybeT do
m1 >>= \case
Just ans -> return $ Just ans
Nothing -> m2
instance Searcher SearcherM where
(<!>) = (<|>)
instance CtxReader SearcherM where
getErrCtx = SearcherM $ lift getErrCtx
instance Searcher [] where
[] <!> m = m
m <!> _ = m
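-- With the list instance just above (examples added for illustration):
--
-- > ([1,2] <!> [3]) == [1,2]    -- first search succeeded, second is skipped
-- > ([]    <!> [3]) == [3]
-- > ([1,2] <|> [3]) == [1,2,3]  -- plain Alternative keeps both result sets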
instance (Monoid w, Searcher m) => Searcher (WriterT w m) where
WriterT m1 <!> WriterT m2 = WriterT (m1 <!> m2)
instance (Monoid w, Fallible m) => Fallible (WriterT w m) where
throwErrs errs = lift $ throwErrs errs
addErrCtx ctx (WriterT m) = WriterT $ addErrCtx ctx m
instance Fallible [] where
throwErrs _ = []
addErrCtx _ m = m
instance Fallible Maybe where
throwErrs _ = Nothing
addErrCtx _ m = m
-- === small pretty-printing utils ===
-- These are here instead of in PPrint.hs for import cycle reasons
pprint :: Pretty a => a -> String
pprint x = docAsStr $ pretty x
docAsStr :: Doc ann -> String
docAsStr doc = unpack $ renderStrict $ layoutPretty layout $ doc
layout :: LayoutOptions
layout = if unbounded then LayoutOptions Unbounded else defaultLayoutOptions
where unbounded = unsafePerformIO $ (Just "1"==) <$> lookupEnv "DEX_PPRINT_UNBOUNDED"
traverseMergingErrs :: (Traversable f, FallibleApplicative m)
=> (a -> m b) -> f a -> m (f b)
traverseMergingErrs f xs =
fromFallibleApplicativeWrapper $ traverse (\x -> FallibleApplicativeWrapper $ f x) xs
-- === instances ===
instance MonadFail FallibleM where
fail s = throw MonadFailErr s
instance Fallible Except where
throwErrs errs = Failure errs
addErrCtx _ (Success ans) = Success ans
addErrCtx ctx (Failure (Errs errs)) =
Failure $ Errs [Err errTy (ctx <> ctx') s | Err errTy ctx' s <- errs]
instance FallibleApplicative Except where
mergeErrs (Success x) (Success y) = Success (x, y)
mergeErrs x y = Failure (getErrs x <> getErrs y)
where getErrs :: Except a -> Errs
getErrs = \case Failure e -> e
Success _ -> mempty
instance MonadFail Except where
fail s = Failure $ Errs [Err CompilerErr mempty s]
instance Exception Errs
instance Show Errs where
show errs = pprint errs
instance Pretty Err where
pretty (Err e ctx s) = pretty e <> pretty s <> prettyCtx
    -- TODO: figure out a more uniform way to handle newlines
where prettyCtx = case ctx of
ErrCtx _ Nothing [] Nothing -> mempty
_ -> hardline <> pretty ctx
instance Pretty ErrCtx where
pretty (ErrCtx maybeTextCtx maybePosCtx messages stack) =
-- The order of messages is outer-scope-to-inner-scope, but we want to print
-- them starting the other way around (Not for a good reason. It's just what
-- we've always done.)
prettyLines (reverse messages) <> highlightedSource <> prettyStack
where
highlightedSource = case (maybeTextCtx, maybePosCtx) of
(Just (offset, text), Just (start, stop)) ->
hardline <> pretty (highlightRegion (start - offset, stop - offset) text)
_ -> mempty
prettyStack = case stack of
Nothing -> mempty
Just s -> hardline <> "Compiler stack trace:" <> nest 2 (hardline <> prettyLines s)
instance Pretty a => Pretty (Except a) where
pretty (Success x) = "Success:" <+> pretty x
pretty (Failure e) = "Failure:" <+> pretty e
instance Pretty ErrType where
pretty e = case e of
-- NoErr tags a chunk of output that was promoted into the Err ADT
-- by appending Results.
NoErr -> ""
ParseErr -> "Parse error:"
TypeErr -> "Type error:"
KindErr -> "Kind error:"
LinErr -> "Linearity error: "
IRVariantErr -> "Internal IR validation error: "
VarDefErr -> "Error in (earlier) definition of variable: "
UnboundVarErr -> "Error: variable not in scope: "
AmbiguousVarErr -> "Error: ambiguous variable: "
RepeatedVarErr -> "Error: variable already defined: "
RepeatedPatVarErr -> "Error: variable already defined within pattern: "
InvalidPatternErr -> "Error: not a valid pattern: "
NotImplementedErr ->
"Not implemented:" <> line <>
"Please report this at github.com/google-research/dex-lang/issues\n" <> line
CompilerErr ->
"Compiler bug!" <> line <>
"Please report this at github.com/google-research/dex-lang/issues\n" <> line
DataIOErr -> "IO error: "
MiscErr -> "Error:"
RuntimeErr -> "Runtime error"
ZipErr -> "Zipping error"
EscapedNameErr -> "Leaked local variables:"
ModuleImportErr -> "Module import error: "
MonadFailErr -> "MonadFail error (internal error)"
instance Fallible m => Fallible (ReaderT r m) where
throwErrs errs = lift $ throwErrs errs
addErrCtx ctx (ReaderT f) = ReaderT \r -> addErrCtx ctx $ f r
instance Catchable m => Catchable (ReaderT r m) where
ReaderT f `catchErr` handler = ReaderT \r ->
f r `catchErr` \e -> runReaderT (handler e) r
instance FallibleApplicative m => FallibleApplicative (ReaderT r m) where
mergeErrs (ReaderT f1) (ReaderT f2) =
ReaderT \r -> mergeErrs (f1 r) (f2 r)
instance CtxReader m => CtxReader (ReaderT r m) where
getErrCtx = lift getErrCtx
instance Pretty Errs where
pretty (Errs [err]) = pretty err
pretty (Errs errs) = prettyLines errs
instance Fallible m => Fallible (StateT s m) where
throwErrs errs = lift $ throwErrs errs
addErrCtx ctx (StateT f) = StateT \s -> addErrCtx ctx $ f s
instance Catchable m => Catchable (StateT s m) where
StateT f `catchErr` handler = StateT \s ->
f s `catchErr` \e -> runStateT (handler e) s
instance CtxReader m => CtxReader (StateT s m) where
getErrCtx = lift getErrCtx
instance Semigroup ErrCtx where
ErrCtx text pos ctxStrs stk <> ErrCtx text' pos' ctxStrs' stk' =
ErrCtx (leftmostJust text text')
(rightmostJust pos pos' )
(ctxStrs <> ctxStrs')
           (leftmostJust stk stk') -- We usually extend errors from the right
instance Monoid ErrCtx where
mempty = ErrCtx Nothing Nothing [] Nothing
-- === misc util stuff ===
leftmostJust :: Maybe a -> Maybe a -> Maybe a
leftmostJust (Just x) _ = Just x
leftmostJust Nothing y = y
rightmostJust :: Maybe a -> Maybe a -> Maybe a
rightmostJust = flip leftmostJust
prettyLines :: (Foldable f, Pretty a) => f a -> Doc ann
prettyLines xs = foldMap (\d -> pretty d <> hardline) xs
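-- A worked example for the function below (added for illustration):
-- highlighting columns 2-5 of a one-line source yields the line followed by
-- a caret marker,
--
-- > highlightRegion (2, 5) "hello world" == "hello world\n  ^^^\n"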
highlightRegion :: (Int, Int) -> String -> String
highlightRegion pos@(low, high) s
| low > high || high > length s = error $ "Bad region: \n"
++ show pos ++ "\n" ++ s
| otherwise =
-- TODO: flag to control line numbers
-- (disabling for now because it makes quine tests tricky)
-- "Line " ++ show (1 + lineNum) ++ "\n"
allLines !! lineNum ++ "\n"
++ take start (repeat ' ') ++ take (stop - start) (repeat '^') ++ "\n"
where
allLines = lines s
(lineNum, start, stop) = getPosTriple pos allLines
getPosTriple :: (Int, Int) -> [String] -> (Int, Int, Int)
getPosTriple (start, stop) lines_ = (lineNum, start - offset, stop')
where
lineLengths = map ((+1) . length) lines_
lineOffsets = cumsum lineLengths
lineNum = maxLT lineOffsets start
offset = lineOffsets !! lineNum
stop' = min (stop - offset) (lineLengths !! lineNum)
cumsum :: [Int] -> [Int]
cumsum xs = scanl (+) 0 xs
maxLT :: Ord a => [a] -> a -> Int
maxLT [] _ = 0
maxLT (x:xs) n = if n < x then -1
else 1 + maxLT xs n
|
google-research/dex-lang
|
src/lib/Err.hs
|
bsd-3-clause
| 17,510 | 0 | 16 | 4,122 | 5,326 | 2,736 | 2,590 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
module Signal.Core.Stream where
import Control.Monad.Operational.Compositional
import Control.Applicative
import Control.Monad
import Prelude ((.), ($))
--------------------------------------------------------------------------------
-- * Streams
--------------------------------------------------------------------------------
-- | Imperative model of co-iterative streams
data Stream (instr :: (* -> *) -> * -> *) (a :: *)
where
Stream :: Program instr (Program instr a) -> Stream instr a
-- | Shorthand for streams which produce values of type `IExp instr a`
type Str instr a = Stream instr (IExp instr a)
--------------------------------------------------------------------------------
-- **
-- | Constant stream: produce the same value on every transition.
repeat :: (e ~ IExp instr) => e a -> Str instr a
repeat = Stream . return . return
-- | Map a function over every value produced by a stream.
map :: (e ~ IExp instr) => (e a -> e b) -> Str instr a -> Str instr b
map f (Stream s) = Stream $ fmap (fmap f) s
--------------------------------------------------------------------------------
-- **
-- | Run stream to produce transition action
run :: Stream instr a -> Program instr a
run (Stream init) = join init
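-- For example (illustrative), @run (repeat x)@ reduces to @return x@: the
-- initialisation step does nothing and every transition yields @x@.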
--------------------------------------------------------------------------------
|
markus-git/signal
|
src/Signal/Core/Stream.hs
|
bsd-3-clause
| 1,276 | 0 | 9 | 199 | 296 | 166 | 130 | 16 | 1 |
module Sound.Synthesis (module X) where
import Sound.Synthesis.Amplitude as X
import Sound.Synthesis.Combinator as X
import Sound.Synthesis.Constant as X
import Sound.Synthesis.Envelope as X
import Sound.Synthesis.Frequency as X
import Sound.Synthesis.Interval as X
import Sound.Synthesis.Type as X
import Sound.Synthesis.Wave as X
import Sound.Synthesis.Write as X
|
pskrz/Synthesis
|
src/Sound/Synthesis.hs
|
bsd-3-clause
| 366 | 0 | 4 | 41 | 84 | 62 | 22 | 10 | 0 |
module OpenCV.Unsafe
( unsafeCoerceMat
, unsafeCoerceMatM
-- * Mutable Matrix
, unsafeFreeze
, unsafeThaw
, unsafeRead
, unsafeWrite
) where
import "base" Foreign.Storable ( Storable, peek, poke )
import "primitive" Control.Monad.Primitive
( PrimMonad, PrimState, unsafePrimToPrim )
import "this" OpenCV.Internal.Core.Types.Mat
import "this" OpenCV.Internal.Mutable
unsafeRead
:: (PrimMonad m, Storable value)
=> Mut (Mat shape channels depth) (PrimState m)
-> [Int]
-> m value
unsafeRead matM pos =
unsafePrimToPrim $ withMatData (unMut matM) $ \step dataPtr ->
let elemPtr = matElemAddress dataPtr (fromIntegral <$> step) pos
in peek elemPtr
unsafeWrite
:: (PrimMonad m, Storable value)
=> Mut (Mat shape channels depth) (PrimState m)
-> [Int] -> value -> m ()
unsafeWrite matM pos value =
unsafePrimToPrim $ withMatData (unMut matM) $ \step dataPtr ->
let elemPtr = matElemAddress dataPtr (fromIntegral <$> step) pos
in poke elemPtr value
|
Cortlandd/haskell-opencv
|
src/OpenCV/Unsafe.hs
|
bsd-3-clause
| 1,043 | 0 | 13 | 235 | 322 | 174 | 148 | -1 | -1 |
-- | The main prover loop.
{-# LANGUAGE RecordWildCards, MultiParamTypeClasses, GADTs, BangPatterns, OverloadedStrings, ScopedTypeVariables, GeneralizedNewtypeDeriving, PatternGuards, TypeFamilies #-}
module Twee where
import Twee.Base
import Twee.Rule hiding (normalForms)
import qualified Twee.Rule as Rule
import Twee.Equation
import qualified Twee.Proof as Proof
import Twee.Proof(Axiom(..), Proof(..), ProvedGoal(..), provedGoal, certify, derivation)
import Twee.CP hiding (Config)
import qualified Twee.CP as CP
import Twee.Join hiding (Config, defaultConfig)
import qualified Twee.Join as Join
import qualified Twee.Rule.Index as RuleIndex
import Twee.Rule.Index(RuleIndex(..))
import qualified Twee.Index as Index
import Twee.Index(Index)
import Twee.Constraints
import Twee.Utils
import Twee.Task
import qualified Twee.PassiveQueue as Queue
import Twee.PassiveQueue(Queue, Passive(..))
import qualified Data.IntMap.Strict as IntMap
import Data.IntMap(IntMap)
import Data.Maybe
import Data.List
import Data.Function
import qualified Data.Map.Strict as Map
import Data.Map(Map)
import Data.Int
import Data.Ord
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import qualified Control.Monad.Trans.State.Strict as StateM
----------------------------------------------------------------------
-- * Configuration and prover state.
----------------------------------------------------------------------
-- | The prover configuration.
data Config f =
Config {
cfg_accept_term :: Maybe (Term f -> Bool),
cfg_max_critical_pairs :: Int64,
cfg_max_cp_depth :: Int,
cfg_simplify :: Bool,
cfg_renormalise_percent :: Int,
cfg_cp_sample_size :: Int,
cfg_renormalise_threshold :: Int,
cfg_set_join_goals :: Bool,
cfg_always_simplify :: Bool,
cfg_critical_pairs :: CP.Config,
cfg_join :: Join.Config,
cfg_proof_presentation :: Proof.Config f }
-- | The prover state.
data State f =
State {
st_rules :: !(RuleIndex f (ActiveRule f)),
st_active_ids :: !(IntMap (Active f)),
st_rule_ids :: !(IntMap (ActiveRule f)),
st_joinable :: !(Index f (Equation f)),
st_goals :: ![Goal f],
st_queue :: !(Queue Params),
st_next_active :: {-# UNPACK #-} !Id,
st_next_rule :: {-# UNPACK #-} !RuleId,
st_considered :: {-# UNPACK #-} !Int64,
st_simplified_at :: {-# UNPACK #-} !Id,
st_cp_sample :: ![Maybe (Overlap f)],
st_cp_next_sample :: ![(Integer, Int)],
st_num_cps :: !Integer,
st_messages_rev :: ![Message f] }
-- | The default prover configuration.
defaultConfig :: Config f
defaultConfig =
Config {
cfg_accept_term = Nothing,
cfg_max_critical_pairs = maxBound,
cfg_max_cp_depth = maxBound,
cfg_simplify = True,
cfg_renormalise_percent = 5,
cfg_renormalise_threshold = 20,
cfg_cp_sample_size = 100,
cfg_set_join_goals = True,
cfg_always_simplify = False,
cfg_critical_pairs = CP.defaultConfig,
cfg_join = Join.defaultConfig,
cfg_proof_presentation = Proof.defaultConfig }
-- | Does this configuration run the prover in a complete mode?
configIsComplete :: Config f -> Bool
configIsComplete Config{..} =
isNothing (cfg_accept_term) &&
cfg_max_critical_pairs == maxBound &&
cfg_max_cp_depth == maxBound
-- | The initial state.
initialState :: Config f -> State f
initialState Config{..} =
State {
st_rules = RuleIndex.empty,
st_active_ids = IntMap.empty,
st_rule_ids = IntMap.empty,
st_joinable = Index.empty,
st_goals = [],
st_queue = Queue.empty,
st_next_active = 1,
st_next_rule = 0,
st_considered = 0,
st_simplified_at = 1,
st_cp_sample = [],
st_cp_next_sample = reservoir cfg_cp_sample_size,
st_num_cps = 0,
st_messages_rev = [] }
----------------------------------------------------------------------
-- * Messages.
----------------------------------------------------------------------
-- | A message which is produced by the prover when something interesting happens.
data Message f =
-- | A new rule.
NewActive !(Active f)
-- | A new joinable equation.
| NewEquation !(Equation f)
-- | A rule was deleted.
| DeleteActive !(Active f)
-- | The CP queue was simplified.
| SimplifyQueue
-- | The rules were reduced wrt each other.
| Interreduce
-- | Status update: how many queued critical pairs there are.
| Status !Int
instance Function f => Pretty (Message f) where
pPrint (NewActive rule) = pPrint rule
pPrint (NewEquation eqn) =
text " (hard)" <+> pPrint eqn
pPrint (DeleteActive rule) =
text " (delete rule " <#> pPrint (active_id rule) <#> text ")"
pPrint SimplifyQueue =
text " (simplifying queued critical pairs...)"
pPrint Interreduce =
text " (simplifying rules with respect to one another...)"
pPrint (Status n) =
text " (" <#> pPrint n <+> text "queued critical pairs)"
-- | Emit a message.
message :: PrettyTerm f => Message f -> State f -> State f
message !msg state@State{..} =
state { st_messages_rev = msg:st_messages_rev }
-- | Forget about all emitted messages.
clearMessages :: State f -> State f
clearMessages state@State{..} =
state { st_messages_rev = [] }
-- | Get all emitted messages.
messages :: State f -> [Message f]
messages state = reverse (st_messages_rev state)
----------------------------------------------------------------------
-- * The CP queue.
----------------------------------------------------------------------
data Params
instance Queue.Params Params where
type Score Params = Int
type Id Params = RuleId
type PackedId Params = Int32
type PackedScore Params = Int32
packScore _ = fromIntegral
unpackScore _ = fromIntegral
packId _ = fromIntegral
unpackId _ = fromIntegral
-- | Compute all critical pairs from a rule.
{-# INLINEABLE makePassives #-}
{-# SCC makePassives #-}
makePassives :: Function f => Config f -> State f -> ActiveRule f -> [Passive Params]
makePassives Config{..} State{..} rule =
[ Passive (fromIntegral (score cfg_critical_pairs o)) (rule_rid rule1) (rule_rid rule2) (fromIntegral (overlap_pos o))
| (rule1, rule2, o) <- overlaps (Depth cfg_max_cp_depth) (index_oriented st_rules) rules rule ]
where
rules = IntMap.elems st_rule_ids
-- | Turn a Passive back into an overlap.
-- Doesn't try to simplify it.
{-# INLINEABLE findPassive #-}
{-# SCC findPassive #-}
findPassive :: forall f. Function f => State f -> Passive Params -> Maybe (ActiveRule f, ActiveRule f, Overlap f)
findPassive State{..} Passive{..} = do
rule1 <- IntMap.lookup (fromIntegral passive_rule1) st_rule_ids
rule2 <- IntMap.lookup (fromIntegral passive_rule2) st_rule_ids
let !depth = 1 + max (the rule1) (the rule2)
overlap <-
overlapAt (fromIntegral passive_pos) depth
(renameAvoiding (the rule2 :: Rule f) (the rule1)) (the rule2)
return (rule1, rule2, overlap)
-- | Renormalise a queued Passive.
{-# INLINEABLE simplifyPassive #-}
{-# SCC simplifyPassive #-}
simplifyPassive :: Function f => Config f -> State f -> Passive Params -> Maybe (Passive Params)
simplifyPassive Config{..} state@State{..} passive = do
(_, _, overlap) <- findPassive state passive
overlap <- simplifyOverlap (index_oriented st_rules) overlap
return passive {
passive_score = fromIntegral $
fromIntegral (passive_score passive) `intMin`
score cfg_critical_pairs overlap }
-- | Check if we should renormalise the queue.
{-# INLINEABLE shouldSimplifyQueue #-}
shouldSimplifyQueue :: Function f => Config f -> State f -> Bool
shouldSimplifyQueue Config{..} State{..} =
length (filter isNothing st_cp_sample) * 100 >= cfg_renormalise_threshold * cfg_cp_sample_size
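-- Worked example with the defaults above: threshold 20 and sample size 100
-- mean the queue is renormalised once at least 20 of the 100 sampled
-- critical pairs have been simplified away (i.e. are Nothing in st_cp_sample).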
-- | Renormalise the entire queue.
{-# INLINEABLE simplifyQueue #-}
{-# SCC simplifyQueue #-}
simplifyQueue :: Function f => Config f -> State f -> State f
simplifyQueue config state =
resetSample config state { st_queue = simp (st_queue state) }
where
simp =
Queue.mapMaybe (simplifyPassive config state)
-- | Enqueue a set of critical pairs.
{-# INLINEABLE enqueue #-}
{-# SCC enqueue #-}
enqueue :: Function f => State f -> RuleId -> [Passive Params] -> State f
enqueue state rule passives =
state { st_queue = Queue.insert rule passives (st_queue state) }
-- | Dequeue a critical pair.
--
-- Also takes care of:
--
-- * removing any orphans from the head of the queue
-- * ignoring CPs that are too big
{-# INLINEABLE dequeue #-}
{-# SCC dequeue #-}
dequeue :: Function f => Config f -> State f -> (Maybe (CriticalPair f, ActiveRule f, ActiveRule f), State f)
dequeue Config{..} state@State{..} =
case deq 0 st_queue of
-- Explicitly make the queue empty, in case it e.g. contained a
-- lot of orphans
Nothing -> (Nothing, state { st_queue = Queue.empty })
Just (overlap, n, queue) ->
(Just overlap,
state { st_queue = queue, st_considered = st_considered + n })
where
deq !n queue = do
(passive, queue) <- Queue.removeMin queue
case findPassive state passive of
Just (rule1, rule2, overlap@Overlap{overlap_eqn = t :=: u})
| fromMaybe True (cfg_accept_term <*> pure t),
fromMaybe True (cfg_accept_term <*> pure u),
cp <- makeCriticalPair rule1 rule2 overlap ->
return ((cp, rule1, rule2), n+1, queue)
_ -> deq (n+1) queue
----------------------------------------------------------------------
-- * Active rewrite rules.
----------------------------------------------------------------------
data Active f =
Active {
active_id :: {-# UNPACK #-} !Id,
active_depth :: {-# UNPACK #-} !Depth,
active_rule :: {-# UNPACK #-} !(Rule f),
active_top :: !(Maybe (Term f)),
active_proof :: {-# UNPACK #-} !(Proof f),
-- A model in which the rule is false (used when reorienting)
active_model :: !(Model f),
active_rules :: ![ActiveRule f] }
active_cp :: Active f -> CriticalPair f
active_cp Active{..} =
CriticalPair {
cp_eqn = unorient active_rule,
cp_depth = active_depth,
cp_top = active_top,
cp_proof = derivation active_proof }
-- An active oriented in a particular direction.
data ActiveRule f =
ActiveRule {
rule_active :: {-# UNPACK #-} !Id,
rule_rid :: {-# UNPACK #-} !RuleId,
rule_depth :: {-# UNPACK #-} !Depth,
rule_rule :: {-# UNPACK #-} !(Rule f),
rule_positions :: !(Positions f) }
instance PrettyTerm f => Symbolic (ActiveRule f) where
type ConstantOf (ActiveRule f) = f
termsDL ActiveRule{..} =
termsDL rule_rule
subst_ sub r@ActiveRule{..} =
r {
rule_rule = rule',
rule_positions = positions (lhs rule') }
where
rule' = subst_ sub rule_rule
instance Eq (Active f) where
(==) = (==) `on` active_id
instance Eq (ActiveRule f) where
(==) = (==) `on` rule_rid
instance Function f => Pretty (Active f) where
pPrint Active{..} =
pPrint active_id <#> text "." <+> pPrint (canonicalise active_rule)
instance Has (ActiveRule f) Id where the = rule_active
instance Has (ActiveRule f) RuleId where the = rule_rid
instance Has (ActiveRule f) Depth where the = rule_depth
instance f ~ g => Has (ActiveRule f) (Rule g) where the = rule_rule
instance f ~ g => Has (ActiveRule f) (Positions g) where the = rule_positions
newtype RuleId = RuleId Id deriving (Eq, Ord, Show, Num, Real, Integral, Enum)
-- Add a new active.
{-# INLINEABLE addActive #-}
{-# SCC addActive #-}
addActive :: Function f => Config f -> State f -> (Id -> RuleId -> RuleId -> Active f) -> State f
addActive config state@State{..} active0 =
let
active@Active{..} = active0 st_next_active st_next_rule (succ st_next_rule)
state' =
message (NewActive active) $
addActiveOnly state{st_next_active = st_next_active+1, st_next_rule = st_next_rule+2} active
in if subsumed st_joinable st_rules (unorient active_rule) then
state
else
normaliseGoals config $
foldl' enqueueRule state' active_rules
where
enqueueRule state rule =
sample config (length passives) passives $
enqueue state (the rule) passives
where
passives = makePassives config state rule
-- Update the list of sampled critical pairs.
{-# INLINEABLE sample #-}
sample :: Function f => Config f -> Int -> [Passive Params] -> State f -> State f
sample cfg m passives state@State{st_cp_next_sample = ((n, pos):rest), ..}
| idx < fromIntegral m =
sample cfg m passives state {
st_cp_next_sample = rest,
st_cp_sample =
take pos st_cp_sample ++
[find (passives !! fromIntegral idx)] ++
drop (pos+1) st_cp_sample }
| otherwise = state{st_num_cps = st_num_cps + fromIntegral m}
where
idx = n - st_num_cps
find passive = do
(_, _, overlap) <- findPassive state passive
simplifyOverlap (index_oriented st_rules) overlap
-- Reset the list of sampled critical pairs.
{-# INLINEABLE resetSample #-}
resetSample :: Function f => Config f -> State f -> State f
resetSample cfg@Config{..} state@State{..} =
foldl' sample1 state' (Queue.toList st_queue)
where
state' =
state {
st_num_cps = 0,
st_cp_next_sample = reservoir cfg_cp_sample_size,
st_cp_sample = [] }
sample1 state (n, passives) = sample cfg n passives state
-- Simplify the sampled critical pairs.
-- (A sampled critical pair is replaced with Nothing if it can be
-- simplified.)
{-# INLINEABLE simplifySample #-}
simplifySample :: Function f => State f -> State f
simplifySample state@State{..} =
state{st_cp_sample = map (>>= simp) st_cp_sample}
where
simp overlap = do
overlap' <- simplifyOverlap (index_oriented st_rules) overlap
guard (overlap_eqn overlap == overlap_eqn overlap')
return overlap
-- Add an active without generating critical pairs. Used in interreduction.
{-# INLINEABLE addActiveOnly #-}
addActiveOnly :: Function f => State f -> Active f -> State f
addActiveOnly state@State{..} active@Active{..} =
state {
st_rules = foldl' insertRule st_rules active_rules,
st_active_ids = IntMap.insert (fromIntegral active_id) active st_active_ids,
st_rule_ids = foldl' insertRuleId st_rule_ids active_rules }
where
insertRule rules rule@ActiveRule{..} =
RuleIndex.insert (lhs rule_rule) rule rules
insertRuleId rules rule@ActiveRule{..} =
IntMap.insert (fromIntegral rule_rid) rule rules
-- Delete an active. Used in interreduction, not suitable for general use.
{-# INLINE deleteActive #-}
deleteActive :: Function f => State f -> Active f -> State f
deleteActive state@State{..} Active{..} =
state {
st_rules = foldl' deleteRule st_rules active_rules,
st_active_ids = IntMap.delete (fromIntegral active_id) st_active_ids,
st_rule_ids = foldl' deleteRuleId st_rule_ids active_rules }
where
deleteRule rules rule =
RuleIndex.delete (lhs (rule_rule rule)) rule rules
deleteRuleId rules ActiveRule{..} =
IntMap.delete (fromIntegral rule_rid) rules
-- Try to join a critical pair.
{-# INLINEABLE consider #-}
consider :: Function f => Config f -> State f -> CriticalPair f -> State f
consider config state cp =
considerUsing (st_rules state) config state cp
-- Try to join a critical pair, but using a different set of rewrite rules
-- for normalisation.
{-# INLINEABLE considerUsing #-}
{-# SCC considerUsing #-}
considerUsing ::
Function f =>
RuleIndex f (ActiveRule f) -> Config f -> State f -> CriticalPair f -> State f
considerUsing rules config@Config{..} state@State{..} cp0 =
-- Important to canonicalise the rule so that we don't get
-- bigger and bigger variable indices over time
let cp = canonicalise cp0 in
case joinCriticalPair cfg_join st_joinable rules Nothing cp of
Right (mcp, cps) ->
let
state' = foldl' (considerUsing rules config) state cps
in case mcp of
Just cp -> addJoinable state' (cp_eqn cp)
Nothing -> state'
Left (cp, model) ->
foldl' (addCP config model) state (split cp)
{-# INLINEABLE addCP #-}
addCP :: Function f => Config f -> Model f -> State f -> CriticalPair f -> State f
addCP config model state@State{..} CriticalPair{..} =
let
pf = certify cp_proof
rule = orient cp_eqn (Proof.simpleLemma pf)
makeRule n k r =
ActiveRule {
rule_active = n,
rule_rid = k,
rule_depth = cp_depth,
rule_rule = r rule,
rule_positions = positions (lhs (r rule)) }
in
addActive config state $ \n k1 k2 ->
Active {
active_id = n,
active_depth = cp_depth,
active_rule = rule,
active_model = model,
active_top = cp_top,
active_proof = pf,
active_rules =
usortBy (comparing (canonicalise . rule_rule)) $
makeRule n k1 id:
[ makeRule n k2 backwards
| not (oriented (orientation rule)) ] }
-- Add a new equation.
{-# INLINEABLE addAxiom #-}
addAxiom :: Function f => Config f -> State f -> Axiom f -> State f
addAxiom config state axiom =
consider config state $
CriticalPair {
cp_eqn = axiom_eqn axiom,
cp_depth = 0,
cp_top = Nothing,
cp_proof = Proof.axiom axiom }
-- Record an equation as being joinable.
{-# INLINEABLE addJoinable #-}
addJoinable :: Function f => State f -> Equation f -> State f
addJoinable state eqn@(t :=: u) =
message (NewEquation eqn) $
state {
st_joinable =
Index.insert t (t :=: u) $
Index.insert u (u :=: t) (st_joinable state) }
-- For goal terms we store the set of all their normal forms.
-- Name and number are for information only.
data Goal f =
Goal {
goal_name :: String,
goal_number :: Int,
goal_eqn :: Equation f,
goal_lhs :: Map (Term f) (Reduction f),
goal_rhs :: Map (Term f) (Reduction f) }
-- Add a new goal.
{-# INLINEABLE addGoal #-}
addGoal :: Function f => Config f -> State f -> Goal f -> State f
addGoal config state@State{..} goal =
normaliseGoals config state { st_goals = goal:st_goals }
-- Normalise all goals.
{-# INLINEABLE normaliseGoals #-}
normaliseGoals :: Function f => Config f -> State f -> State f
normaliseGoals Config{..} state@State{..} =
state {
st_goals =
map (goalMap (nf (rewrite reduces (index_all st_rules)))) st_goals }
where
goalMap f goal@Goal{..} =
goal { goal_lhs = f (eqn_lhs goal_eqn) goal_lhs, goal_rhs = f (eqn_rhs goal_eqn) goal_rhs }
nf reduce t0 goals
| cfg_set_join_goals = Rule.normalForms reduce goals
| otherwise =
Map.fromList $
[ (result t0 q, q)
| (t, r) <- Map.toList goals,
let q = r `trans` Rule.normaliseWith (const True) reduce t ]
-- Recompute all normal forms of all goals. Starts from the original goal term.
-- Different from normalising all goals, because there may be an intermediate
-- term on one of the reduction paths which we can now rewrite in a different
-- way.
{-# INLINEABLE recomputeGoals #-}
recomputeGoals :: Function f => Config f -> State f -> State f
recomputeGoals config state =
-- Make this strict so that newTask can time it correctly
forceList (map goal_lhs (st_goals state')) `seq`
forceList (map goal_rhs (st_goals state')) `seq`
state'
where
state' =
normaliseGoals config (state { st_goals = map reset (st_goals state) })
reset goal@Goal{goal_eqn = t :=: u, ..} =
goal { goal_lhs = Map.singleton t [],
goal_rhs = Map.singleton u [] }
forceList [] = ()
forceList (x:xs) = x `seq` forceList xs
-- Create a goal.
{-# INLINE goal #-}
goal :: Int -> String -> Equation f -> Goal f
goal n name (t :=: u) =
Goal {
goal_name = name,
goal_number = n,
goal_eqn = t :=: u,
goal_lhs = Map.singleton t [],
goal_rhs = Map.singleton u [] }
----------------------------------------------------------------------
-- Interreduction.
----------------------------------------------------------------------
-- Simplify all rules.
{-# INLINEABLE interreduce #-}
{-# SCC interreduce #-}
interreduce :: Function f => Config f -> State f -> State f
interreduce _ state@State{..} | st_simplified_at == st_next_active = state
interreduce config@Config{..} state =
let
state' =
foldl' (interreduce1 config)
-- Clear out st_joinable, since we don't know which
-- equations have made use of each active.
state { st_joinable = Index.empty }
(IntMap.elems (st_active_ids state))
in state' { st_joinable = st_joinable state, st_simplified_at = st_next_active state' }
{-# INLINEABLE interreduce1 #-}
interreduce1 :: Function f => Config f -> State f -> Active f -> State f
interreduce1 config@Config{..} state active =
-- Exclude the active from the rewrite rules when testing
-- joinability, otherwise it will be trivially joinable.
case
joinCriticalPair cfg_join
Index.empty -- (st_joinable state)
(st_rules (deleteActive state active))
(Just (active_model active)) (active_cp active)
of
Right (_, cps) ->
flip (foldl' (consider config)) cps $
message (DeleteActive active) $
deleteActive state active
Left (cp, model)
| cp_eqn cp `simplerThan` cp_eqn (active_cp active) ->
flip (foldl' (consider config)) (split cp) $
message (DeleteActive active) $
deleteActive state active
| model /= active_model active ->
flip addActiveOnly active { active_model = model } $
deleteActive state active
| otherwise ->
state
----------------------------------------------------------------------
-- The main loop.
----------------------------------------------------------------------
data Output m f =
Output {
output_message :: Message f -> m () }
{-# INLINE complete #-}
complete :: (Function f, MonadIO m) => Output m f -> Config f -> State f -> m (State f)
complete Output{..} config@Config{..} state =
flip StateM.execStateT state $ do
tasks <- sequence
[newTask 10 (fromIntegral cfg_renormalise_percent / 100) $ do
state <- StateM.get
when (shouldSimplifyQueue config state) $ do
lift $ output_message SimplifyQueue
StateM.put $! simplifyQueue config state,
newTask 1 0.05 $ do
when cfg_simplify $ do
lift $ output_message Interreduce
state <- StateM.get
StateM.put $! simplifySample $! interreduce config state,
newTask 1 0.02 $ do
state <- StateM.get
StateM.put $! recomputeGoals config state,
newTask 60 0.01 $ do
State{..} <- StateM.get
let !n = Queue.queueSize st_queue
lift $ output_message (Status n)]
let
loop = do
progress <- StateM.state (complete1 config)
when cfg_always_simplify $ do
lift $ output_message Interreduce
state <- StateM.get
StateM.put $! simplifySample $! interreduce config state
state <- StateM.get
lift $ mapM_ output_message (messages state)
StateM.put (clearMessages state)
mapM_ checkTask tasks
when progress loop
loop
{-# INLINEABLE complete1 #-}
complete1 :: Function f => Config f -> State f -> (Bool, State f)
complete1 config@Config{..} state
| st_considered state >= cfg_max_critical_pairs =
(False, state)
| solved state = (False, state)
| otherwise =
case dequeue config state of
(Nothing, state) -> (False, state)
(Just (overlap, _, _), state) ->
(True, consider config state overlap)
{-# INLINEABLE solved #-}
solved :: Function f => State f -> Bool
solved = not . null . solutions
-- Return whatever goals we have proved and their proofs.
{-# INLINEABLE solutions #-}
{-# SCC solutions #-}
solutions :: Function f => State f -> [ProvedGoal f]
solutions State{..} = do
Goal{goal_lhs = ts, goal_rhs = us, ..} <- st_goals
let sols = Map.keys (Map.intersection ts us)
guard (not (null sols))
let sol:_ = sols
let t = ts Map.! sol
u = us Map.! sol
-- Strict so that we check the proof before returning a solution
!p =
Proof.certify $
reductionProof (eqn_lhs goal_eqn) t `Proof.trans`
Proof.symm (reductionProof (eqn_rhs goal_eqn) u)
return (provedGoal goal_number goal_name p)
-- Return all current rewrite rules.
{-# INLINEABLE rules #-}
rules :: Function f => State f -> [Rule f]
rules = map active_rule . IntMap.elems . st_active_ids
----------------------------------------------------------------------
-- For code which uses twee as a library.
----------------------------------------------------------------------
{-# INLINEABLE completePure #-}
completePure :: Function f => Config f -> State f -> State f
completePure cfg state
| progress = completePure cfg (clearMessages state')
| otherwise = state'
where
(progress, state') = complete1 cfg state
{-# INLINEABLE normaliseTerm #-}
normaliseTerm :: Function f => State f -> Term f -> Reduction f
normaliseTerm State{..} t =
normaliseWith (const True) (rewrite reduces (index_all st_rules)) t
{-# INLINEABLE normalForms #-}
normalForms :: Function f => State f -> Term f -> Map (Term f) (Reduction f)
normalForms State{..} t =
Rule.normalForms (rewrite reduces (index_all st_rules)) (Map.singleton t [])
{-# INLINEABLE simplifyTerm #-}
simplifyTerm :: Function f => State f -> Term f -> Term f
simplifyTerm State{..} t =
simplify (index_oriented st_rules) t
|
nick8325/kbc
|
src/Twee.hs
|
bsd-3-clause
| 25,575 | 0 | 19 | 5,607 | 7,355 | 3,833 | 3,522 | -1 | -1 |
module HplAssets.Clouds (
transformCloud,
emptyCloud
) where
import BasicTypes
import HplAssets.Cloud.Types
import FeatureModel.Types
import Data.Generics
import Data.List
emptyCloud :: CloudModel -> CloudModel
emptyCloud cloudmodel = cloudmodel { clouds = [] }
transformCloud :: CloudTransformation -> CloudModel -> FeatureConfiguration -> CloudModel -> CloudModel
transformCloud (SelectAllClouds) spl _ product = spl
transformCloud (SelectClouds ids) spl _ product = product {clouds = cs}
where
selected = [c | c <- (clouds spl) , (cloudId c) `elem` ids]
cs = nub $ (clouds product) ++ selected
transformCloud (RemoveClouds ids) spl _ product = product { clouds = cs}
where
cs = [c | c <- (clouds product), not ((cloudId c) `elem` ids)]
|
alessandroleite/hephaestus-pl
|
src/meta-hephaestus/HplAssets/Clouds.hs
|
lgpl-3.0
| 799 | 0 | 13 | 170 | 264 | 146 | 118 | 17 | 1 |
type Matrix a = [Row a]
type Row a = [a]
type Grid = Matrix Digit
type Digit = Char
digits :: [Char]
digits = ['1' .. '9']
blank :: Digit -> Bool
blank = (== '0')
solve :: Grid -> [Grid]
solve = filter valid . completions
-- Stub: intended to enumerate every grid obtained by filling the blank
-- cells with digits; for now it yields no candidate grids.
completions :: Grid -> [Grid]
completions d = []
-- Stub: intended to check that no row, column or box repeats a digit;
-- for now it rejects every grid.
valid :: Grid -> Bool
valid d = False
|
trymilix/cookbooks
|
Software/haskell/sudoku.hs
|
apache-2.0
| 316 | 0 | 6 | 73 | 139 | 81 | 58 | 14 | 1 |
module EditWebhook where
import Github.Repos.Webhooks
import qualified Github.Auth as Auth
import Github.Data.Definitions
main :: IO ()
main = do
let auth = Auth.OAuth "oauthtoken"
let editWebhookDef = EditRepoWebhook {
editRepoWebhookRemoveEvents = Just [WebhookWildcardEvent],
editRepoWebhookAddEvents = Just [WebhookCommitCommentEvent, WebhookGollumEvent],
editRepoWebhookConfig = Nothing,
editRepoWebhookEvents = Nothing,
editRepoWebhookActive = Just True
}
newWebhook <- editRepoWebhook' auth "repoOwner" "repoName" 123 editWebhookDef
case newWebhook of
(Left err) -> putStrLn $ "Error: " ++ (show err)
(Right webhook) -> putStrLn $ formatRepoWebhook webhook
formatRepoWebhook :: RepoWebhook -> String
formatRepoWebhook (RepoWebhook _ _ _ name _ _ _ _ _ _) = show name
|
jwiegley/github
|
samples/Repos/Webhooks/EditWebhook.hs
|
bsd-3-clause
| 839 | 0 | 13 | 162 | 226 | 120 | 106 | 19 | 2 |
module Jana.ErrorMessages where
import Text.Printf
import Jana.Error
import Jana.Ast
aliasError :: Ident -> Ident -> Message
aliasError id1 id2 = Message $
printf "Identifiers `%s' and `%s' are aliases" (ident id1) (ident id2)
unboundVar :: String -> Message
unboundVar name = Message $
printf "Variable `%s' has not been declared" name
alreadyBound :: String -> Message
alreadyBound name = Message $
printf "Variable name `%s' is already bound" name
typeError :: String -> Message
typeError = Message
typeMismatch :: [String] -> String -> Message
typeMismatch expTypes actualType = Message $
printf "Couldn't match expected type %s\n\
\ with actual type `%s'" (join expTypes) actualType
where join [] = ""
join [x] = quote x
join [x, y] = quote x ++ " or " ++ quote y
join (x:xs) = quote x ++ ", " ++ join xs
quote s = "`" ++ s ++ "'"
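-- For example (illustrative input), @join ["Int", "Bool", "Stack"]@ renders
-- as @`Int', `Bool' or `Stack'@.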
swapTypeError :: String -> String -> Message
swapTypeError typ1 typ2 = Message $
printf "Can't swap variables of type `%s' and `%s'" typ1 typ2
outOfBounds :: (PrintfArg a) => a -> a -> Message
outOfBounds index size = Message $
printf "Array index `%d' was out of bounds (array size was %d)"
index size
emptyStack :: Message
emptyStack = Message "Can't pop from empty stack"
popToNonZero :: Ident -> Message
popToNonZero id = Message $
printf "Can't pop to non-zero variable `%s'" (ident id)
assertionFail :: String -> Message
assertionFail s = Message $
"Assertion failed: " ++ s
delocalNameMismatch :: Ident -> Ident -> Message
delocalNameMismatch id1 id2 = Message $
  printf "Variable names do not match in local declaration:\n\
\ `%s' in `local'\n\
\ `%s' in `delocal'\n\
         \`delocal' statements must come in reverse order of the `local' statements"
(ident id1) (ident id2)
delocalTypeMismatch :: Ident -> String -> String -> Message
delocalTypeMismatch id locType delocType = Message $
printf "Type of variable `%s' does not match local declaration:\n\
\ `%s' in `local'\n\
\ `%s' in `delocal'"
(ident id) locType delocType
wrongDelocalValue :: Ident -> String -> String -> Message
wrongDelocalValue id expect actual = Message $
printf "Expected value to be `%s' for local variable `%s'\n\
\ but actual value is `%s'"
expect (ident id) actual
undefProc :: String -> Message
undefProc name = Message $
printf "Procedure `%s' is not defined" name
procDefined :: (Identifiable a) => a -> Message
procDefined id = Message $
printf "Procedure `%s' is already defined" (ident id)
callingMainError :: Message
callingMainError = Message "It is not allowed to call the `main' procedure"
argumentError :: (Identifiable a, PrintfArg b) => a -> b -> b -> Message
argumentError id expect actual = Message $
printf "Procedure `%s' expects %d argument(s) but got %d"
(ident id) expect actual
arraySize :: Message
arraySize = Message "Array size must be greater than or equal to one"
arraySizeMissing :: Ident -> Message
arraySizeMissing id = Message $
printf "Array size missing for variable `%s'" (ident id)
arraySizeMismatch :: (PrintfArg a, PrintfArg b) => a -> b -> Message
arraySizeMismatch exp actual = Message $
printf "Expecting array of size %d\n\
\ but got size %d"
exp actual
divisionByZero :: Message
divisionByZero = Message "Division by zero"
noMainProc :: Message
noMainProc = Message "No main procedure has been defined"
multipleMainProcs :: Message
multipleMainProcs = Message "Multiple main procedures have been defined"
procDuplicateArgs :: Proc -> Message
procDuplicateArgs id = Message $
printf "Procedure `%s' has duplicate arguments" (ident id)
userError :: String -> Message
userError msg = Message $ "User error: " ++ msg
printfTypeMismatch :: Char -> String -> String -> Message
printfTypeMismatch char expected given = Message $
printf "Type mismatch for `%%%c' format specifier\n\
\Expected argument of type `%s'\n\
\ but actual type was `%s'" char expected given
printfTooManyArgs :: Message
printfTooManyArgs = Message $
  "Not all arguments were used during string formatting"
printfNotEnoughArgs :: Message
printfNotEnoughArgs = Message $
"Not enough arguments for format string"
printfUnrecognizedType :: Char -> Message
printfUnrecognizedType char = Message $
printf "Unrecognized format specifier: `%%%c'" char
|
mbudde/jana
|
src/Jana/ErrorMessages.hs
|
bsd-3-clause
| 4,465 | 0 | 9 | 961 | 993 | 508 | 485 | 96 | 4 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/Text/Internal/Encoding/Fusion.hs" #-}
{-# LANGUAGE BangPatterns, CPP, Rank2Types #-}
-- |
-- Module : Data.Text.Internal.Encoding.Fusion
-- Copyright : (c) Tom Harper 2008-2009,
-- (c) Bryan O'Sullivan 2009,
-- (c) Duncan Coutts 2009
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Fusible 'Stream'-oriented functions for converting between 'Text'
-- and several common encodings.
module Data.Text.Internal.Encoding.Fusion
(
-- * Streaming
streamASCII
, streamUtf8
, streamUtf16LE
, streamUtf16BE
, streamUtf32LE
, streamUtf32BE
-- * Unstreaming
, unstream
, module Data.Text.Internal.Encoding.Fusion.Common
) where
import Data.ByteString.Internal (ByteString(..), mallocByteString, memcpy)
import Data.Text.Internal.Fusion (Step(..), Stream(..))
import Data.Text.Internal.Fusion.Size
import Data.Text.Encoding.Error
import Data.Text.Internal.Encoding.Fusion.Common
import Data.Text.Internal.Unsafe.Char (unsafeChr, unsafeChr8, unsafeChr32)
import Data.Text.Internal.Unsafe.Shift (shiftL, shiftR)
import Data.Word (Word8, Word16, Word32)
import Foreign.ForeignPtr (withForeignPtr, ForeignPtr)
import Foreign.Storable (pokeByteOff)
import qualified Data.ByteString as B
import qualified Data.ByteString.Unsafe as B
import qualified Data.Text.Internal.Encoding.Utf8 as U8
import qualified Data.Text.Internal.Encoding.Utf16 as U16
import qualified Data.Text.Internal.Encoding.Utf32 as U32
import Data.Text.Unsafe (unsafeDupablePerformIO)
streamASCII :: ByteString -> Stream Char
streamASCII bs = Stream next 0 (maxSize l)
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| otherwise = Yield (unsafeChr8 x1) (i+1)
where
x1 = B.unsafeIndex bs i
{-# DEPRECATED streamASCII "Do not use this function" #-}
{-# INLINE [0] streamASCII #-}
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using UTF-8
-- encoding.
streamUtf8 :: OnDecodeError -> ByteString -> Stream Char
streamUtf8 onErr bs = Stream next 0 (maxSize l)
where
l = B.length bs
next i
| i >= l = Done
| U8.validate1 x1 = Yield (unsafeChr8 x1) (i+1)
| i+1 < l && U8.validate2 x1 x2 = Yield (U8.chr2 x1 x2) (i+2)
| i+2 < l && U8.validate3 x1 x2 x3 = Yield (U8.chr3 x1 x2 x3) (i+3)
| i+3 < l && U8.validate4 x1 x2 x3 x4 = Yield (U8.chr4 x1 x2 x3 x4) (i+4)
| otherwise = decodeError "streamUtf8" "UTF-8" onErr (Just x1) (i+1)
where
x1 = idx i
x2 = idx (i + 1)
x3 = idx (i + 2)
x4 = idx (i + 3)
idx = B.unsafeIndex bs
{-# INLINE [0] streamUtf8 #-}
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using little
-- endian UTF-16 encoding.
streamUtf16LE :: OnDecodeError -> ByteString -> Stream Char
streamUtf16LE onErr bs = Stream next 0 (maxSize (l `shiftR` 1))
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| i+1 < l && U16.validate1 x1 = Yield (unsafeChr x1) (i+2)
| i+3 < l && U16.validate2 x1 x2 = Yield (U16.chr2 x1 x2) (i+4)
| otherwise = decodeError "streamUtf16LE" "UTF-16LE" onErr Nothing (i+1)
where
x1 = idx i + (idx (i + 1) `shiftL` 8)
x2 = idx (i + 2) + (idx (i + 3) `shiftL` 8)
idx = fromIntegral . B.unsafeIndex bs :: Int -> Word16
{-# INLINE [0] streamUtf16LE #-}
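-- Concrete illustration: the code point U+10000 is the surrogate pair
-- 0xD800/0xDC00, stored little-endian as the bytes 0x00 0xD8 0x00 0xDC,
-- and is decoded by the U16.validate2 branch above.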
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using big
-- endian UTF-16 encoding.
streamUtf16BE :: OnDecodeError -> ByteString -> Stream Char
streamUtf16BE onErr bs = Stream next 0 (maxSize (l `shiftR` 1))
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| i+1 < l && U16.validate1 x1 = Yield (unsafeChr x1) (i+2)
| i+3 < l && U16.validate2 x1 x2 = Yield (U16.chr2 x1 x2) (i+4)
| otherwise = decodeError "streamUtf16BE" "UTF-16BE" onErr Nothing (i+1)
where
x1 = (idx i `shiftL` 8) + idx (i + 1)
x2 = (idx (i + 2) `shiftL` 8) + idx (i + 3)
idx = fromIntegral . B.unsafeIndex bs :: Int -> Word16
{-# INLINE [0] streamUtf16BE #-}
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using big
-- endian UTF-32 encoding.
streamUtf32BE :: OnDecodeError -> ByteString -> Stream Char
streamUtf32BE onErr bs = Stream next 0 (maxSize (l `shiftR` 2))
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| i+3 < l && U32.validate x = Yield (unsafeChr32 x) (i+4)
| otherwise = decodeError "streamUtf32BE" "UTF-32BE" onErr Nothing (i+1)
where
x = shiftL x1 24 + shiftL x2 16 + shiftL x3 8 + x4
x1 = idx i
x2 = idx (i+1)
x3 = idx (i+2)
x4 = idx (i+3)
idx = fromIntegral . B.unsafeIndex bs :: Int -> Word32
{-# INLINE [0] streamUtf32BE #-}
-- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using little
-- endian UTF-32 encoding.
streamUtf32LE :: OnDecodeError -> ByteString -> Stream Char
streamUtf32LE onErr bs = Stream next 0 (maxSize (l `shiftR` 2))
where
l = B.length bs
{-# INLINE next #-}
next i
| i >= l = Done
| i+3 < l && U32.validate x = Yield (unsafeChr32 x) (i+4)
| otherwise = decodeError "streamUtf32LE" "UTF-32LE" onErr Nothing (i+1)
where
x = shiftL x4 24 + shiftL x3 16 + shiftL x2 8 + x1
x1 = idx i
x2 = idx $ i+1
x3 = idx $ i+2
x4 = idx $ i+3
idx = fromIntegral . B.unsafeIndex bs :: Int -> Word32
{-# INLINE [0] streamUtf32LE #-}
-- | /O(n)/ Convert a 'Stream' 'Word8' to a 'ByteString'.
unstream :: Stream Word8 -> ByteString
unstream (Stream next s0 len) = unsafeDupablePerformIO $ do
let mlen = upperBound 4 len
mallocByteString mlen >>= loop mlen 0 s0
where
loop !n !off !s fp = case next s of
Done -> trimUp fp n off
Skip s' -> loop n off s' fp
Yield x s'
| off == n -> realloc fp n off s' x
| otherwise -> do
withForeignPtr fp $ \p -> pokeByteOff p off x
loop n (off+1) s' fp
{-# NOINLINE realloc #-}
realloc fp n off s x = do
let n' = n+n
fp' <- copy0 fp n n'
withForeignPtr fp' $ \p -> pokeByteOff p off x
loop n' (off+1) s fp'
{-# NOINLINE trimUp #-}
trimUp fp _ off = return $! PS fp 0 off
copy0 :: ForeignPtr Word8 -> Int -> Int -> IO (ForeignPtr Word8)
copy0 !src !srcLen !destLen =
do
dest <- mallocByteString destLen
withForeignPtr src $ \src' ->
withForeignPtr dest $ \dest' ->
memcpy dest' src' (fromIntegral srcLen)
return dest
decodeError :: forall s. String -> String -> OnDecodeError -> Maybe Word8
-> s -> Step s Char
decodeError func kind onErr mb i =
case onErr desc mb of
Nothing -> Skip i
Just c -> Yield c i
where desc = "Data.Text.Internal.Encoding.Fusion." ++ func ++ ": Invalid " ++
kind ++ " stream"
|
phischu/fragnix
|
tests/packages/scotty/Data.Text.Internal.Encoding.Fusion.hs
|
bsd-3-clause
| 7,791 | 0 | 15 | 2,439 | 2,367 | 1,233 | 1,134 | 147 | 3 |
-- | Create a bundle to be uploaded to Stackage Server.
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Stackage.ServerBundle
( serverBundle
, epochTime
, bpAllPackages
, docsListing
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Compression.GZip as GZip
import qualified Data.Map as M
import qualified Data.Yaml as Y
import Filesystem (isFile)
import Foreign.C.Types (CTime (CTime))
import Stackage.BuildConstraints
import Stackage.BuildPlan
import Stackage.Prelude
import qualified System.PosixCompat.Time as PC
import qualified Text.XML as X
import Text.XML.Cursor
-- | Get current time
epochTime :: IO Tar.EpochTime
epochTime = (\(CTime t) -> fromIntegral t) <$> PC.epochTime
-- | All package/versions in a build plan, including core packages.
--
-- Note that this may include packages not available on Hackage.
bpAllPackages :: BuildPlan -> Map PackageName Version
bpAllPackages BuildPlan {..} =
siCorePackages bpSystemInfo ++ map ppVersion bpPackages
serverBundle :: Tar.EpochTime
-> Text -- ^ title
-> Text -- ^ slug
-> BuildPlan
-> LByteString
serverBundle time title slug bp@BuildPlan {..} = GZip.compress $ Tar.write
[ fe "build-plan.yaml" (fromStrict $ Y.encode bp)
, fe "hackage" hackage
, fe "slug" (fromStrict $ encodeUtf8 slug)
, fe "desc" (fromStrict $ encodeUtf8 title)
, fe "core" corePackagesList
]
where
fe name contents =
case Tar.toTarPath False name of
Left s -> error s
Right name' -> (Tar.fileEntry name' contents)
{ Tar.entryTime = time
}
hackage = builderToLazy $ foldMap goPair $ mapToList packageMap
-- need to remove some packages that don't exist on Hackage
packageMap = foldr deleteMap (bpAllPackages bp) $ map PackageName
[ "bin-package-db"
, "ghc"
, "rts"
]
goPair (name, version) =
toBuilder (display name) ++
toBuilder (asText "-") ++
toBuilder (display version) ++
toBuilder (asText "\n")
corePackagesList =
builderToLazy $ toBuilder $ unlines $
map (\(PackageName name) -> name)
(M.keys $ siCorePackages bpSystemInfo)
docsListing :: BuildPlan
-> FilePath -- ^ docs directory
-> IO ByteString
docsListing bp docsDir =
fmap (Y.encode . fold) $ mapM go $ mapToList $ bpAllPackages bp
where
go :: (PackageName, Version) -> IO (Map Text Y.Value)
go (package, version) = do -- handleAny (const $ return mempty) $ do
let dirname = fpFromText (concat
[ display package
, "-"
, display version
])
indexFP = (docsDir </> dirname </> "index.html")
ie <- isFile indexFP
if ie
then do
doc <- flip X.readFile indexFP X.def
{ X.psDecodeEntities = X.decodeHtmlEntities
}
let cursor = fromDocument doc
getPair x = take 1 $ do
href <- attribute "href" x
let name = concat $ x $// content
guard $ not $ null name
return (href, name)
pairs = cursor $// attributeIs "class" "module"
&/ laxElement "a" >=> getPair
m <- fmap fold $ forM pairs $ \(href, name) -> do
let suffix = dirname </> fpFromText href
e <- isFile $ docsDir </> suffix
return $ if e
then asMap $ singletonMap name [fpToText dirname, href]
else mempty
return $ singletonMap (display package) $ Y.object
[ "version" Y..= display version
, "modules" Y..= m
]
else return mempty
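-- The resulting YAML maps each package to its version and a module table;
-- for a hypothetical package text-1.2.0.4 whose index links Data-Text.html,
-- the encoded entry has roughly this shape:
--
-- > text:
-- >   version: 1.2.0.4
-- >   modules:
-- >     Data.Text:
-- >     - text-1.2.0.4
-- >     - Data-Text.html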
|
myfreeweb/stackage
|
Stackage/ServerBundle.hs
|
mit
| 4,251 | 0 | 22 | 1,550 | 1,024 | 538 | 486 | 91 | 3 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module GHC.Exception (module M) where
import "base" GHC.Exception as M
|
Ye-Yong-Chi/codeworld
|
codeworld-base/src/GHC/Exception.hs
|
apache-2.0
| 741 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
module WaiAppStatic.Types
( -- * Pieces
Piece
, toPiece
, fromPiece
, unsafeToPiece
, Pieces
, toPieces
-- * Caching
, MaxAge (..)
-- * File\/folder serving
, FolderName
, Folder (..)
, File (..)
, LookupResult (..)
, Listing
-- * Settings
, StaticSettings (..)
) where
import Data.Text (Text)
import qualified Network.HTTP.Types as H
import qualified Network.Wai as W
import Data.ByteString (ByteString)
import System.Posix.Types (EpochTime)
import qualified Data.Text as T
import Data.ByteString.Builder (Builder)
import Network.Mime (MimeType)
-- | An individual component of a path, or of a filepath.
--
-- This is the core type used by wai-app-static for doing lookups. It provides
-- a smart constructor to avoid the possibility of constructing unsafe path
-- segments (though @unsafeToPiece@ can get around that as necessary).
--
-- Individual file lookup backends must know how to convert from a @Piece@ to
-- their storage system.
newtype Piece = Piece { fromPiece :: Text }
deriving (Show, Eq, Ord)
-- | Smart constructor for a @Piece@. Won\'t allow unsafe components, such as
-- pieces beginning with a period or containing a slash. This /will/, however,
-- allow null pieces.
toPiece :: Text -> Maybe Piece
toPiece t
| T.null t = Just $ Piece t
| T.head t == '.' = Nothing
| T.any (== '/') t = Nothing
| otherwise = Just $ Piece t
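-- Illustrative, doctest-style behaviour (assumes OverloadedStrings for the
-- Text literals):
--
-- >>> fromPiece <$> toPiece "style.css"
-- Just "style.css"
-- >>> toPiece ".hidden"
-- Nothing
-- >>> toPiece "a/b"
-- Nothing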
-- | Construct a @Piece@ without input validation.
unsafeToPiece :: Text -> Piece
unsafeToPiece = Piece
-- | Call @toPiece@ on a list.
--
-- > toPieces = mapM toPiece
toPieces :: [Text] -> Maybe Pieces
toPieces = mapM toPiece
-- | Request coming from a user. Corresponds to @pathInfo@.
--
-- The root path is the empty list.
type Pieces = [Piece]
-- | Values for the max-age component of the cache-control response header.
data MaxAge = NoMaxAge -- ^ no cache-control set
| MaxAgeSeconds Int -- ^ set to the given number of seconds
| MaxAgeForever -- ^ essentially infinite caching; in reality, probably one year
-- | Just the name of a folder.
type FolderName = Piece
-- | Represent contents of a single folder, which can be itself either a file
-- or a folder.
data Folder = Folder
{ folderContents :: [Either FolderName File]
}
-- | Information on an individual file.
data File = File
{ -- | Size of file in bytes
fileGetSize :: Integer
-- | How to construct a WAI response for this file. Some files are stored
-- on the filesystem and can use @ResponseFile@, while others are stored
-- in memory and should use @ResponseBuilder@.
, fileToResponse :: H.Status -> H.ResponseHeaders -> W.Response
-- | Last component of the filename.
, fileName :: Piece
-- | Calculate a hash of the contents of this file, such as for etag.
, fileGetHash :: IO (Maybe ByteString)
-- | Last modified time, used for both display in listings and if-modified-since.
, fileGetModified :: Maybe EpochTime
}
-- | Result of looking up a file in some storage backend.
--
-- The lookup is either a file or folder, or does not exist.
data LookupResult = LRFile File
| LRFolder Folder
| LRNotFound
-- | How to construct a directory listing page for the given request path and
-- the resulting folder.
type Listing = Pieces -> Folder -> IO Builder
-- | All of the settings available to users for tweaking wai-app-static.
--
-- Note that you should use the settings type approach for modifying values.
-- See <http://www.yesodweb.com/book/settings-types> for more information.
data StaticSettings = StaticSettings
{
-- | Lookup a single file or folder. This is how you can control storage
-- backend (filesystem, embedded, etc) and where to lookup.
ssLookupFile :: Pieces -> IO LookupResult
-- | Determine the mime type of the given file. Note that this function
-- lives in @IO@ in case you want to perform more complicated mimetype
-- analysis, such as via the @file@ utility.
, ssGetMimeType :: File -> IO MimeType
-- | Ordered list of filenames to be used for indices. If the user
-- requests a folder, and a file with the given name is found in that
    -- folder, that file is served. This supersedes any directory listing.
, ssIndices :: [Piece]
-- | How to perform a directory listing. Optional. Will be used when the
-- user requested a folder.
, ssListing :: Maybe Listing
-- | Value to provide for max age in the cache-control.
, ssMaxAge :: MaxAge
-- | Given a requested path and a new destination, construct a string
-- that will go there. Default implementation will use relative paths.
, ssMkRedirect :: Pieces -> ByteString -> ByteString
-- | If @True@, send a redirect to the user when a folder is requested
-- and an index page should be displayed. When @False@, display the
-- content immediately.
, ssRedirectToIndex :: Bool
-- | Prefer usage of etag caching to last-modified caching.
, ssUseHash :: Bool
-- | Force a trailing slash at the end of directories
, ssAddTrailingSlash :: Bool
-- | Optional `W.Application` to be used in case of 404 errors
--
-- Since 3.1.3
, ss404Handler :: Maybe W.Application
}
|
sordina/wai
|
wai-app-static/WaiAppStatic/Types.hs
|
bsd-2-clause
| 5,370 | 0 | 11 | 1,296 | 622 | 391 | 231 | 63 | 1 |
{-# LANGUAGE RankNTypes #-}
module Main (main) where
import Common (commonMain)
import Control.DeepSeq
import Control.Monad.Identity
import qualified Control.Monad.Trans.Reader as R
import qualified Control.Monad.Trans.State.Strict as S
import Criterion.Main
import Data.Monoid
import Pipes
import Pipes.Lift
defaultMax :: Int
defaultMax = 10000
instance NFData a => NFData (Sum a)
main :: IO ()
main = commonMain defaultMax liftBenchmarks
iter :: forall m a . (Monad m , Ord a, Num a) => (a -> m a) -> a -> Effect m a
iter a vmax = loop 0
where
loop n
| n > vmax = return vmax
| otherwise = do
x <- lift $ a n
loop $! x
s_bench :: Int -> Effect (S.StateT Int Identity) Int
s_bench = iter (\n -> S.get >>= (\a -> S.put $! a + n) >> return (n + 1))
r_bench :: Int -> Effect (R.ReaderT Int Identity) Int
r_bench = iter (\n -> R.ask >>= (\a -> return $ n + a))
-- Run before Proxy
runB :: (a -> Effect Identity r) -> a -> r
runB f a = runIdentity $ runEffect $ f a
-- Run after Proxy
runA :: (Monad m) => (m r -> Identity a) -> Effect m r -> a
runA f a = runIdentity $ f (runEffect a)
liftBenchmarks :: Int -> [Benchmark]
liftBenchmarks vmax =
let applyBench = map ($ vmax)
in
[
bgroup "ReaderT" $
let defT f = (\d -> f d 1)
in applyBench
[
bench "runReaderP_B" . whnf (runB (runReaderP 1) . r_bench)
, bench "runReaderP_A" . whnf (runA (defT R.runReaderT) . r_bench)
]
, bgroup "StateT" $
let defT f = (\s -> f s 0)
in applyBench
[
bench "runStateP_B" . nf (runB (runStateP 0) . s_bench)
, bench "runStateP_A" . nf (runA (defT S.runStateT) . s_bench)
, bench "evalStateP_B" . whnf (runB (evalStateP 0) . s_bench)
, bench "evalStateP_A" . whnf (runA (defT S.evalStateT) . s_bench)
, bench "execStateP_B" . whnf (runB (execStateP 0) . s_bench)
, bench "execStateP_A" . whnf (runA (defT S.execStateT) . s_bench)
]
]
|
FranklinChen/Haskell-Pipes-Library
|
benchmarks/LiftBench.hs
|
bsd-3-clause
| 2,050 | 0 | 20 | 580 | 848 | 434 | 414 | 51 | 1 |
{-# OPTIONS -w #-}
module Plugin.Free.Theorem where
import Plugin.Free.Type
import Plugin.Free.Expr
import Plugin.Free.Util
data Theorem
= ThForall Var Type Theorem
| ThImplies Theorem Theorem
| ThEqual Expr Expr
| ThAnd Theorem Theorem
deriving (Eq,Show)
precIMPLIES, precAND :: Int
precIMPLIES = 5
precAND = 3
instance Pretty Theorem where
prettyP p t = prettyTheorem p False t
prettyTheorem :: Int -> Bool -> Theorem -> Doc
prettyTheorem p fa th@(ThForall v t p1)
| fa = prettyForall p [v] p1
| otherwise = prettyP p p1
prettyTheorem p fa (ThImplies p1 p2)
= prettyParenIndent (p > precIMPLIES) (
prettyTheorem (precIMPLIES+1) True p1
$$ nest (-1) (text "=>")
$$ prettyTheorem precIMPLIES fa p2
)
prettyTheorem _ _ (ThEqual e1 e2)
= prettyP 0 e1 <+> text "=" <+> prettyP 0 e2
prettyTheorem p fa (ThAnd e1 e2)
= prettyParenIndent (p > precAND) (
prettyTheorem (precAND+1) fa e1 $$ text "&&"
$$ prettyTheorem precAND fa e2
)
prettyForall :: Int -> [Var] -> Theorem -> Doc
prettyForall p vs (ThForall v t p1)
= prettyForall p (v:vs) p1
prettyForall p vs th
= parens (
text "forall" <+> hsep [ text v | v <- reverse vs ] <> text "."
<+> prettyTheorem 0 True th
)
varInTheorem :: Var -> Theorem -> Bool
varInTheorem v (ThForall v' t p)
= v /= v' && varInTheorem v p
varInTheorem v (ThImplies p1 p2)
= varInTheorem v p1 || varInTheorem v p2
varInTheorem v (ThEqual e1 e2)
= varInExpr v e1 || varInExpr v e2
varInTheorem v (ThAnd e1 e2)
= varInTheorem v e1 || varInTheorem v e2
applySimplifierTheorem :: (Theorem -> Theorem) -> (Theorem -> Theorem)
applySimplifierTheorem s (ThForall v t p)
= ThForall v t (s p)
applySimplifierTheorem s (ThImplies p1 p2)
= ThImplies (s p1) (s p2)
applySimplifierTheorem s p@(ThEqual _ _)
= p
applySimplifierTheorem s p@(ThAnd p1 p2)
= ThAnd (s p1) (s p2)
peepholeSimplifyTheorem :: Theorem -> Theorem
peepholeSimplifyTheorem
= peepholeSimplifyTheorem' . applySimplifierTheorem peepholeSimplifyTheorem
peepholeSimplifyTheorem' :: Theorem -> Theorem
peepholeSimplifyTheorem' (ThForall v t p)
= case varInTheorem v p of
True -> ThForall v t p
False -> p
peepholeSimplifyTheorem' p@(ThAnd e1 e2)
= foldr1 ThAnd (flattenAnd e1 . flattenAnd e2 $ [])
where
flattenAnd (ThAnd e1 e2) = flattenAnd e1 . flattenAnd e2
flattenAnd e = (e:)
peepholeSimplifyTheorem' p
= p
peepholeSimplifyExpr :: Expr -> Expr
peepholeSimplifyExpr
= peepholeSimplifyExpr' . applySimplifierExpr peepholeSimplifyExpr
peepholeSimplifyExpr' :: Expr -> Expr
peepholeSimplifyExpr' (EApp (EBuiltin BId) e2)
= e2
peepholeSimplifyExpr' (EApp (EBuiltin (BMap _)) (EBuiltin BId))
= EBuiltin BId
peepholeSimplifyExpr' e
= e
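-- The first two clauses of peepholeSimplifyExpr' apply the identity laws
-- @id x = x@ and @map id = id@; everything else is left untouched.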
foldEquality :: Theorem -> Theorem
foldEquality p@(ThForall _ _ _)
= case foldEquality' p [] of
Just p' -> p'
Nothing -> applySimplifierTheorem foldEquality p
where
foldEquality' (ThForall v t p) vts
= foldEquality' p ((v,t):vts)
foldEquality' (ThImplies (ThEqual (EVar v) e2) p) vts
| v `elem` map fst vts
= foldEquality'' vts (theoremSubst v e2 p)
foldEquality' (ThImplies (ThEqual e1 (EVar v)) p) vts
| v `elem` map fst vts
= foldEquality'' vts (theoremSubst v e1 p)
foldEquality' _ vts
= Nothing
foldEquality'' [] e
= Just e
foldEquality'' ((v,t):vts) e
= foldEquality'' vts (ThForall v t e)
foldEquality p
= applySimplifierTheorem foldEquality p
tryCurrying :: Theorem -> Theorem
tryCurrying p@(ThForall _ _ _)
= case tryCurrying' p [] of
Just p' -> p'
Nothing -> applySimplifierTheorem tryCurrying p
where
tryCurrying' (ThForall v t p) vts
= tryCurrying' p ((v,t):vts)
tryCurrying' (ThEqual e1 e2) vts
= case (traverseRight ECDot e1, traverseRight ECDot e2) of
((ctx1, EVar v1), (ctx2, EVar v2))
| v1 == v2 && v1 `elem` map fst vts
&& not (varInCtx v1 ctx1) && not (varInCtx v2 ctx2)
-> tryCurrying'' vts (ThEqual (untraverse ctx1)
(untraverse ctx2))
_ -> Nothing
tryCurrying' _ _
= Nothing
traverseRight ctx (EApp e1 e2)
= traverseRight (ECAppR e1 ctx) e2
traverseRight ctx e
= (ctx, e)
untraverse ECDot = EBuiltin BId
untraverse (ECAppR e1 ECDot)
= e1
untraverse (ECAppR e1 ctx)
= EApp (EApp (EVarOp FR 9 ".") (untraverse ctx)) e1
tryCurrying'' [] e
= Just e
tryCurrying'' ((v,t):vts) e
= tryCurrying'' vts (ThForall v t e)
tryCurrying p
= applySimplifierTheorem tryCurrying p
theoremSimplify :: Theorem -> Theorem
theoremSimplify
= iterateUntilFixpoint
(foldEquality
. iterateUntilFixpoint peephole
. tryCurrying
. iterateUntilFixpoint peephole
)
where
iterateUntilFixpoint s t
= findFixpoint (iterate s t)
peephole t = findFixpoint (iterate peepholeSimplifyTheorem t)
findFixpoint (x1:xs@(x2:_))
| x1 == x2 = x2
| otherwise = findFixpoint xs
theoremSubst :: Var -> Expr -> Theorem -> Theorem
theoremSubst v e (ThForall f t p)
= ThForall f t (theoremSubst v e p)
theoremSubst v e (ThImplies p1 p2)
= ThImplies (theoremSubst v e p1) (theoremSubst v e p2)
theoremSubst v e (ThEqual e1 e2)
= ThEqual (exprSubst v e e1) (exprSubst v e e2)
theoremSubst v e (ThAnd p1 p2)
= ThAnd (theoremSubst v e p1) (theoremSubst v e p2)
-- vim: ts=4:sts=4:expandtab:ai
|
zeekay/lambdabot
|
Plugin/Free/Theorem.hs
|
mit
| 6,106 | 0 | 17 | 1,940 | 2,122 | 1,060 | 1,062 | 155 | 9 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1993-1998
This is useful, general stuff for the Native Code Generator.
Provide trees (of instructions), so that lists of instructions
can be appended in linear time.
-}
module OrdList (
OrdList,
nilOL, isNilOL, unitOL, appOL, consOL, snocOL, concatOL, lastOL,
mapOL, fromOL, toOL, foldrOL, foldlOL
) where
import GhcPrelude
import Outputable
import Data.Semigroup ( Semigroup )
import qualified Data.Semigroup as Semigroup
infixl 5 `appOL`
infixl 5 `snocOL`
infixr 5 `consOL`
data OrdList a
= None
| One a
| Many [a] -- Invariant: non-empty
| Cons a (OrdList a)
| Snoc (OrdList a) a
| Two (OrdList a) -- Invariant: non-empty
(OrdList a) -- Invariant: non-empty
instance Outputable a => Outputable (OrdList a) where
ppr ol = ppr (fromOL ol) -- Convert to list and print that
instance Semigroup (OrdList a) where
(<>) = appOL
instance Monoid (OrdList a) where
mempty = nilOL
mappend = (Semigroup.<>)
mconcat = concatOL
instance Functor OrdList where
fmap = mapOL
instance Foldable OrdList where
foldr = foldrOL
instance Traversable OrdList where
traverse f xs = toOL <$> traverse f (fromOL xs)
nilOL :: OrdList a
isNilOL :: OrdList a -> Bool
unitOL :: a -> OrdList a
snocOL :: OrdList a -> a -> OrdList a
consOL :: a -> OrdList a -> OrdList a
appOL :: OrdList a -> OrdList a -> OrdList a
concatOL :: [OrdList a] -> OrdList a
lastOL :: OrdList a -> a
nilOL = None
unitOL as = One as
snocOL as b = Snoc as b
consOL a bs = Cons a bs
concatOL aas = foldr appOL None aas
lastOL None = panic "lastOL"
lastOL (One a) = a
lastOL (Many as) = last as
lastOL (Cons _ as) = lastOL as
lastOL (Snoc _ a) = a
lastOL (Two _ as) = lastOL as
isNilOL None = True
isNilOL _ = False
None `appOL` b = b
a `appOL` None = a
One a `appOL` b = Cons a b
a `appOL` One b = Snoc a b
a `appOL` b = Two a b
fromOL :: OrdList a -> [a]
fromOL a = go a []
where go None acc = acc
go (One a) acc = a : acc
go (Cons a b) acc = a : go b acc
go (Snoc a b) acc = go a (b:acc)
go (Two a b) acc = go a (go b acc)
go (Many xs) acc = xs ++ acc
mapOL :: (a -> b) -> OrdList a -> OrdList b
mapOL _ None = None
mapOL f (One x) = One (f x)
mapOL f (Cons x xs) = Cons (f x) (mapOL f xs)
mapOL f (Snoc xs x) = Snoc (mapOL f xs) (f x)
mapOL f (Two x y) = Two (mapOL f x) (mapOL f y)
mapOL f (Many xs) = Many (map f xs)
foldrOL :: (a->b->b) -> b -> OrdList a -> b
foldrOL _ z None = z
foldrOL k z (One x) = k x z
foldrOL k z (Cons x xs) = k x (foldrOL k z xs)
foldrOL k z (Snoc xs x) = foldrOL k (k x z) xs
foldrOL k z (Two b1 b2) = foldrOL k (foldrOL k z b2) b1
foldrOL k z (Many xs) = foldr k z xs
foldlOL :: (b->a->b) -> b -> OrdList a -> b
foldlOL _ z None = z
foldlOL k z (One x) = k z x
foldlOL k z (Cons x xs) = foldlOL k (k z x) xs
foldlOL k z (Snoc xs x) = k (foldlOL k z xs) x
foldlOL k z (Two b1 b2) = foldlOL k (foldlOL k z b1) b2
foldlOL k z (Many xs) = foldl k z xs
toOL :: [a] -> OrdList a
toOL [] = None
toOL xs = Many xs
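-- A small usage sketch (illustrative only):
--
-- > fromOL (unitOL 1 `appOL` toOL [2,3] `snocOL` 4) == [1,2,3,4]
-- > fromOL (nilOL :: OrdList Int) == []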
|
shlevy/ghc
|
compiler/utils/OrdList.hs
|
bsd-3-clause
| 3,258 | 0 | 9 | 945 | 1,499 | 763 | 736 | 91 | 6 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
<title>Alert Filters | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/alertFilters/src/main/javahelp/org/zaproxy/zap/extension/alertFilters/resources/help_fa_IR/helpset_fa_IR.hs
|
apache-2.0
| 974 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.JHC
-- Copyright : Isaac Jones 2003-2006
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This module contains most of the JHC-specific code for configuring, building
-- and installing packages.
module Distribution.Simple.JHC (
configure, getInstalledPackages,
buildLib, buildExe,
installLib, installExe
) where
import Distribution.PackageDescription as PD
( PackageDescription(..), BuildInfo(..), Executable(..)
, Library(..), libModules, hcOptions, usedExtensions )
import Distribution.InstalledPackageInfo
( emptyInstalledPackageInfo, )
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), ComponentLocalBuildInfo(..) )
import Distribution.Simple.BuildPaths
( autogenModulesDir, exeExtension )
import Distribution.Simple.Compiler
( CompilerFlavor(..), CompilerId(..), Compiler(..), AbiTag(..)
, PackageDBStack, Flag, languageToFlags, extensionsToFlags )
import Language.Haskell.Extension
( Language(Haskell98), Extension(..), KnownExtension(..))
import Distribution.Simple.Program
( ConfiguredProgram(..), jhcProgram, ProgramConfiguration
, userMaybeSpecifyPath, requireProgramVersion, lookupProgram
, rawSystemProgram, rawSystemProgramStdoutConf )
import Distribution.Version
( Version(..), orLaterVersion )
import Distribution.Package
( Package(..), ComponentId(ComponentId),
pkgName, pkgVersion, )
import Distribution.Simple.Utils
( createDirectoryIfMissingVerbose, writeFileAtomic
, installOrdinaryFile, installExecutableFile
, intercalate )
import System.FilePath ( (</>) )
import Distribution.Verbosity
import Distribution.Text
( Text(parse), display )
import Distribution.Compat.ReadP
( readP_to_S, string, skipSpaces )
import Distribution.System ( Platform )
import Data.List ( nub )
import Data.Char ( isSpace )
import qualified Data.Map as M ( empty )
import Data.Maybe ( fromMaybe )
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
-- -----------------------------------------------------------------------------
-- Configuring
configure :: Verbosity -> Maybe FilePath -> Maybe FilePath
-> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration)
configure verbosity hcPath _hcPkgPath conf = do
(jhcProg, _, conf') <- requireProgramVersion verbosity
jhcProgram (orLaterVersion (Version [0,7,2] []))
(userMaybeSpecifyPath "jhc" hcPath conf)
let Just version = programVersion jhcProg
comp = Compiler {
compilerId = CompilerId JHC version,
compilerAbiTag = NoAbiTag,
compilerCompat = [],
compilerLanguages = jhcLanguages,
compilerExtensions = jhcLanguageExtensions,
compilerProperties = M.empty
}
compPlatform = Nothing
return (comp, compPlatform, conf')
jhcLanguages :: [(Language, Flag)]
jhcLanguages = [(Haskell98, "")]
-- | The flags for the supported extensions
jhcLanguageExtensions :: [(Extension, Flag)]
jhcLanguageExtensions =
[(EnableExtension TypeSynonymInstances , "")
,(DisableExtension TypeSynonymInstances , "")
,(EnableExtension ForeignFunctionInterface , "")
,(DisableExtension ForeignFunctionInterface , "")
,(EnableExtension ImplicitPrelude , "") -- Wrong
,(DisableExtension ImplicitPrelude , "--noprelude")
,(EnableExtension CPP , "-fcpp")
,(DisableExtension CPP , "-fno-cpp")
]
getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration
-> IO InstalledPackageIndex
getInstalledPackages verbosity _packageDBs conf = do
-- jhc --list-libraries lists all available libraries.
  -- How shall I find out whether they are global or local
-- without checking all files and locations?
str <- rawSystemProgramStdoutConf verbosity jhcProgram conf ["--list-libraries"]
let pCheck :: [(a, String)] -> [a]
pCheck rs = [ r | (r,s) <- rs, all isSpace s ]
let parseLine ln =
pCheck (readP_to_S
(skipSpaces >> string "Name:" >> skipSpaces >> parse) ln)
return $
PackageIndex.fromList $
map (\p -> emptyInstalledPackageInfo {
InstalledPackageInfo.installedComponentId =
ComponentId (display p),
InstalledPackageInfo.sourcePackageId = p
}) $
concatMap parseLine $
lines str
-- -----------------------------------------------------------------------------
-- Building
-- | Building a package for JHC.
-- Currently C source files are not supported.
buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo
-> Library -> ComponentLocalBuildInfo -> IO ()
buildLib verbosity pkg_descr lbi lib clbi = do
let Just jhcProg = lookupProgram jhcProgram (withPrograms lbi)
let libBi = libBuildInfo lib
let args = constructJHCCmdLine lbi libBi clbi (buildDir lbi) verbosity
let pkgid = display (packageId pkg_descr)
pfile = buildDir lbi </> "jhc-pkg.conf"
hlfile= buildDir lbi </> (pkgid ++ ".hl")
writeFileAtomic pfile . BS.Char8.pack $ jhcPkgConf pkg_descr
rawSystemProgram verbosity jhcProg $
["--build-hl="++pfile, "-o", hlfile] ++
args ++ map display (libModules lib)
-- | Building an executable for JHC.
-- Currently C source files are not supported.
buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo
-> Executable -> ComponentLocalBuildInfo -> IO ()
buildExe verbosity _pkg_descr lbi exe clbi = do
let Just jhcProg = lookupProgram jhcProgram (withPrograms lbi)
let exeBi = buildInfo exe
let out = buildDir lbi </> exeName exe
let args = constructJHCCmdLine lbi exeBi clbi (buildDir lbi) verbosity
rawSystemProgram verbosity jhcProg (["-o",out] ++ args ++ [modulePath exe])
constructJHCCmdLine :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo
-> FilePath -> Verbosity -> [String]
constructJHCCmdLine lbi bi clbi _odir verbosity =
(if verbosity >= deafening then ["-v"] else [])
++ hcOptions JHC bi
++ languageToFlags (compiler lbi) (defaultLanguage bi)
++ extensionsToFlags (compiler lbi) (usedExtensions bi)
++ ["--noauto","-i-"]
++ concat [["-i", l] | l <- nub (hsSourceDirs bi)]
++ ["-i", autogenModulesDir lbi]
++ ["-optc" ++ opt | opt <- PD.ccOptions bi]
-- It would be better if JHC would accept package names with versions,
-- but JHC-0.7.2 doesn't accept this.
-- Thus, we have to strip the version with 'pkgName'.
++ (concat [ ["-p", display (pkgName pkgid)]
| (_, pkgid) <- componentPackageDeps clbi ])
jhcPkgConf :: PackageDescription -> String
jhcPkgConf pd =
let sline name sel = name ++ ": "++sel pd
lib = fromMaybe (error "no library available") . library
comma = intercalate "," . map display
in unlines [sline "name" (display . pkgName . packageId)
,sline "version" (display . pkgVersion . packageId)
,sline "exposed-modules" (comma . PD.exposedModules . lib)
,sline "hidden-modules" (comma . otherModules . libBuildInfo . lib)
]
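-- Illustrative sketch (package name and modules are made up): for a
-- hypothetical package "foo-1.2" exposing Foo.Bar and Foo.Baz and hiding
-- Foo.Internal, the jhc-pkg.conf text generated above would look roughly like:
--
-- > name: foo
-- > version: 1.2
-- > exposed-modules: Foo.Bar,Foo.Baz
-- > hidden-modules: Foo.Internal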
installLib :: Verbosity
-> LocalBuildInfo
-> FilePath
-> FilePath
-> FilePath
-> PackageDescription
-> Library
-> ComponentLocalBuildInfo
-> IO ()
installLib verb _lbi dest _dyn_dest build_dir pkg_descr _lib _clbi = do
let p = display (packageId pkg_descr)++".hl"
createDirectoryIfMissingVerbose verb True dest
installOrdinaryFile verb (build_dir </> p) (dest </> p)
installExe :: Verbosity -> FilePath -> FilePath -> (FilePath,FilePath) -> PackageDescription -> Executable -> IO ()
installExe verb dest build_dir (progprefix,progsuffix) _ exe = do
let exe_name = exeName exe
src = exe_name </> exeExtension
out = (progprefix ++ exe_name ++ progsuffix) </> exeExtension
createDirectoryIfMissingVerbose verb True dest
installExecutableFile verb (build_dir </> src) (dest </> out)
|
martinvlk/cabal
|
Cabal/Distribution/Simple/JHC.hs
|
bsd-3-clause
| 8,758 | 0 | 17 | 2,103 | 2,077 | 1,144 | 933 | 155 | 2 |
{-# OPTIONS_GHC -fwarn-unsafe #-}
{-# LANGUAGE FlexibleInstances #-}
module SH_Overlap9_A (
C(..)
) where
import SH_Overlap9_B
instance
{-# OVERLAPS #-}
C [Int] where
f _ = "[Int]"
|
urbanslug/ghc
|
testsuite/tests/safeHaskell/overlapping/SH_Overlap9_A.hs
|
bsd-3-clause
| 198 | 0 | 6 | 44 | 38 | 24 | 14 | 9 | 0 |
-- !!! Test passing doubles to a ccall
import Foreign.C
foreign import ccall unsafe "math.h sin"
c_sin :: CDouble -> IO CDouble
main = c_sin 1.0 >>= print
|
urbanslug/ghc
|
testsuite/tests/ffi/should_run/ffi003.hs
|
bsd-3-clause
| 160 | 0 | 7 | 33 | 40 | 21 | 19 | 4 | 1 |
{-# LANGUAGE UnicodeSyntax, MultiParamTypeClasses, FlexibleInstances #-}
module T2902_B_PairingSum (Sum(..), PSum) where
import T2902_Sum
data PSum a b = Empty | Tree a b [PSum a b]
instance (Ord a, Eq b, Num b) ⇒ Sum PSum a b where
insert v r = union $ Tree v r []
union x Empty = x
union Empty x = x
union x@(Tree v r xs) y@(Tree w s ys) =
case compare v w of
LT → Tree v r (y:xs)
GT → Tree w s (x:ys)
EQ → case r + s of
0 → z
t → insert v t z
where z = union (unions xs) (unions ys)
unions [] = Empty
unions [x] = x
unions (x : y : zs) = union (union x y) (unions zs)
extractMin Empty = undefined
extractMin (Tree v r xs) = ((v,r), unions xs)
fromList [] = Empty
fromList ((v,r):xs) = insert v r $ fromList xs
toList Empty = []
toList x = let (y, z) = extractMin x in y : toList z
|
ghc-android/ghc
|
testsuite/tests/perf/should_run/T2902_B_PairingSum.hs
|
bsd-3-clause
| 876 | 0 | 12 | 255 | 453 | 234 | 219 | 25 | 0 |
module Main where
checkSort :: Ord a => [a] -> Bool
checkSort [] = True
checkSort [_] = True
checkSort (x:y:xs) = (x <= y) && checkSort (y:xs)
checkSort' :: Ord a => [a] -> Bool
checkSort' = and . (drop 1 >>= zipWith (>=))
checkSort'' xs = and $ (drop 1 >>= zipWith (>=)) xs
checkSort''' xs = and $ zipWith (>=) (drop 1 xs) xs
checkSort'''' xs = and $ zipWith (<=) xs (drop 1 xs)
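-- Illustrative check (example inputs chosen here, not from the original
-- source): each of the checkSort variants above returns True for [1,2,2,3]
-- and False for [2,1].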
main :: IO ()
main = do print $ map checkSort ([[1..10],
[1,3,2],
[],
[4,3,2],
[4,5,5]] :: [[Int]])
print $ map checkSort' ([[1..10],
[1,3,2],
[],
[4,3,2],
[4,5,5]] :: [[Int]])
|
fredmorcos/attic
|
snippets/haskell/CheckSort.hs
|
isc
| 814 | 0 | 11 | 390 | 353 | 199 | 154 | -1 | -1 |
module Main where
import HIRST (mainH)
main = mainH
|
mihaimaruseac/hirst-v00
|
src/Main.hs
|
mit
| 54 | 0 | 5 | 11 | 17 | 11 | 6 | 3 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import qualified SDL.Image as Img
import qualified SDL as SDL
import Control.Concurrent (threadDelay)
import Control.Monad (unless)
import Linear.Affine
import Linear.V2
import Linear.V4
frameRate = 25
main :: IO ()
main = do
SDL.initialize [SDL.InitVideo]
Img.initImg [Img.InitPNG]
window <- SDL.createWindow "Example01" SDL.defaultWindow
SDL.showWindow window
--
p01SF <- Img.surfacePNG "img/p01.png"
rdr <- SDL.createRenderer window (-1) SDL.defaultRenderer
p01TX <- SDL.createTextureFromSurface rdr p01SF
SDL.freeSurface p01SF
--
let loop (x,y,i) = do
events <- SDL.pollEvents
let quit = any (== SDL.QuitEvent) $ map SDL.eventPayload events
--
SDL.clear rdr
SDL.copy rdr p01TX Nothing $ Just $ SDL.Rectangle (P $ V2 x y) (V2 30 30)
SDL.present rdr
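        -- threadDelay expects microseconds: 10^6 `div` frameRate is
        -- 1/frameRate seconds, i.e. roughly 40 ms per frame at the 25 fps
        -- set above.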
threadDelay $ 10^6 `div` frameRate
unless (quit || i >= 15 * frameRate) $
loop (if x >= 770 then 30 else x+1 , if y >= 570 then 30 else y+2,i+1)
-- exec our main loop
loop (30,30,0)
SDL.destroyWindow window
SDL.destroyTexture p01TX
Img.quitImg
SDL.quit
|
jaiyalas/sdl2-image
|
example/unsorted/Example02.hs
|
mit
| 1,241 | 0 | 18 | 336 | 438 | 221 | 217 | 34 | 3 |
import Data.List (sort)
import Text.ParserCombinators.ReadP (ReadP)
import Text.ParserCombinators.ReadPrec (lift)
import Text.Read (readPrec)
import qualified Text.ParserCombinators.ReadP as RP
import Common (rpInt)
main :: IO ()
main = do
partOne >>= print
partTwo >>= print
partOne :: IO Int
partOne = process $ paperSize . read
partTwo :: IO Int
partTwo = process $ ribbon . read
process :: (String -> Int) -> IO Int
process f = sum . map f . lines <$> readFile "../input/02.txt"
data Box = Box Int Int Int
deriving Show
-- |
-- >>> read "2x3x4" :: Box
-- Box 2 3 4
--
instance Read Box where
readPrec = lift rpBox
rpBox :: ReadP Box
rpBox = do
l <- rpInt
_ <- RP.char 'x'
w <- rpInt
_ <- RP.char 'x'
Box l w <$> rpInt
-- |
-- >>> paperSize $ read "2x3x4"
-- 58
-- >>> paperSize $ read "1x1x10"
-- 43
--
paperSize :: Box -> Int
paperSize = (+) <$> slack <*> area
slack :: Box -> Int
slack (Box l w h) = i * j
where [i,j,_] = sort [l,w,h]
-- |
-- >>> area $ read "2x3x4"
-- 52
-- >>> area $ read "1x1x10"
-- 42
--
area :: Box -> Int
area (Box l w h) = 2*l*w + 2*w*h + 2*h*l
-- |
-- >>> ribbon $ read "2x3x4"
-- 34
-- >>> ribbon $ read "1x1x10"
-- 14
--
ribbon :: Box -> Int
ribbon (Box l w h) = (i + i + j + j) + (i * j * k)
where [i,j,k] = sort [l,w,h]
|
wizzup/advent_of_code
|
2015/haskell/exe/Day02.hs
|
mit
| 1,330 | 0 | 12 | 339 | 522 | 287 | 235 | 37 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Core.InputGLFW() where
import Core.Input
import qualified Graphics.UI.GLFW as GLFW
instance ToKey GLFW.Key where
toKey k = case k of
GLFW.Key'Space -> CharKey ' '
GLFW.Key'Apostrophe -> CharKey '\''
GLFW.Key'Comma -> CharKey ','
GLFW.Key'Minus -> CharKey '-'
GLFW.Key'Period -> CharKey '.'
GLFW.Key'Slash -> CharKey '/'
GLFW.Key'0 -> CharKey '0'
GLFW.Key'1 -> CharKey '1'
GLFW.Key'2 -> CharKey '2'
GLFW.Key'3 -> CharKey '3'
GLFW.Key'4 -> CharKey '4'
GLFW.Key'5 -> CharKey '5'
GLFW.Key'6 -> CharKey '6'
GLFW.Key'7 -> CharKey '7'
GLFW.Key'8 -> CharKey '8'
GLFW.Key'9 -> CharKey '9'
GLFW.Key'Semicolon -> CharKey ';'
GLFW.Key'Equal -> CharKey '='
GLFW.Key'A -> CharKey 'a'
GLFW.Key'B -> CharKey 'b'
GLFW.Key'C -> CharKey 'c'
GLFW.Key'D -> CharKey 'd'
GLFW.Key'E -> CharKey 'e'
GLFW.Key'F -> CharKey 'f'
GLFW.Key'G -> CharKey 'g'
    GLFW.Key'H -> CharKey 'h'
    GLFW.Key'I -> CharKey 'i'
GLFW.Key'J -> CharKey 'j'
GLFW.Key'K -> CharKey 'k'
GLFW.Key'L -> CharKey 'l'
GLFW.Key'M -> CharKey 'm'
GLFW.Key'N -> CharKey 'n'
GLFW.Key'O -> CharKey 'o'
GLFW.Key'P -> CharKey 'p'
GLFW.Key'Q -> CharKey 'q'
GLFW.Key'R -> CharKey 'r'
GLFW.Key'S -> CharKey 's'
GLFW.Key'T -> CharKey 't'
GLFW.Key'U -> CharKey 'u'
GLFW.Key'V -> CharKey 'v'
GLFW.Key'W -> CharKey 'w'
GLFW.Key'X -> CharKey 'x'
GLFW.Key'Y -> CharKey 'y'
GLFW.Key'Z -> CharKey 'z'
GLFW.Key'LeftBracket -> CharKey '['
GLFW.Key'Backslash -> CharKey '\\'
GLFW.Key'RightBracket -> CharKey ']'
GLFW.Key'GraveAccent -> CharKey '`'
v -> SpecialKey $ toSpecialKey v
instance ToKey GLFW.MouseButton where
toKey k = MouseButton $ toMouseButton k
instance ToMouseButton GLFW.MouseButton where
toMouseButton k = case k of
GLFW.MouseButton'1 -> LeftButton
GLFW.MouseButton'2 -> RightButton
GLFW.MouseButton'3 -> MiddleButton
GLFW.MouseButton'4 -> AdditionalButton 4
GLFW.MouseButton'5 -> AdditionalButton 5
GLFW.MouseButton'6 -> AdditionalButton 6
GLFW.MouseButton'7 -> AdditionalButton 7
GLFW.MouseButton'8 -> AdditionalButton 8
instance ToKeyState GLFW.KeyState where
toKeyState s = case s of
GLFW.KeyState'Pressed -> Down
GLFW.KeyState'Released -> Up
GLFW.KeyState'Repeating -> Repeating
instance ToKeyState GLFW.MouseButtonState where
toKeyState s = case s of
GLFW.MouseButtonState'Pressed -> Down
GLFW.MouseButtonState'Released -> Up
instance ToKeyState Bool where
toKeyState True = Down
toKeyState False = Up
instance ToModifiers GLFW.ModifierKeys where
toModifiers (GLFW.ModifierKeys shift' ctrl' alt' super') = Modifiers (toKeyState shift') (toKeyState ctrl') (toKeyState alt') (toKeyState super')
instance ToSpecialKey GLFW.Key where
toSpecialKey k = case k of
GLFW.Key'World1 -> KeyWorld1
GLFW.Key'World2 -> KeyWorld2
GLFW.Key'Escape -> KeyEscape
GLFW.Key'Enter -> KeyEnter
GLFW.Key'Tab -> KeyTab
GLFW.Key'Backspace -> KeyBackspace
GLFW.Key'Insert -> KeyInsert
GLFW.Key'Delete -> KeyDelete
GLFW.Key'Right -> KeyRight
GLFW.Key'Left -> KeyLeft
GLFW.Key'Down -> KeyDown
GLFW.Key'Up -> KeyUp
GLFW.Key'PageUp -> KeyPageUp
GLFW.Key'PageDown -> KeyPageDown
GLFW.Key'Home -> KeyHome
GLFW.Key'End -> KeyEnd
GLFW.Key'CapsLock -> KeyCapsLock
GLFW.Key'ScrollLock -> KeyScrollLock
GLFW.Key'NumLock -> KeyNumLock
GLFW.Key'PrintScreen -> KeyPrintScreen
GLFW.Key'Pause -> KeyPause
GLFW.Key'F1 -> KeyF1
GLFW.Key'F2 -> KeyF2
GLFW.Key'F3 -> KeyF3
GLFW.Key'F4 -> KeyF4
GLFW.Key'F5 -> KeyF5
GLFW.Key'F6 -> KeyF6
GLFW.Key'F7 -> KeyF7
GLFW.Key'F8 -> KeyF8
GLFW.Key'F9 -> KeyF9
GLFW.Key'F10 -> KeyF10
GLFW.Key'F11 -> KeyF11
GLFW.Key'F12 -> KeyF12
GLFW.Key'F13 -> KeyF13
GLFW.Key'F14 -> KeyF14
GLFW.Key'F15 -> KeyF15
GLFW.Key'F16 -> KeyF16
GLFW.Key'F17 -> KeyF17
GLFW.Key'F18 -> KeyF18
GLFW.Key'F19 -> KeyF19
GLFW.Key'F20 -> KeyF20
GLFW.Key'F21 -> KeyF21
GLFW.Key'F22 -> KeyF22
GLFW.Key'F23 -> KeyF23
GLFW.Key'F24 -> KeyF24
GLFW.Key'F25 -> KeyF25
GLFW.Key'Pad0 -> KeyPad0
GLFW.Key'Pad1 -> KeyPad1
GLFW.Key'Pad2 -> KeyPad2
GLFW.Key'Pad3 -> KeyPad3
GLFW.Key'Pad4 -> KeyPad4
GLFW.Key'Pad5 -> KeyPad5
GLFW.Key'Pad6 -> KeyPad6
GLFW.Key'Pad7 -> KeyPad7
GLFW.Key'Pad8 -> KeyPad8
GLFW.Key'Pad9 -> KeyPad9
GLFW.Key'PadDecimal -> KeyPadDecimal
GLFW.Key'PadDivide -> KeyPadDivide
GLFW.Key'PadMultiply -> KeyPadMultiply
GLFW.Key'PadSubtract -> KeyPadSubtract
GLFW.Key'PadAdd -> KeyPadAdd
GLFW.Key'PadEnter -> KeyPadEnter
GLFW.Key'PadEqual -> KeyPadEqual
GLFW.Key'LeftShift -> KeyShiftL
GLFW.Key'LeftControl -> KeyCtrlL
GLFW.Key'LeftAlt -> KeyAltL
GLFW.Key'LeftSuper -> KeySuperL
GLFW.Key'RightShift -> KeyShiftR
GLFW.Key'RightControl -> KeyCtrlR
GLFW.Key'RightAlt -> KeyAltR
GLFW.Key'RightSuper -> KeySuperR
GLFW.Key'Menu -> KeyMenu
v -> KeyUnknown (fromEnum v)
|
NCrashed/sinister
|
src/client/Core/InputGLFW.hs
|
mit
| 5,161 | 0 | 11 | 1,148 | 1,488 | 721 | 767 | 155 | 0 |
module GOL where
import Data.Array.IArray
import Data.Array.Unboxed
data World = World {
gridWidth :: Int,
gridHeight :: Int,
grid :: Grid
} deriving (Show)
type Grid = UArray (Int, Int) Bool
setupGrid :: Int -> Int -> World
setupGrid x y = let cells = replicate (x*y) False
grid = listArray ((0, 0), (x-1, y-1)) cells
in World { gridWidth = x, gridHeight = y, grid = grid }
neighbourhood :: World -> (Int, Int) -> [Bool]
neighbourhood world (x, y) = map (index $ grid world) neighbourPositions
where neighbourPositions = [(a, b) | a <- [x-1..x+1], b <- [y-1..y+1], (a,b) /= (x,y)]
index grid position = grid ! torusIndex position
torusIndex (x, y) = ((w + x) `mod` w, (h + y) `mod` h)
w = gridWidth world
h = gridHeight world
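-- Illustrative note (example coordinates made up): torusIndex wraps
-- coordinates around the grid edges, so in a 10x10 world
-- torusIndex (-1, 0) == (9, 0); neighbourhoods therefore never fall off
-- the board.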
liveNeighbours :: World -> (Int, Int) -> Int
liveNeighbours world position = length . filter (== True) $ neighbourhood world position
liveOrDie :: World -> (Int, Int) -> Bool
liveOrDie world position = case liveNeighbours world position of
2 -> grid world ! position
3 -> True
_ -> False
evolve :: World -> World
evolve g = g { grid = listArray ((0, 0), (gridWidth g - 1, gridHeight g - 1)) newCells }
where positions = indices $ grid g
newCells = map (liveOrDie g) positions
|
Lateks/gol
|
src/GOL.hs
|
mit
| 1,414 | 0 | 12 | 432 | 586 | 325 | 261 | 31 | 3 |
-- xmonad config used by Vic Fryzel
-- Author: Vic Fryzel
-- https://github.com/vicfryzel/xmonad-config
import System.IO
import System.Exit
import XMonad
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.ManageHelpers
import XMonad.Hooks.SetWMName
import XMonad.Layout.Fullscreen
import XMonad.Layout.NoBorders
import XMonad.Layout.Spiral
import XMonad.Layout.Tabbed
import XMonad.Layout.ThreeColumns
import XMonad.Util.Run(spawnPipe)
import XMonad.Util.EZConfig(additionalKeys)
import Graphics.X11.ExtraTypes.XF86
import qualified XMonad.StackSet as W
import qualified Data.Map as M
------------------------------------------------------------------------
-- Terminal
-- The preferred terminal program, which is used in a binding below and by
-- certain contrib modules.
--
--myTerminal = "/usr/local/bin/xfce4-terminal"
myTerminal = "/usr/local/bin/terminator -u"
-- The command to lock the screen or show the screensaver.
myScreensaver = "/usr/bin/xscreensaver-command -l"
-- The command to take a selective screenshot, where you select
-- what you'd like to capture on the screen.
mySelectScreenshot = "select-screenshot"
-- The command to take a fullscreen screenshot.
myScreenshot = "screenshot"
-- The command to use as a launcher, to launch commands that don't have
-- preset keybindings.
myLauncher = "$(yeganesh -x -- -fn 'monospace-8' -nb '#000000' -nf '#FFFFFF' -sb '#7C7C7C' -sf '#CEFFAC')"
-- Location of your xmobar.hs / xmobarrc
myXmobarrc = "~/.xmonad/xmobar-single.hs"
------------------------------------------------------------------------
-- Workspaces
-- The default number of workspaces (virtual screens) and their names.
--
myWorkspaces = ["1:web","2:work","3:code","4:docs","5:media","6:me","7:dia","8:Chat","9:Win"]
--myWorkspaces = ["1:web","2:work","3:code","4:docs","5:media"] ++ map show [6..9]
--myWorkspaces =
-- [
-- "7:Chat", "8:Dbg", "9:Win",
-- "4:Docs", "5:1Dev", "6:1Web",
-- "1:Web", "2:work", "3:Dev",
-- "0:VM", "Extr1", "Extr2"
-- ]
startupWorkspace = "2:work" -- which workspace do you want to be on after launch?
------------------------------------------------------------------------
-- Window rules
-- Execute arbitrary actions and WindowSet manipulations when managing
-- a new window. You can use this to, for example, always float a
-- particular program, or have a client always appear on a particular
-- workspace.
--
-- To find the property name associated with a program, use
-- > xprop | grep WM_CLASS
-- and click on the client you're interested in.
--
-- To match on the WM_NAME, you can use 'title' in the same way that
-- 'className' and 'resource' are used below.
--
myManageHook = composeAll
[ className =? "Chromium" --> doShift "2:web"
, className =? "Google-chrome" --> doShift "2:web"
, resource =? "desktop_window" --> doIgnore
, className =? "Galculator" --> doFloat
, className =? "Steam" --> doFloat
, className =? "Gimp" --> doFloat
, resource =? "gpicview" --> doFloat
, className =? "MPlayer" --> doFloat
, className =? "VirtualBox" --> doShift "4:vm"
, className =? "Xchat" --> doShift "5:media"
, className =? "stalonetray" --> doIgnore
, isFullscreen --> (doF W.focusDown <+> doFullFloat)]
------------------------------------------------------------------------
-- Layouts
-- You can specify and transform your layouts by modifying these values.
-- If you change layout bindings be sure to use 'mod-shift-space' after
-- restarting (with 'mod-q') to reset your layout state to the new
-- defaults, as xmonad preserves your old layout settings by default.
--
-- The available layouts. Note that each layout is separated by |||,
-- which denotes layout choice.
--
myLayout = avoidStruts (
Tall 1 (3/100) (1/2) |||
Mirror (Tall 1 (3/100) (1/2)) |||
ThreeColMid 1 (3/100) (1/2) |||
--Tall 1 (3/100) (1/2) |||
--Mirror (Tall 1 (3/100) (1/2)) |||
tabbed shrinkText tabConfig |||
Full |||
spiral (6/7)) |||
noBorders (fullscreenFull Full)
------------------------------------------------------------------------
-- Colors and borders
-- Currently based on the ir_black theme.
--
myNormalBorderColor = "#7c7c7c"
myFocusedBorderColor = "#ffb6b0"
-- Colors for text and backgrounds of each tab when in "Tabbed" layout.
tabConfig = defaultTheme {
activeBorderColor = "#7C7C7C",
activeTextColor = "#CEFFAC",
activeColor = "#000000",
inactiveBorderColor = "#7C7C7C",
inactiveTextColor = "#EEEEEE",
inactiveColor = "#000000"
}
-- Color of current window title in xmobar.
xmobarTitleColor = "#FFB6B0"
-- Color of current workspace in xmobar.
xmobarCurrentWorkspaceColor = "#CEFFAC"
-- Width of the window border in pixels.
myBorderWidth = 1
------------------------------------------------------------------------
-- Key bindings
--
-- modMask lets you specify which modkey you want to use. The default
-- is mod1Mask ("left alt"). You may also consider using mod3Mask
-- ("right alt"), which does not conflict with emacs keybindings. The
-- "windows key" is usually mod4Mask.
--
myModMask = mod4Mask
--myModMask = mod1Mask
myKeys conf@(XConfig {XMonad.modMask = modMask}) = M.fromList $
----------------------------------------------------------------------
-- Custom key bindings
--
-- Start a terminal. Terminal to start is specified by myTerminal variable.
[ ((modMask .|. shiftMask, xK_Return),
spawn $ XMonad.terminal conf)
-- Lock the screen using command specified by myScreensaver.
, ((modMask .|. controlMask, xK_l),
spawn myScreensaver)
-- Spawn the launcher using command specified by myLauncher.
-- Use this to launch programs without a key binding.
--mod+p evan
--, ((myModMask, xK_p), spawn "synapse")
-- evan 2019 mod+d
, ((myModMask, xK_d), spawn "dmenu_run")
--, ((modMask, xK_p),
-- spawn myLauncher)
-- Take a selective screenshot using the command specified by mySelectScreenshot.
, ((modMask .|. shiftMask, xK_p),
spawn mySelectScreenshot)
-- Take a full screenshot using the command specified by myScreenshot.
, ((modMask .|. controlMask .|. shiftMask, xK_p),
spawn myScreenshot)
-- Mute volume.
, ((0, xF86XK_AudioMute),
spawn "amixer -q set Master toggle")
-- Decrease volume.
, ((0, xF86XK_AudioLowerVolume),
spawn "amixer -q set Master 5%-")
-- Increase volume.
, ((0, xF86XK_AudioRaiseVolume),
spawn "amixer -q set Master 5%+")
-- Mute volume.
, ((modMask .|. controlMask, xK_m),
spawn "amixer -q set Master toggle")
-- Decrease volume.
, ((modMask .|. controlMask, xK_j),
spawn "amixer -q set Master 5%-")
-- Increase volume.
, ((modMask .|. controlMask, xK_k),
spawn "amixer -q set Master 5%+")
-- Audio previous.
, ((0, 0x1008FF16),
spawn "")
-- Play/pause.
, ((0, 0x1008FF14),
spawn "")
-- Audio next.
, ((0, 0x1008FF17),
spawn "")
-- Eject CD tray.
, ((0, 0x1008FF2C),
spawn "eject -T")
--------------------------------------------------------------------
-- "Standard" xmonad key bindings
--
-- Close focused window.
, ((modMask .|. shiftMask, xK_c),
kill)
-- Cycle through the available layout algorithms.
, ((modMask, xK_space),
sendMessage NextLayout)
-- Reset the layouts on the current workspace to default.
, ((modMask .|. shiftMask, xK_space),
setLayout $ XMonad.layoutHook conf)
-- Resize viewed windows to the correct size.
, ((modMask, xK_n),
refresh)
-- Move focus to the next window.
, ((modMask, xK_Tab),
windows W.focusDown)
-- Move focus to the next window.
, ((modMask, xK_j),
windows W.focusDown)
-- Move focus to the previous window.
, ((modMask, xK_k),
windows W.focusUp )
-- Move focus to the master window.
, ((modMask, xK_m),
windows W.focusMaster )
-- Swap the focused window and the master window.
, ((modMask, xK_Return),
windows W.swapMaster)
-- Swap the focused window with the next window.
, ((modMask .|. shiftMask, xK_j),
windows W.swapDown )
-- Swap the focused window with the previous window.
, ((modMask .|. shiftMask, xK_k),
windows W.swapUp )
-- Shrink the master area.
, ((modMask, xK_h),
sendMessage Shrink)
-- Expand the master area.
, ((modMask, xK_l),
sendMessage Expand)
-- Push window back into tiling.
, ((modMask, xK_t),
withFocused $ windows . W.sink)
-- Increment the number of windows in the master area.
, ((modMask, xK_comma),
sendMessage (IncMasterN 1))
-- Decrement the number of windows in the master area.
, ((modMask, xK_period),
sendMessage (IncMasterN (-1)))
-- Toggle the status bar gap.
-- TODO: update this binding with avoidStruts, ((modMask, xK_b),
-- Quit xmonad.
, ((modMask .|. shiftMask, xK_q),
io (exitWith ExitSuccess))
-- Restart xmonad.
, ((modMask, xK_q),
restart "xmonad" True)
]
++
-- mod-[1..9], Switch to workspace N
-- mod-shift-[1..9], Move client to workspace N
[((m .|. modMask, k), windows $ f i)
| (i, k) <- zip (XMonad.workspaces conf) [xK_1 .. xK_9]
, (f, m) <- [(W.greedyView, 0), (W.shift, shiftMask)]]
++
-- mod-{w,e,r}, Switch to physical/Xinerama screens 1, 2, or 3
-- mod-shift-{w,e,r}, Move client to screen 1, 2, or 3
[((m .|. modMask, key), screenWorkspace sc >>= flip whenJust (windows . f))
| (key, sc) <- zip [xK_w, xK_e, xK_r] [0..]
, (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
------------------------------------------------------------------------
-- Mouse bindings
--
-- Focus rules
-- True if your focus should follow your mouse cursor.
myFocusFollowsMouse :: Bool
myFocusFollowsMouse = True
myMouseBindings (XConfig {XMonad.modMask = modMask}) = M.fromList $
[
-- mod-button1, Set the window to floating mode and move by dragging
((modMask, button1),
(\w -> focus w >> mouseMoveWindow w))
-- mod-button2, Raise the window to the top of the stack
, ((modMask, button2),
(\w -> focus w >> windows W.swapMaster))
-- mod-button3, Set the window to floating mode and resize by dragging
, ((modMask, button3),
(\w -> focus w >> mouseResizeWindow w))
-- you may also bind events to the mouse scroll wheel (button4 and button5)
]
------------------------------------------------------------------------
-- Status bars and logging
-- Perform an arbitrary action on each internal state change or X event.
-- See the 'DynamicLog' extension for examples.
--
-- To emulate dwm's status bar
--
-- > logHook = dynamicLogDzen
--
------------------------------------------------------------------------
-- Startup hook
-- Perform an arbitrary action each time xmonad starts or is restarted
-- with mod-q. Used by, e.g., XMonad.Layout.PerWorkspace to initialize
-- per-workspace layout choices.
-- by evan
myStartupHook = do
spawn "/home/evan/.xmonad/autostart.sh"
-- spawnOnce "/home/evan/.xmonad/autostart.sh"
-- By default, do nothing.
--myStartupHook = return ()
------------------------------------------------------------------------
-- Run xmonad with all the defaults we set up.
--
main = do
xmproc <- spawnPipe ("xmobar " ++ myXmobarrc)
xmonad $ defaults {
logHook = dynamicLogWithPP $ xmobarPP {
ppOutput = hPutStrLn xmproc
, ppTitle = xmobarColor xmobarTitleColor "" . shorten 100
, ppCurrent = xmobarColor xmobarCurrentWorkspaceColor ""
, ppSep = " "
}
, manageHook = manageDocks <+> myManageHook
-- , startupHook = docksStartupHook <+> setWMName "LG3D"
, startupHook = setWMName "LG3D"
-- spawn "~/.xmonad/startup-hook" -- evan
, handleEventHook = docksEventHook
}
------------------------------------------------------------------------
-- Combine it all together
-- A structure containing your configuration settings, overriding
-- fields in the default config. Any you don't override, will
-- use the defaults defined in xmonad/XMonad/Config.hs
--
-- No need to modify this.
--
defaults = defaultConfig {
-- simple stuff
terminal = myTerminal,
focusFollowsMouse = myFocusFollowsMouse,
borderWidth = myBorderWidth,
modMask = myModMask,
workspaces = myWorkspaces,
normalBorderColor = myNormalBorderColor,
focusedBorderColor = myFocusedBorderColor,
-- key bindings
keys = myKeys,
mouseBindings = myMouseBindings,
-- hooks, layouts
layoutHook = smartBorders $ myLayout,
manageHook = myManageHook,
startupHook = myStartupHook
}
|
evan886/myxmonad
|
4bsd/2020/xmonad.hs
|
mit
| 12,793 | 0 | 17 | 2,566 | 1,989 | 1,231 | 758 | 169 | 1 |
module MattermostBot.Data (
module MattermostBot.Data.Config
, module MattermostBot.Data.Slack
) where
import MattermostBot.Data.Config
import MattermostBot.Data.Slack
|
marcelbuesing/mattermost-bot
|
src/MattermostBot/Data.hs
|
mit
| 177 | 0 | 5 | 23 | 34 | 23 | 11 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Test.Smoke.App.PrintResults
( printResult,
)
where
import Control.Monad (forM_, when)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Reader (ask)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.String (fromString)
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Vector as Vector
import System.IO.Error (ioeGetErrorString)
import Test.Smoke
import Test.Smoke.App.Diff
import Test.Smoke.App.OptionTypes
import Test.Smoke.App.Print
import Test.Smoke.App.PrintErrors
import Test.Smoke.Paths
import Text.Printf (printf)
data PartName = ShortName String | LongName String
printResult :: TestResult -> Output ()
printResult result@(TestResult testPlan@TestPlan {planTest = test} statusResult stdOutResult stdErrResult fileResults)
| isSuccess result =
putGreenLn " succeeded"
| otherwise = do
printFailingInput
"args"
( Text.unlines . Vector.toList . Vector.map fromString . unArgs
<$> testArgs test
)
printFailingInput "input" (planStdIn testPlan <$ testStdIn test)
printFailingOutput "status" (toAssertionResult ((<> "\n") . showInt . unStatus <$> statusResult))
printFailingOutput "stdout" stdOutResult
printFailingOutput "stderr" stdErrResult
printFailingFilesOutput fileResults
printResult (TestError _ testError) = printTestError testError
printResult (TestIgnored _) = putYellowLn " ignored"
printFailingInput :: (Foldable f, FixtureType a) => String -> f a -> Output ()
printFailingInput name value =
forM_ value $ \v -> do
putRed $ fromString $ indentedKey (" " ++ name ++ ":")
putPlainLn $ indented outputIndentation (serializeFixture v)
printFailingOutput :: FixtureType a => String -> AssertionResult a -> Output ()
printFailingOutput name = printFailures (ShortName name)
printFailingFilesOutput ::
Map (RelativePath File) (AssertionResult TestFileContents) -> Output ()
printFailingFilesOutput fileResults =
if all isSuccess (Map.elems fileResults)
then return ()
else do
putRedLn " files:"
forM_ (Map.assocs fileResults) $ uncurry printFailingFileOutput
printFailingFileOutput :: FixtureType a => RelativePath File -> AssertionResult a -> Output ()
printFailingFileOutput path = printFailures (LongName (" " ++ toFilePath path))
printFailures :: FixtureType a => PartName -> AssertionResult a -> Output ()
printFailures _ AssertionSuccess =
return ()
printFailures name (AssertionFailure (SingleAssertionFailure failure)) = do
printFailureName name (failureIsInline failure)
printFailure failure
printFailures name (AssertionFailure (MultipleAssertionFailures failures)) = do
printFailureName name isInline
printFailure firstFailure
forM_ (Vector.tail failures) $ \failure -> do
putRed " or:"
when isInline $ putPlain " "
printFailure failure
where
firstFailure = Vector.head failures
isInline = failureIsInline firstFailure
printFailureName :: PartName -> Bool -> Output ()
printFailureName (ShortName name) isInline = do
putRed $ " " <> Text.pack name <> ":"
when isInline $ putPlain $ Text.replicate (outputIndentation - length name - 3) " "
printFailureName (LongName name) _ = do
putRedLn $ " " <> Text.pack name <> ":"
putPlain $ fromString $ indentedKey ""
failureIsInline :: AssertionFailure a -> Bool
failureIsInline AssertionFailureDiff {} = True
failureIsInline AssertionFailureContains {} = False
failureIsInline AssertionFailureExpectedFileError {} = True
failureIsInline AssertionFailureActualFileError {} = True
printFailure :: FixtureType a => AssertionFailure a -> Output ()
printFailure (AssertionFailureDiff (Expected expected) (Actual actual)) =
printDiff (serializeFixture expected) (serializeFixture actual)
printFailure (AssertionFailureContains (Expected expected) (Actual actual)) = do
putPlainLn ""
putRedLn " expected to contain:"
putRedLn $ indentedAll nestedOutputIndentation (serializeFixture expected)
putRed " actual: "
putRedLn $ indented nestedOutputIndentation (serializeFixture actual)
printFailure (AssertionFailureExpectedFileError fileError (Actual actual)) = do
printFailureFileError fileError
putRed " actual: "
putRedLn $ indented nestedOutputIndentation (serializeFixture actual)
printFailure (AssertionFailureActualFileError fileError) =
printFailureFileError fileError
printFailureFileError :: SmokeFileError -> Output ()
printFailureFileError (MissingFile path) = do
putPlainLn ""
putRedLn $ " The fixture " <> showPath path <> " does not exist."
printFailureFileError (CouldNotReadFile _ exception) = do
putRedLn $ fromString (ioeGetErrorString exception)
printDiff :: Text -> Text -> Output ()
printDiff left right = do
AppOptions
{ optionsColor = color,
optionsDiffEngine = DiffEngine {engineRender = renderDiff}
} <-
ask
diff <- liftIO $ renderDiff color left right
putPlainLn $ indented outputIndentation diff
toAssertionResult :: FixtureType a => EqualityResult a -> AssertionResult a
toAssertionResult EqualitySuccess = AssertionSuccess
toAssertionResult (EqualityFailure expected actual) = AssertionFailure $ SingleAssertionFailure $ AssertionFailureDiff expected actual
indentedKey :: String -> String
indentedKey = printf ("%-" ++ show outputIndentation ++ "s")
|
SamirTalwar/Smoke
|
src/app/Test/Smoke/App/PrintResults.hs
|
mit
| 5,383 | 0 | 15 | 846 | 1,572 | 772 | 800 | 115 | 2 |
module Mockups.Parsers.Box where
import Control.Applicative
import Data.Attoparsec.Char8
import qualified Data.ByteString.Char8 as BS
import Mockups.Parsers.Combinators
import Mockups.Elements.Element
boxParser :: Parser ContainerAttr
boxParser = vboxParser <|> hboxParser
vboxParser :: Parser ContainerAttr
vboxParser = Vbox <$> withOptsAttrs "vbox" parseSize
hboxParser :: Parser ContainerAttr
hboxParser = Hbox <$> withOptsAttrs "hbox" parseSize
parseSize :: Parser (BS.ByteString, BoxAttr)
parseSize = withAttrName "size" (BoxSize <$> decimal)
|
ostapneko/tiny-mockups
|
src/main/Mockups/Parsers/Box.hs
|
mit
| 609 | 0 | 8 | 116 | 139 | 78 | 61 | 16 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main where
import Nanomsg
import Test.Framework.TH (defaultMainGenerator)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as C
import Control.Concurrent (threadDelay)
import Control.Applicative ( (<$>) )
import Data.Maybe (catMaybes)
instance Arbitrary ByteString where
arbitrary = C.pack <$> arbitrary
-- dummy test
prop_reverse :: [Int] -> Bool
prop_reverse xs =
xs == reverse (reverse xs)
-- test Pub and Sub sockets
prop_PubSub :: Property
prop_PubSub = monadicIO $ do
msgs <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
pub <- socket Pub
ep1 <- bind pub "inproc://pubsub"
sub1 <- socket Sub
ep2 <- connect sub1 "inproc://pubsub"
subscribe sub1 $ C.pack ""
sub2 <- socket Sub
ep3 <- connect sub2 "inproc://pubsub"
subscribe sub2 $ C.pack ""
threadDelay 1000
r <- mapM (sendMsg pub sub1 sub2) msgs
unsubscribe sub2 $ C.pack ""
unsubscribe sub1 $ C.pack ""
shutdown sub2 ep3
shutdown sub1 ep2
shutdown pub ep1
close pub
close sub1
close sub2
threadDelay 1000
return r
assert $ and res
where
sendMsg pub sub1 sub2 msg = do
send pub msg
send pub msg
a <- recv sub1
b <- recv sub1
c <- recv sub2
d <- recv sub2
return $ a == msg && b == msg && c == msg && d == msg
-- test Pair sockets
prop_Pair :: Property
prop_Pair = monadicIO $ do
msgs <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
s1 <- socket Pair
_ <- bind s1 "inproc://pair"
s2 <- socket Pair
_ <- connect s2 "inproc://pair"
threadDelay 1000
-- Send message from s1 to s2, then back from s2 to s1, then make sure it hasn't changed
r <- mapM (\m -> send s1 m >> recv s2 >>= send s2 >> recv s1 >>= return . (== m)) msgs
close s1
close s2
threadDelay 1000
return r
assert $ and res
-- test Pipeline (Push & Pull) sockets
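-- A Push socket load-balances: each message should be delivered to exactly
-- one of the connected Pull sockets, which is why the helper below sends
-- three messages and then checks that exactly three arrive across pull1
-- and pull2.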
prop_Pipeline :: Property
prop_Pipeline = monadicIO $ do
msgs <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
push <- socket Push
_ <- bind push "inproc://pipeline"
pull1 <- socket Pull
pull2 <- socket Pull
_ <- connect pull1 "inproc://pipeline"
_ <- connect pull2 "inproc://pipeline"
threadDelay 1000
r <- mapM (testSockets push pull1 pull2) msgs
close push
close pull1
close pull2
threadDelay 1000
return r
assert $ and res
where
testSockets push pull1 pull2 msg = do
send push msg
send push msg
send push msg
threadDelay 1000
a <- recv' pull1
b <- recv' pull1
c <- recv' pull1
d <- recv' pull2
e <- recv' pull2
f <- recv' pull2
let xs = catMaybes [a, b, c, d, e, f]
return $ all (== msg) xs && (length xs == 3)
-- test Req and Rep sockets
prop_ReqRep :: Property
prop_ReqRep = monadicIO $ do
msgs <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
req <- socket Req
_ <- bind req "inproc://reqrep"
rep <- socket Rep
_ <- connect rep "inproc://reqrep"
threadDelay 1000
r <- mapM (\m -> send req m >> recv rep >>= send rep >> recv req >>= return . (== m)) msgs
close req
close rep
threadDelay 1000
return r
assert $ and res
-- test Bus socket
prop_Bus :: Property
prop_Bus = monadicIO $ do
msgs <- pick arbitrary
pre $ not (null msgs)
res <- run $ do
-- Probably not how you're supposed to connect Bus nodes..
b1 <- socket Bus
_ <- bind b1 "inproc://bus1"
b2 <- socket Bus
_ <- connect b2 "inproc://bus1"
_ <- bind b2 "inproc://bus2"
b3 <- socket Bus
_ <- connect b3 "inproc://bus2"
_ <- bind b3 "inproc://bus3"
_ <- connect b1 "inproc://bus3"
threadDelay 1000
r <- mapM (testSockets b1 b2 b3) msgs
close b1
close b2
close b3
threadDelay 1000
return r
assert $ and res
where
testSockets b1 b2 b3 msg = do
send b1 msg
a <- recv b2
b <- recv b3
send b2 msg
c <- recv b1
d <- recv b3
send b3 msg
e <- recv b1
f <- recv b2
return $ all (== msg) [a, b, c, d, e, f]
prop_TestOptions :: Property
prop_TestOptions = monadicIO $ do
res <- run $ do
req <- socket Req
_ <- bind req "tcp://*:5560"
surveyor <- socket Surveyor
_ <- bind surveyor "inproc://surveyor"
threadDelay 1000
setTcpNoDelay req 1
v1 <- tcpNoDelay req
setTcpNoDelay req 0
v2 <- tcpNoDelay req
setRequestResendInterval req 30000
v3 <- requestResendInterval req
setIpv4Only req 0
v4 <- ipv4Only req
setIpv4Only req 1
v5 <- ipv4Only req
setSndPrio req 7
v6 <- sndPrio req
setReconnectInterval req 50
v7 <- reconnectInterval req
setReconnectIntervalMax req 400
v8 <- reconnectIntervalMax req
setRcvBuf req 200000
v9 <- rcvBuf req
setSndBuf req 150000
v10 <- sndBuf req
setLinger req 500
v11 <- linger req
setSurveyorDeadline surveyor 2000
v12 <- surveyorDeadline surveyor
close req
close surveyor
threadDelay 1000
return [v1 == 1, v2 == 0, v3 == 30000, v4 == 0, v5 == 1, v6 == 7,
v7 == 50, v8 == 400, v9 == 200000, v10 == 150000, v11 == 500, v12 == 2000]
assert $ and res
main :: IO ()
main = $defaultMainGenerator
|
christianlavoie/nanomsg-haskell
|
tests/Properties.hs
|
mit
| 6,271 | 0 | 21 | 2,349 | 2,047 | 912 | 1,135 | 189 | 1 |
longest :: [String] -> String -> String
longest (word:rest) current =
if length word > length current
then longest rest word
else longest rest current
longest [] current = current
longestWord :: String -> String
longestWord text = longest (words text) ""
main :: IO ()
main = do
contents <- getContents
print $ length $ longestWord contents
|
considerate/progp
|
Haskell/longest.hs
|
mit
| 349 | 6 | 8 | 66 | 144 | 71 | 73 | 12 | 2 |
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module KMC.SymbolicFST
(FST(..)
,Edge
,fstStateSize
,fstTransSize
,edgesToList
,edgesFromList
,fstEvalEpsilonEdges
,fstEdges
,fstAbstractEvalEdgesAll
,isChoiceState
,isSkipState
,isJoinState
,coarsestPredicateSet
,prefixTests
,rightClosure,rightInputClosure
,mapEdges
,trim
,eForward,eBackward,eForwardEpsilon,eBackwardEpsilon
,enumerateStates
,run
,runSequential
,runBacktracking
) where
import Control.Applicative
import Control.Monad (guard)
import Data.Map ((!))
import qualified Data.Map as M
import Data.Maybe (maybeToList)
import Data.Monoid
import qualified Data.Set as S
import KMC.Backtracking
import KMC.Theories
import Prelude
-- | (Non-)deterministic Finite State Transducer. To be well-formed, the
-- following conditions must be met:
--
-- (i) For any state s, the only transitions out of s are epsilon-transitions or
-- symbol transitions, but not both.
data FST q pred func =
FST
{ fstS :: S.Set q
, fstE :: OrderedEdgeSet q pred func
, fstI :: q
, fstF :: S.Set q
}
-- | Type synonym for a transition. A transition is either labeled by a
-- predicate and an output function; or it is labeled by an output string. This
-- models a set of transitions between the two states where the output label may
-- depend on the symbol being read; in the case of epsilon-transitions, a plain
-- output string is provided.
type Edge q pred func = (q, Either (pred, func) (Rng func), q)
-- | An ordered edge set with fast reverse lookup.
data OrderedEdgeSet q pred func =
OrderedEdgeSet { eForward :: M.Map q [(pred,func,q)]
, eBackward :: M.Map q [(pred,func,q)]
, eForwardEpsilon :: M.Map q [(Rng func, q)]
, eBackwardEpsilon :: M.Map q [(Rng func, q)]
}
deriving instance (Eq q, Eq pred, Eq func, Eq (Rng func)) => Eq (OrderedEdgeSet q pred func)
deriving instance (Ord q, Ord pred, Ord func, Ord (Rng func)) => Ord (OrderedEdgeSet q pred func)
deriving instance (Show q, Show pred, Show func, Show (Rng func)) => Show (OrderedEdgeSet q pred func)
deriving instance (Eq q, Eq pred, Eq func, Eq (Rng func)) => Eq (FST q pred func)
deriving instance (Ord q, Ord pred, Ord func, Ord (Rng func)) => Ord (FST q pred func)
deriving instance (Show q, Show pred, Show func, Show (Rng func)) => Show (FST q pred func)
-- | Construct an edge set from a list of edges
edgesFromList :: (Ord q) => [Edge q pred func] -> OrderedEdgeSet q pred func
edgesFromList es =
OrderedEdgeSet
{ eForward = M.fromListWith (flip (++)) [ (q, [(a, b, q')]) | (q, Left (a,b), q') <- es ]
, eBackward = M.fromListWith (flip (++)) [ (q', [(a, b, q)]) | (q, Left (a,b), q') <- es ]
, eForwardEpsilon = M.fromListWith (flip (++)) [(q, [(y, q')]) | (q, Right y, q') <- es ]
, eBackwardEpsilon = M.fromListWith (flip (++)) [(q', [(y,q)]) | (q, Right y, q') <- es ]
}
-- | Get the number of states in the FST
fstStateSize :: FST q pred func -> Int
fstStateSize = S.size . fstS
-- | Get the number of transitions in the FST
fstTransSize :: FST q pred func -> Int
fstTransSize fst' = M.size (eForward (fstE fst')) + M.size (eForwardEpsilon (fstE fst'))
{-
unionEdges :: (Ord q) => OrderedEdgeSet q pred func
-> OrderedEdgeSet q pred func
-> OrderedEdgeSet q pred func
unionEdges es es' =
OrderedEdgeSet
{ eForward = M.unionWith (++) (eForward es) (eForward es')
, eBackward = M.unionWith (++) (eBackward es) (eBackward es')
, eForwardEpsilon = M.unionWith (++) (eForwardEpsilon es) (eForwardEpsilon es')
, eBackwardEpsilon = M.unionWith (++) (eBackwardEpsilon es) (eBackwardEpsilon es')
}
-}
flipEdges :: OrderedEdgeSet q pred func -> OrderedEdgeSet q pred func
flipEdges es = OrderedEdgeSet {eForward = eBackward es
,eBackward = eForward es
,eForwardEpsilon = eBackwardEpsilon es
,eBackwardEpsilon = eForwardEpsilon es}
-- | Get the underlying list representation of an ordered edge set
edgesToList :: OrderedEdgeSet q pred func -> [(q, Either (pred, func) (Rng func), q)]
edgesToList es = symEdgesToList (eForward es) ++ epsEdgesToList (eForwardEpsilon es)
-- | Get the list representation of only the symbol edges in an ordered edge set
symEdgesToList :: M.Map q [(pred, func, q)] -> [(q, Either (pred, func) (Rng func), q)]
symEdgesToList es = [ (q, Left (a,b), q') | (q, xs) <- M.toList es, (a,b,q') <- xs ]
-- | Get the list representation of only the epsilon edges in an ordered edge set
epsEdgesToList :: M.Map q [(Rng func, q)] -> [(q, Either (pred, func) (Rng func), q)]
epsEdgesToList es = [ (q, Right y, q') | (q, xs) <- M.toList es, (y, q') <- xs ]
-- | Map four functions over the edges in an FST. Symbol edges may be turned
-- into epsilon edges and vice versa.
mapEdges :: (Ord q)
=> (q -> [(pred1,func1,q)] -> [(pred2,func2,q)])
-> (q -> [(pred1,func1,q)] -> [(Rng func2, q)])
-> (q -> [(Rng func1, q)] -> [(pred2,func2,q)])
-> (q -> [(Rng func1, q)] -> [(Rng func2, q)])
-> FST q pred1 func1 -> FST q pred2 func2
mapEdges symsym symeps epssym epseps fst' =
fst' { fstE = edgesFromList (symEdges1 ++ symEdges2 ++ epsEdges1 ++ epsEdges2) }
where
esym = eForward $ fstE fst'
eeps = eForwardEpsilon $ fstE fst'
symEdges1 = symEdgesToList $ M.mapWithKey symsym $ esym
symEdges2 = symEdgesToList $ M.mapWithKey epssym $ eeps
epsEdges1 = epsEdgesToList $ M.mapWithKey symeps $ esym
epsEdges2 = epsEdgesToList $ M.mapWithKey epseps $ eeps
-- | Given an input symbol and a starting state, compute the concrete set of
-- transitions from that state.
evalEdges :: (Ord st
,Function func
,SetLike pred (Dom func))
=> OrderedEdgeSet st pred func
-> st -> Dom func -> [(Rng func, st)]
evalEdges (OrderedEdgeSet { eForward = me }) q x =
case M.lookup q me of
Nothing -> []
Just es -> concatMap evalEdge es
where
evalEdge (p, f, q')
| member x p = [(eval f x, q')]
| otherwise = []
-- | Given a state, compute the concrete set of epsilon-transitions from that
-- state.
evalEpsilonEdges :: (Ord st) => OrderedEdgeSet st pred func
-> st -> [(Rng func, st)]
evalEpsilonEdges (OrderedEdgeSet { eForwardEpsilon = meps }) q =
case M.lookup q meps of
Nothing -> []
Just es -> es
-- | If abstractEvalEdgesAll fst q phi == [(f1, q1), ..., (fn, qn)], then
-- for all qi and for all a in [[phi]], q steps to qi reading a.
abstractEvalEdgesAll :: (Ord st
,PartialOrder pred)
=>
OrderedEdgeSet st pred func
-> st -> pred -> [(func, st)]
abstractEvalEdgesAll (OrderedEdgeSet { eForward = me}) q p =
case M.lookup q me of
Nothing -> []
Just es -> [ (f, q') | (p', f, q') <- es, p `lte` p' ]
-- | Like evalEpsilonEdges, but given an FST.
fstEvalEpsilonEdges :: (Ord st) => FST st pred func -> st -> [(Rng func, st)]
fstEvalEpsilonEdges aut = evalEpsilonEdges (fstE aut)
-- | Get the list of forward symbol edges from a state
fstEdges :: (Ord st) => FST st pred func -> st -> [(pred, func, st)]
fstEdges fst' q =
case M.lookup q (eForward $ fstE fst') of
Nothing -> []
Just es -> es
-- | Like fstEvalEdges, but given an FST.
fstEvalEdges :: (Ord st
,Function func
,SetLike pred (Dom func))
=> FST st pred func -> st -> Dom func -> [(Rng func, st)]
fstEvalEdges fst' q a = evalEdges (fstE fst') q a
-- | Like abstractEvalEdgesAll, but given an FST.
fstAbstractEvalEdgesAll :: (Ord st
,PartialOrder pred)
=> FST st pred func -> st -> pred -> [(func, st)]
fstAbstractEvalEdgesAll aut = abstractEvalEdgesAll (fstE aut)
-- | Is the given state a non-deterministic choice state, or an input action
-- state?
isChoiceState :: (Ord st) => FST st pred func -> st -> Bool
isChoiceState fst' q =
case (M.member q (eForward . fstE $ fst'), M.member q (eForwardEpsilon . fstE $ fst')) of
(True, True) -> error "Inconsistent FST - a state is both a choice and symbol state"
(_, b) -> b
isSkipState :: (Ord st) => FST st pred func -> st -> Bool
isSkipState fst' q =
case M.lookup q (eForwardEpsilon . fstE $ fst') of
Just [_] -> True
_ -> False
isJoinState :: (Ord st) => FST st pred func -> st -> Bool
isJoinState fst' q =
let nBackEdges = length (concat (maybeToList (M.lookup q (eBackwardEpsilon . fstE $ fst'))))
+ length (concat (maybeToList (M.lookup q (eBackward . fstE $ fst'))))
in nBackEdges > 1
-- | Given a state set A (represented as a list), compute the coarsest predicate
-- set obtained from the set
-- { p | p is a predicate on a transition starting in q, q in A }
coarsestPredicateSet :: (Boolean pred
,PartialOrder pred
,Ord st
,Ord pred) =>
FST st pred func
-> [st]
-> [pred]
coarsestPredicateSet fst' qs = coarsestPartition ps
where
ps = S.toList $ S.fromList
[ p | q <- qs
, (p, _, _) <- maybe [] id (M.lookup q (eForward . fstE $ fst')) ]
-- | Compute an ordered right closure with output
rightClosure :: (Ord st, Monoid (Rng func)) => FST st pred func -> st -> [(Rng func, st)]
rightClosure fst' = snd . go S.empty mempty
where
go vis out q =
case fstEvalEpsilonEdges fst' q of
[] -> (vis, [(out, q)])
xs -> foldl (\(vis', acc) (w, q') ->
if S.member q' vis' then
(vis', acc)
else
let (vis'', ys) = go (S.insert q' vis') (mappend out w) q'
in (vis'', acc ++ ys))
(vis, [])
xs
---------------
-- LCP analysis
---------------
-- | Compute the longest deterministic prefix for a pointed state set
-- (A, q).
ldp :: (Ord st, Ord pred, PartialOrder pred, Boolean pred) =>
FST st pred func -> S.Set st -> st -> [pred]
ldp fst' = go
where
go ctx q =
case M.lookup q (eForward $ fstE fst') of
Just [(p,_,q')] | p `elem` (coarsestPredicateSet fst' $ S.toList ctx) ->
p:go (stepAll fst' p ctx) q'
_ -> []
-- | Given a set A and a predicate p, compute the set A' of states that can be reached via a
-- transition from A labeled by a predicate containing p.
stepAll :: (Ord st, PartialOrder pred) => FST st pred func -> pred -> S.Set st -> S.Set st
stepAll fst' p = S.unions . map aux . S.toList
where
aux q = S.unions $ map (rightInputClosure fst' . snd)
$ fstAbstractEvalEdgesAll fst' q p
-- | Compute an unordered right closure without output on the input automaton of an FST
rightInputClosure :: (Ord st) => FST st pred func -> st -> S.Set st
rightInputClosure fst' = snd . go S.empty
where
go vis q =
case fstEvalEpsilonEdges fst' q of
[] -> (vis, S.singleton q)
xs -> foldl (\(vis', acc) (_, q') ->
if S.member q' vis then
(vis', acc)
else
let (vis'', ys) = go (S.insert q' vis') q'
in (vis'', S.union acc ys))
(vis, S.empty)
xs
prefixTests :: (Boolean pred, PartialOrder pred, Ord st, Ord pred) =>
FST st pred func
-> Bool
-> [st]
-> [([pred], S.Set st)]
prefixTests fst' singletonMode states =
[ (t, killed t) | t <- tests ]
where
ldps = [ (ldp fst' (S.fromList states) q, q) | not singletonMode, q <- states ]
tests = S.toList $ S.fromList $ [ [p] | p <- coarsestPredicateSet fst' states ] ++ map fst ldps
killed t = S.fromList [ q | (ps, q) <- ldps, not (t `entails` ps) ]
entails _ [] = True
entails (t:ts) (p:ps) = (t `eq` p) && (ts `entails` ps)
entails [] (_:_) = False
-- | Substitute state type by any enumerable type.
enumerateStates :: (Ord st, Ord a, Enum a) => FST st pred func -> FST a pred func
enumerateStates fst' =
FST { fstS = S.fromList (M.elems statesMap)
, fstE = edgesFromList
[ (aux q, lbl, aux q') | (q, lbl, q') <- edgesToList (fstE fst') ]
, fstI = aux (fstI fst')
, fstF = S.fromList [ aux q | q <- S.toList (fstF fst') ]
}
where
statesMap = M.fromList (zip (S.toList (fstS fst')) [toEnum 0..])
aux q = statesMap M.! q
accessibleStates :: (Ord q, PartialOrder pred, Boolean pred)
=> OrderedEdgeSet q pred func -> S.Set q -> S.Set q
accessibleStates es initialWS = go initialWS S.empty
where
go ws acc
| S.null ws = acc
| (q, ws') <- S.deleteFindMin ws =
let succs = S.fromList $ map snd (abstractEvalEdgesAll es q bot)
++ map snd (evalEpsilonEdges es q)
in go (S.union ws' (S.difference succs acc)) (S.insert q acc)
coaccessibleStates :: (Ord q, PartialOrder pred, Boolean pred)
=> OrderedEdgeSet q pred func -> S.Set q -> S.Set q
coaccessibleStates es = accessibleStates (flipEdges es)
trim :: (Ord q, PartialOrder pred, Boolean pred)
=> FST q pred func -> FST q pred func
trim fst' =
FST { fstI = fstI fst'
, fstF = S.intersection (fstF fst') useful
, fstS = useful
, fstE = edgesFromList [ (q, lbl, q') | (q, lbl, q') <- edgesToList (fstE fst')
, S.member q useful
, S.member q' useful ]
}
where
useful = S.intersection (accessibleStates (fstE fst') (S.singleton (fstI fst')))
(coaccessibleStates (fstE fst') (fstF fst'))
-------------
-- Simulation
-------------
-- | Simulate a non-deterministic FST with single-symbol predicates
run :: (Function func
,SetLike pred (Dom func)
,Monoid (Rng func)
,Ord st)
=> FST st pred func -> [Dom func] -> [Rng func]
run fst' inp = do
(os, q') <- go [ ([o], q') | (o,q') <- rightClosure fst' (fstI fst') ] inp
guard (S.member q' (fstF fst'))
return (mconcat $ reverse os)
where
go s [] = s
go s (a:as) = go (close . step a $ s) as
close s = prune S.empty [ (o':os, q') | (os, q) <- s, (o', q') <- rightClosure fst' q ]
prune _ [] = []
prune vis ((os,q):s) | S.member q vis = prune vis s
| otherwise = (os,q):prune (S.insert q vis) s
step a s = [ (o':os, q') | (os, q) <- s, (o',q') <- fstEvalEdges fst' q a ]
-- | Simulate a sequential machine with multi-symbol predicates
runSequential :: (Function func
,Dom func ~ [a]
,UniformListSet pred a
,Monoid (Rng func)
,Ord st)
=> FST st pred func -> [a] -> Maybe (Rng func)
runSequential fst' = go (fstI fst')
where
go q [] = if S.member q (fstF fst') then Just mempty else Nothing
go q w | isChoiceState fst' q = do
[(o, q')] <- pure (fstEvalEpsilonEdges fst' q)
o' <- go q' w
return $ mappend o o'
go q w = do
ts <- M.lookup q (eForward $ fstE fst')
[(v, o, q')] <- pure [ (v, eval f u, q')
| (p, f, q') <- ts
, let n = listLength p
, let (u,v) = splitAt n w
, member u p ]
o' <- go q' v
return $ mappend o o'
runBacktracking :: (Monoid m, Function func, SetLike pred (Dom func), Stream s (Dom func))
=> (Rng func -> m)
-> FST Int pred func
-> P s m
runBacktracking phi fst' = lfp!(fstI fst') <* eof
where
lfp = M.fromList [ (q, go q) | q <- S.toList $ fstS fst' ]
go q =
(if isJoinState fst' q then barrier q else pure ())
*> case fstEvalEpsilonEdges fst' q of
[] -> case fstEdges fst' q of
[] -> pure mempty
es -> consume es
-- TODO: Only insert fBarrier if q is on an epsilon-loop
es -> fBarrier q *> choose es
consume es =
foldl1 (<|>)
[ (mappend . phi . eval func <$> litp (flip member p)) <*> (lfp!q')
| (p, func, q') <- es ]
choose [] = empty
choose es = foldl1 (<|>) [ mappend (phi y) <$> (lfp!q') | (y, q') <- es ]
{-
-- | Example: This is how to interpret an action FST
interpAction :: (Function func
,SetLike pred Word8
,Dom func ~ Word8
,Rng func ~ [Either Word8 RegAction])
=> FST Int pred func -> ByteString -> Maybe Action
interpAction fst' b = runP' (runBacktracking (mconcat . map adjActionSem) fst') (0::Int, b)
-}
|
diku-kmc/kleenexlang
|
src/KMC/SymbolicFST.hs
|
mit
| 17,283 | 0 | 21 | 5,322 | 5,930 | 3,149 | 2,781 | 296 | 5 |
{-# LANGUAGE OverloadedStrings #-}
-- | Legacy types from Keter version 0.4. Retained to keep backwards
-- compatibility in config file format.
module Keter.Types.V04 where
import Control.Applicative
import Data.Aeson
import Data.Bool
import Data.Conduit.Network (HostPreference)
import Data.Default
import qualified Data.Set as Set
import Data.String (fromString)
import Data.Yaml.FilePath
import qualified System.FilePath as F
import Keter.Types.Common
import Network.HTTP.ReverseProxy.Rewrite
import qualified Network.Wai.Handler.Warp as Warp
import qualified Network.Wai.Handler.WarpTLS as WarpTLS
import qualified Network.TLS.SessionManager as TLSSession
import Prelude hiding (FilePath)
data AppConfig = AppConfig
{ configExec :: F.FilePath
, configArgs :: [Text]
, configHost :: Text
, configSsl :: Bool
, configExtraHosts :: Set Text
, configRaw :: Object
}
instance ParseYamlFile AppConfig where
parseYamlFile basedir = withObject "AppConfig" $ \o -> AppConfig
<$> lookupBase basedir o "exec"
<*> o .:? "args" .!= []
<*> o .: "host"
<*> o .:? "ssl" .!= False
<*> o .:? "extra-hosts" .!= Set.empty
<*> return o
data BundleConfig = BundleConfig
{ bconfigApp :: Maybe AppConfig
, bconfigStaticHosts :: Set StaticHost
, bconfigRedirects :: Set Redirect
}
instance ParseYamlFile BundleConfig where
parseYamlFile basedir = withObject "BundleConfig" $ \o -> BundleConfig
<$> ((Just <$> parseYamlFile basedir (Object o)) <|> pure Nothing)
<*> lookupBaseMaybe basedir o "static-hosts" .!= Set.empty
<*> o .:? "redirects" .!= Set.empty
data StaticHost = StaticHost
{ shHost :: Text
, shRoot :: FilePath
}
deriving (Eq, Ord)
instance ParseYamlFile StaticHost where
parseYamlFile basedir = withObject "StaticHost" $ \o -> StaticHost
<$> o .: "host"
<*> lookupBase basedir o "root"
data Redirect = Redirect
{ redFrom :: Text
, redTo :: Text
}
deriving (Eq, Ord)
instance FromJSON Redirect where
parseJSON (Object o) = Redirect
<$> o .: "from"
<*> o .: "to"
parseJSON _ = fail "Wanted an object"
data KeterConfig = KeterConfig
{ kconfigDir :: F.FilePath
, kconfigPortMan :: PortSettings
, kconfigHost :: HostPreference
, kconfigPort :: Port
, kconfigSsl :: Maybe TLSConfig
, kconfigSetuid :: Maybe Text
, kconfigReverseProxy :: Set ReverseProxyConfig
, kconfigIpFromHeader :: Bool
, kconfigConnectionTimeBound :: Int
-- ^ Maximum request time in milliseconds per connection.
}
instance Default KeterConfig where
def = KeterConfig
{ kconfigDir = "."
, kconfigPortMan = def
, kconfigHost = "*"
, kconfigPort = 80
, kconfigSsl = Nothing
, kconfigSetuid = Nothing
, kconfigReverseProxy = Set.empty
, kconfigIpFromHeader = False
, kconfigConnectionTimeBound = fiveMinutes
}
-- | Default connection time bound in milliseconds.
fiveMinutes :: Int
fiveMinutes = 5 * 60 * 1000
instance ParseYamlFile KeterConfig where
parseYamlFile basedir = withObject "KeterConfig" $ \o -> KeterConfig
<$> lookupBase basedir o "root"
<*> o .:? "port-manager" .!= def
<*> (fmap fromString <$> o .:? "host") .!= kconfigHost def
<*> o .:? "port" .!= kconfigPort def
<*> (o .:? "ssl" >>= maybe (return Nothing) (fmap Just . parseYamlFile basedir))
<*> o .:? "setuid"
<*> o .:? "reverse-proxy" .!= Set.empty
<*> o .:? "ip-from-header" .!= False
<*> o .:? "connection-time-bound" .!= fiveMinutes
data TLSConfig = TLSConfig !Warp.Settings !WarpTLS.TLSSettings
instance ParseYamlFile TLSConfig where
parseYamlFile basedir = withObject "TLSConfig" $ \o -> do
cert <- lookupBase basedir o "certificate"
key <- lookupBase basedir o "key"
host <- (fmap fromString <$> o .:? "host") .!= "*"
port <- o .:? "port" .!= 443
session <- bool Nothing (Just TLSSession.defaultConfig) <$> o .:? "session" .!= False
return $! TLSConfig
( Warp.setHost host
$ Warp.setPort port
Warp.defaultSettings)
WarpTLS.defaultTlsSettings
{ WarpTLS.certFile = cert
, WarpTLS.keyFile = key
, WarpTLS.tlsSessionManagerConfig = session
}
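-- Illustrative sketch (paths and values are made up): a config file accepted
-- by the two parsers above might look roughly like
--
-- > root: /opt/keter
-- > port: 80
-- > ip-from-header: false
-- > connection-time-bound: 300000
-- > ssl:
-- >   certificate: /opt/keter/cert.pem
-- >   key: /opt/keter/key.pem
-- >   port: 443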
-- | Controls execution of the nginx thread. Follows the settings type pattern.
-- See: <http://www.yesodweb.com/book/settings-types>.
data PortSettings = PortSettings
{ portRange :: [Port]
-- ^ Which ports to assign to apps. Defaults to unassigned ranges from IANA
}
instance Default PortSettings where
def = PortSettings
-- Top 10 Largest IANA unassigned port ranges with no unauthorized uses known
{ portRange = [43124..44320]
++ [28120..29166]
++ [45967..46997]
++ [28241..29117]
++ [40001..40840]
++ [29170..29998]
++ [38866..39680]
++ [43442..44122]
++ [41122..41793]
++ [35358..36000]
}
instance FromJSON PortSettings where
parseJSON = withObject "PortSettings" $ \_ -> PortSettings
<$> return (portRange def)
|
tolysz/keter
|
Keter/Types/V04.hs
|
mit
| 5,798 | 0 | 29 | 1,866 | 1,272 | 698 | 574 | 129 | 1 |
{- |
Module : $Header$
Description : Conversion to core CspCASL
Copyright : (c) Andy Gimblett and Uni Bremen 2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Converting sugared CspCASL to core CspCASL.
The following process types are core:
Skip
Stop
PrefixProcess ev p
ExternalPrefixProcess v es p
InternalPrefixProcess v es p
Sequential p q
ExternalChoice p q
InternalChoice p q
GeneralisedParallel p es q
Hiding p es
RelationalRenaming p RENAMING
NamedProcess pn evs
ConditionalProcess CSP_FORMULA p q
(Also the interrupt operator should be added, and core?)
-}
module CspCASL.Core_CspCASL (basicToCore) where
import Common.Id
import CspCASL.AS_CspCASL
import CspCASL.AS_CspCASL_Process
basicToCore :: CspBasicSpec -> CspBasicSpec
basicToCore c = CspBasicSpec (channels c) (core_procs)
where core_procs = map procEqToCore (proc_items c)
procEqToCore (Proc_Eq pn p) = (Proc_Eq pn (procToCore p))
procEqToCore x = x
procToCore :: PROCESS -> PROCESS
procToCore proc = let p' = procToCore in case proc of
-- First the core operators: we just need to recurse.
(Skip r) -> (Skip r)
(Stop r) -> (Stop r)
(PrefixProcess ev p r) -> (PrefixProcess ev (p' p) r)
(ExternalPrefixProcess v es p r) -> (ExternalPrefixProcess v es (p' p) r)
(InternalPrefixProcess v es p r) -> (InternalPrefixProcess v es (p' p) r)
(Sequential p q r) -> (Sequential (p' p) (p' q) r)
(ExternalChoice p q r) -> (ExternalChoice (p' p) (p' q) r)
(InternalChoice p q r) -> (InternalChoice (p' p) (p' q) r)
(GeneralisedParallel p es q r) -> (GeneralisedParallel (p' p) es (p' q) r)
(Hiding p es r) -> (Hiding (p' p) es r)
(RelationalRenaming p rn r) -> (RelationalRenaming (p' p) rn r)
(NamedProcess pn evs r) -> (NamedProcess pn evs r)
(ConditionalProcess f p q r) -> (ConditionalProcess f (p' p) (p' q) r)
-- Non-core, done.
(Interleaving p q r) -> (GeneralisedParallel (p' p)
(EventSet [] nullRange) (p' q) r)
-- Non-core, not done yet.
(Div r) -> (Div r)
(Run es r) -> (Run es r)
(Chaos es r) -> (Chaos es r)
(SynchronousParallel p q r) -> (SynchronousParallel (p' p) (p' q) r)
(AlphabetisedParallel p esp esq q r) ->
(AlphabetisedParallel (p' p) esp esq (p' q) r)
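-- A small worked example of the desugaring above (a sketch): an interleaving
-- becomes a generalised parallel over the empty event set.
--
-- > procToCore (Interleaving (Skip r) (Stop r) r)
-- >   = GeneralisedParallel (Skip r) (EventSet [] nullRange) (Stop r) r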
|
nevrenato/Hets_Fork
|
CspCASL/Core_CspCASL.hs
|
gpl-2.0
| 2,439 | 0 | 14 | 585 | 773 | 392 | 381 | 32 | 19 |
{- |
Module : $Header$
Description : Guarded Dependency Store
Copyright : (c) Ewaryst Schulz, DFKI Bremen 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
Definition of guarded dependencies resulting from the use of extended
parameters.
-}
module CSL.GuardedDependencies
where
import Common.AS_Annotation
import Common.Doc
import Common.DocUtils
import CSL.AS_BASIC_CSL
import CSL.ASUtils
import CSL.Sign as Sign
import CSL.EPRelation
import Control.Monad
import qualified Data.Set as Set
import qualified Data.Map as Map
-- ** Datatypes and guarded definitions
-- | A guard consists of the guard range and the corresponding expression
-- together with a name, a set of not propagated parameters and a set of
-- constrained parameters (in the extended parameter specification)
data Guard a = Guard { range :: a
, definition :: EXPRESSION
, assName :: String
, filtered :: Set.Set String
, constrained :: Set.Set String }
prettyGuard :: (a -> Doc) -> Guard a -> Doc
prettyGuard f g = f (range g) <+> text "-->" <+> pretty (definition g)
instance Functor Guard where
fmap f (Guard x e an fs ct) = Guard (f x) e an fs ct
instance Pretty a => Pretty (Guard a) where
pretty = prettyGuard pretty
instance Pretty a => Show (Guard a) where
show = show . pretty
-- | A guarded constant consists of the argument list (for function definitions)
-- and a list of guard-expressions
data Guarded a = Guarded { argvars :: [String]
, guards :: [Guard a] }
{- Comment it in if needed later
undefinedGuard :: String -> a -> Guard a
undefinedGuard s x = Guard { range = x
, definition = err
, assName = err
, filtered = err
, constrained = err }
where err = error $ "undefinedGuard: " ++ s
undefinedGuarded :: String -> a -> Guarded a
undefinedGuarded s x = Guarded { argvars = []
, guards = [undefinedGuard s x] }
-}
prettyGuarded :: (a -> Doc) -> Guarded a -> Doc
prettyGuarded f grdd = vcat $ map (prettyGuard f) $ guards grdd
instance Functor Guarded where
fmap f grdd = grdd { guards = map (fmap f) $ guards grdd }
instance Pretty a => Pretty (Guarded a) where
pretty = prettyGuarded pretty
instance Pretty a => Show (Guarded a) where
show = show . pretty
type GuardedMap a = Map.Map String (Guarded a)
addAssignment :: String -> OpDecl -> EXPRESSION -> GuardedMap [EXTPARAM]
-> GuardedMap [EXTPARAM]
addAssignment n (OpDecl sc epl al _) def m =
let combf x y | argvars x == argvars y = y { guards = guards y ++ guards x }
| otherwise =
error "addAssignment: the argument vars does not match."
grd = Guarded (map varDeclName al) [uncurry (Guard epl def n)
$ filteredConstrainedParams epl]
in Map.insertWith combf (simpleName $ OpUser sc) grd m
{- TODO:
1. analysis for missing definitions and undeclared extparams
2. Integrating extparam domain definitions
3. check for each constant if the Guards exhaust the extparam domain (in splitAS)
-}
-- | Splits the Commands into the AssignmentStore and a program sequence
splitAS :: [Named CMD] -> (GuardedMap [EXTPARAM], [Named CMD])
splitAS cl =
let f nc (m,l) = case sentence nc of
Ass c def -> (addAssignment (senAttr nc) c def m, l)
_ -> (m, nc:l)
in foldr f (Map.empty, []) cl
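-- Usage sketch (assuming @cmds :: [Named CMD]@ comes from an earlier analysis
-- phase):
--
-- > let (asgMap, prog) = splitAS cmds
-- > -- asgMap :: GuardedMap [EXTPARAM], keyed by the assigned constants' names
-- > -- prog :: [Named CMD], the non-assignment commands in their original order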
|
nevrenato/Hets_Fork
|
CSL/GuardedDependencies.hs
|
gpl-2.0
| 3,713 | 0 | 15 | 1,079 | 817 | 428 | 389 | 50 | 2 |
{-# LANGUAGE Arrows, NoMonomorphismRestriction #-}
{-
-}
module Main
( main
, mkH1
, mkH2
, mkH3
, mkH4
, mkH5
, mkH6
)
where
{- Standard Library Modules Imported -}
import Control.Arrow
( (>>>) )
import System.Console.GetOpt
( getOpt
, usageInfo
, ArgOrder ( .. )
, OptDescr ( .. )
, ArgDescr ( .. )
)
import System.Environment
( getArgs )
import System.Exit
( exitWith
, ExitCode ( .. )
)
{- External Library Modules Imported -}
import Text.XML.HXT.Arrow
( runX
, ArrowXml
, XmlTree
, IOSArrow
, mkelem
, sattr
)
import qualified Text.XML.HXT.Arrow as Hxt
{- Local Modules Imported -}
{- End of Imports -}
data CliFlag =
CliHelp
| CliVersion
deriving Eq
options :: [ OptDescr CliFlag ]
options =
[ Option "h" [ "help" ]
(NoArg CliHelp)
"Print the help message to standard out and then exit"
, Option "v" [ "version" ]
(NoArg CliVersion)
"Print out the version of this program"
]
helpMessage :: String
helpMessage =
usageInfo "ipcwebgen" options
versionMessage :: String
versionMessage = "This is version 0.001"
-- | The main exported function
main :: IO ()
main = getArgs >>= processOptions
-- Process the options using the getOpt library function.
-- If we encounter any errors we report them together with the usage message.
processOptions :: [ String ] -> IO ()
processOptions cliArgs =
case getOpt Permute options cliArgs of
(flags, args, []) ->
processArgs flags args
(_flags, _args, errors) ->
ioError $ userError (concat errors ++ helpMessage)
-- Currently there are no arguments so we just ignore them.
-- If we are not doing a trivial invocation then we produce the
-- web page.
processArgs :: [ CliFlag ] -> [ String ] -> IO ()
processArgs flags _files
| elem CliHelp flags = putStrLn helpMessage
| elem CliVersion flags = putStrLn versionMessage
| otherwise = produceWebPage flags
-- Produces the web page by running the 'writeIpcPage' arrow.
produceWebPage :: [ CliFlag ] -> IO ()
produceWebPage _flags =
do [rc] <- runX writeIpcPage
if rc >= Hxt.c_err
then exitWith (ExitFailure (negate 1))
else exitWith ExitSuccess
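-- Typical invocations (a sketch): running @ipcwebgen@ with no arguments
-- writes @ipc.html@ to the current directory, while @ipcwebgen --help@ and
-- @ipcwebgen --version@ only print the corresponding messages.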
-- Creates an arrow in which we write the main ipc page to a file.
writeIpcPage :: IOSArrow XmlTree Int
writeIpcPage =
Hxt.root [] [ mainIpcPage ]
>>>
Hxt.writeDocument [(Hxt.a_indent, Hxt.v_1)] "ipc.html"
>>>
Hxt.getErrStatus
-- The main ipc web page stored as an xml arrow
mainIpcPage :: ArrowXml a => a XmlTree XmlTree
mainIpcPage =
mkelem "html" []
[ mkelem "head" []
headElements
, mkelem "body" [ sattr "class" "PEPA"
, sattr "bgcolor" "#FFFFFF"
, sattr "link" "#FF0000"
, sattr "vlink" "#FF0000"
, sattr "alink" "#FF0000"
]
( heading : mainIpcBody)
]
where
headElements = [ mkelem "title" [] [ Hxt.txt "International PEPA Compiler" ]
, mkelem "meta" [ sattr "http-equiv" "Content-Type"
, sattr "content" "text/html; charset=us-ascii"
] []
, mkelem "link" [ sattr "rel" "SHORTCUT ICON"
, sattr "href""http://www.dcs.ed.ac.uk/pepa/favicon3.ico"
] []
, mkelem "link" [ sattr "rel" "stylesheet"
, sattr "type" "text/css"
, sattr "href" cssUrl
] []
, mkelem "link" [ sattr "rel" "alternate"
, sattr "type" "application/rss+xml"
, sattr "href" rssUrl
, sattr "title" "PEPA RSS feed"
] []
]
cssUrl = "http://www.dcs.ed.ac.uk/pepa/pepa.css"
rssUrl = "http://www.dcs.ed.ac.uk/pepa/news/feed.rss"
heading = mkCenter [ headingTable ]
headingTable = mkTable [ sattr "align" "center"
, sattr "width" "100%"
, sattr "cellpadding" "8"
, sattr "summary" "banner"
]
[ mkRow [] [ mkCell [] [ pepaLogoSmall ]
, mkCell [] [ titleTable ]
, mkCell [] [ pepaLogoSmall ]
]
]
titleTable = mkTable [ sattr "align" "center"
, sattr "width" "100%"
, sattr "summary" perfEvalProAlg
]
[ titleTableRow1, titleTableRow2 ]
perfEvalProAlg = "Performance Evaluation Process Algebra"
titleTableRow1 = mkRow [ sattr "align" "center" ] [ mkCell [] [ pepa ] ]
titleTableRow2 = mkRow [ sattr "align" "center" ] [ mkCell [] [ h1Title ] ]
h1Title = mkH1 [mkItalics [mkFontifiedText "#FF0000" "4" titleText]]
titleText = "International PEPA Compiler"
pepa = mkFontifiedText "#FF0000" "6" "PEPA"
--------
pepaLogoSmall :: ArrowXml a => a n XmlTree
pepaLogoSmall =
mkelem "img" attributes []
where
attributes = [ sattr "src" "http://www.dcs.ed.ac.uk/pepa/pepasmall.gif"
, sattr "width" "43"
, sattr "height" "28"
, sattr "alt" "PEPA"
]
mainIpcBody :: ArrowXml a => [ a n XmlTree ]
mainIpcBody =
[ mkParagraph [ mkBold [ mkFontifiedText colourRed "+1" theIPC ] ]
, mkParHead "Background"
, mkParagraph [ Hxt.txt "The International PEPA compiler ("
, ipc
, Hxt.txt ") is a continuation of work on an earlier compiler"
, Hxt.txt " for PEPA, the Imperial PEPA compiler, which "
, Hxt.txt " comiles PEPA models into the input language "
, Hxt.txt "of Will Knottenbelt's "
, dnamaca
, Hxt.txt " tool."
, Hxt.txt "Work on the Imperial PEPA Compiler was undertaken "
, Hxt.txt "at Imperial College, London "
, Hxt.txt "and is available from there."
, Hxt.txt "See the "
, mkLink "http://www.doc.ic.ac.uk/ipc/"
[ Hxt.txt "Imperial PEPA compiler web site" ]
, Hxt.txt " for further details."
]
, mkParHead "Modelling Features"
, mkParagraph [ Hxt.txt "IPC supports the full PEPA language plus "
, Hxt.txt "some non-standard extensions. "
, Hxt.txt "The extensions are:"
]
, mkUlist $ map (mkListItem . (: []) . Hxt.txt) features
, mkParagraph [ Hxt.txt "Thorough static analysis of the input PEPA model "
, Hxt.txt "is performed. "
, Hxt.txt "The current list of static analysis checks detects:"
]
, mkUlist $ map (mkListItem . (: []) . Hxt.txt) sanalyses
, mkParHead "Measurement Features"
, mkParagraph [ Hxt.txt "The International PEPA Compiler supports the full "
, Hxt.txt "measurement specification language XSP"
, Hxt.txt "(eXtended Stochastic Probes). "
, Hxt.txt "This allows us to specify the set(s) of states"
, Hxt.txt "in which we are interested in measuring. "
, Hxt.txt "IPC natively supports the computation of "
, Hxt.txt "probability density and cumulative distribution"
, Hxt.txt "functions for passage-time queries. "
, Hxt.txt "If one specifies that compilation should use the "
, dnamaca
, Hxt.txt " tool to perform the actual numerical analysis then"
, Hxt.txt "one can also perform transient and steady-state "
, Hxt.txt "analysis of the model."
]
, mkParHead "Visualisation Features"
, mkParagraph [ Hxt.txt "The model may (with or without the addition of "
, Hxt.txt "measurement probes) may be output as a "
, mkelem "tt" [] [ Hxt.txt ".dot" ]
, Hxt.txt "file which is in turn converted to a scalable"
, Hxt.txt "vector graphics file (SVG) depicting the state "
, Hxt.txt "space of the input model. The "
, mkelem "tt" [] [ Hxt.txt ".dot" ]
, Hxt.txt "file may also be transformed into a PDF file "
, Hxt.txt "or other output formats see "
, mkLink "www.graphviz.org"
[ Hxt.txt "the graphviz website" ]
, Hxt.txt " for more details."
]
, mkParHead "Experimental Features"
, mkUlist $ map (mkListItem . (: []) . Hxt.txt) expFeats
, mkParHead "Implementation Details"
, mkParagraph [ Hxt.txt "The International PEPA Compiler is written "
, Hxt.txt "in the lazy functional programming language "
, mkLink "http://www.haskell.org"
[ Hxt.txt "Haskell" ]
]
, mkParHead "Downloading and Installing IPC"
, mkParagraph [ Hxt.txt "We provide binary distributions of the "
, mkBold [ Hxt.txt "smc/ipclib/hydra" ]
, Hxt.txt " tool chain for Windows and Linux. "
, Hxt.txt "Windows users must have "
, mkBold [ Hxt.txt "cygwin" ]
, Hxt.txt " installed. It is available for download at "
, mkLink "http://www.cygwin.com"
[ Hxt.txt "http://www.cygwin.com" ]
, Hxt.txt ". Ensure that you include "
, mkBold [ Hxt.txt "gcc"]
, Hxt.txt " and "
, mkBold [ Hxt.txt "g++"]
, Hxt.txt " in your distribution."
]
, mkUlist $ map mkListItem downInstr
, mkOlist $ map mkListItem instInstr
, mkParHead "Building ipc/ipclib and smc from source"
, mkParagraph [ Hxt.txt "The source code for the ipc and smc compilers"
, Hxt.txt " is contained within the ipclib source distribution."
, Hxt.txt " This in turn is stored in a "
, mkLink "http://darcs.net" [ Hxt.txt "darcs" ]
, Hxt.txt " repository. "
, Hxt.txt "There are two ways to obtain the source"
]
, mkOlist $ map mkListItem getSourceI
, mkParagraph [ Hxt.txt "Once you have obtained the source code the "
, Hxt.txt "library and related tools can be compiled "
, Hxt.txt "with the following commands:"
]
, mkOlist $ map mkListItem buildCommands
, mkParagraph [ Hxt.txt "Optionally to install the package "
, Hxt.txt "run the following command:"
]
, installCommand
, mkParagraph [ Hxt.txt "If you do not have root permissions "
, Hxt.txt "(and even if so it's generally a good idea) "
, Hxt.txt "you can install it to a local directory of "
, Hxt.txt "your choice, such as: "
, mkBold [ Hxt.txt "${HOME}/install" ]
, Hxt.txt " by exchanging the first command for the command:"
]
, mkCommandLine "runhaskell Setup.hs configure --user --prefix ${HOME}/install"
, mkParagraph [ Hxt.txt "This will place the executable programs in: "
, mkBold [ Hxt.txt "${HOME}/install/bin/" ]
, Hxt.txt " which should be in your path."
]
, mkParHead "Compiling ipclib under ghc 6.6"
, mkParagraph [ Hxt.txt "During the switch from ghc version 6.6 to "
, Hxt.txt "ghc version 6.8 the 'base' libraries were split "
, Hxt.txt "into several separate packages and the new 'base' "
, Hxt.txt "package was much smaller. For example 'Data.Map' "
, Hxt.txt "was put into the package 'containers'. "
, Hxt.txt "This means that the ipclib.cabal file specifies as "
, Hxt.txt "packages which the build depends upon some which were "
, Hxt.txt "simply not packages in ghc version 6.6. "
, Hxt.txt "Therefore your build configuration will fail with some "
, Hxt.txt "message about build dependencies "
, Hxt.txt "'directory and containers' not being met."
]
, mkParagraph [ Hxt.txt "To resolve this it is recommended that you upgrade "
, Hxt.txt "to ghc version 6.8, if however this is "
, Hxt.txt "impossible/awkward then in the darcs repository "
, Hxt.txt "there is a cabal file which should work with ghc "
, Hxt.txt "version 6.6. This is called: "
, mkBold [ Hxt.txt "ghc.6.6.ipclib_cabal" ]
, Hxt.txt ". To make your build work do, overwrite the "
, mkBold [ Hxt.txt "ipclib.cabal" ]
, Hxt.txt " with the "
, mkBold [ Hxt.txt "ghc6.6.ipclib_cabal" ]
, Hxt.txt " file. You will also need Neil Mitchell's "
, mkBold [ Hxt.txt "filepath" ]
, Hxt.txt " library (which is included with the ghc 6.8 "
, Hxt.txt "version but not 6.6) you can download and install "
, Hxt.txt "in the usual cabal way from "
, mkLink "http://www-users.cs.york.ac.uk/~ndm/filepath/"
[ Hxt.txt "Neil's homepage" ]
]
, mkParagraph [ Hxt.txt "If you do this and you wish to send a patch, "
, Hxt.txt "please remember "
, mkBold [ Hxt.txt "not" ]
, Hxt.txt " to record the (irrelevant) changes to the "
, mkBold [ Hxt.txt "ipclib.cabal" ]
, Hxt.txt "file."
]
, mkParHead "Hydra from Source"
, mkParagraph [ Hxt.txt "Hydra is an optional companion to "
, ipc
, Hxt.txt " which may help to solve larger models. "
, Hxt.txt "The source for "
, hydra
, Hxt.txt " (a Markov chain solver) can also be downloaded "
, Hxt.txt "via darcs. The command is: "
, mkCommandLine hydDarcsGet
, Hxt.txt "Again the source can alternatively be downloaded "
, Hxt.txt "via the tarball at: "
, mkLink hydTarUrl [ Hxt.txt "hydra.tar.gz" ]
]
, mkParagraph [ Hxt.txt "To compile and install issue the commands:" ]
, mkOlist $ map mkListItem hydraCommands
, mkParagraph [ Hxt.txt "Usually you will need to become 'root' "
, Hxt.txt "for the final command. "
, Hxt.txt "As for most autoconf managed programs if "
, Hxt.txt "you wish to install in a non-standard location "
, Hxt.txt "then provide the "
, mkBold [ Hxt.txt "./configure" ]
, Hxt.txt " command with a "
, mkBold [ Hxt.txt "==prefix" ]
, Hxt.txt " option. A typical user install is: "
]
, mkOlist $ map mkListItem hydUserComms
, mkParagraph [ Hxt.txt "Making sure that "
, mkBold [ Hxt.txt "${HOME}/install/bin" ]
, Hxt.txt " is in your "
, mkBold [ Hxt.txt "$PATH" ]
, Hxt.txt " environment variable."
]
]
where
-- mkParHead :: String -> a n XmlTree
mkParHead s = mkParagraph [ mkBold [ mkFontifiedText colourRed "" s ] ]
colourRed = "#FF0000"
theIPC = "IPC: The International PEPA Compiler"
ipc = mkBold [ Hxt.txt "ipc" ]
hydra = mkBold [ Hxt.txt "Hydra" ]
dnamaca = mkBold [ Hxt.txt "DNAmaca" ]
features = [ "immediate actions with optional immediate rates;"
, "functional rates; and"
, "process arrays with or without cooperation"
]
sanalyses = [ "an undefined rate parameter which is used;"
, "a defined rate parameter which is not then used;"
, "an undefined process name which is used"
, "a defined process name which is not used;"
, "self-loops on states " ++
"(which have no meaning at the Markov chain level);"
, "deadlocked states;"
, "cooperations in which one or both sides do not perform "
++ "all of the actions in the cooperation set; and"
, "unnecessary hiding of actions which are not performed by the component."
]
expFeats = [ "translation into PRISM model format - This " ++
"works well for transformation to an explicit " ++
"state space but not as well for PRISM's native model " ++
"description format."
, "Translation into FSP - The implementation of this is " ++
"quite advanced but requires further testing."
, "Translation into Dizzy format - We have only an " ++
"immature implementation so far."
]
downInstr = [ [ Hxt.txt "To download the Windows binary distribution, click "
, mkLink windistUrl [ Hxt.txt "here" ]
]
, [ Hxt.txt "To download the Linux binary distribution, click "
, mkLink lindistUrl [ Hxt.txt "here" ]
, Hxt.txt " - It has been successfully tested on "
, Hxt.txt "Linux Fedora Core 6"
]
]
aclarkUrl = "http://homepages.inf.ed.ac.uk/aclark6/downloads/"
windistUrl = aclarkUrl ++ "ipclib-win-binary-distribution.zip"
lindistUrl = aclarkUrl ++ "ipclib-linux-binary-distribution.tar.gz"
instInstr = [ [ Hxt.txt "Unzip the distribution zip file "
, Hxt.txt "into your filesystem."
]
, [ Hxt.txt "Update your "
, mkBold [ Hxt.txt "PATH" ]
, Hxt.txt " environment variable with "
, mkBold [ Hxt.txt "<local_dir>/ipclib/bin" ]
, Hxt.txt " where "
, mkBold [ Hxt.txt "<local_dir>" ]
, Hxt.txt " is the directory into which you "
, Hxt.txt "unzipped the distribution."
]
]
getSourceI = [ [ Hxt.txt "If you have darcs and you wish to possibly "
, Hxt.txt "modify the source then this is the best way "
, Hxt.txt "to obtain the source code. "
, Hxt.txt "The repository is located at: "
, mkLink ipcRepoUrl [ mkBold [ Hxt.txt ipcRepoUrl ] ]
, Hxt.txt " You can obtain the source through "
, Hxt.txt " the darcs command: "
, mkCommandLine ipcDarcsGet
]
, [ Hxt.txt "Alternatively one can download a "
, mkLink ipcTarUrl [ Hxt.txt "source tarball" ]
, Hxt.txt "And then issue the command: "
, mkCommandLine "tar xzf ipclib.tar.gz"
]
]
ipcRepoUrl = "http://groups.inf.ed.ac.uk/srmc/ipc/ipclib/"
ipcTarUrl = ipcRepoUrl ++ "ipclib.tar.gz"
ipcDarcsGet = "darcs get " ++ ipcRepoUrl
buildCommands = [ [ mkBold [ Hxt.txt "runhaskell Setup configure" ] ]
, [ mkBold [ Hxt.txt "runhaskell Setup build" ] ]
]
installCommand = mkOlist [ mkListItem
[ mkBold [ Hxt.txt "runhaskell Setup install" ] ]
]
hydRepoUrl = "http://groups.inf.ed.ac.uk/srmc/ipc/hydra"
hydTarUrl = hydRepoUrl ++ "/hydra.tar.gz"
hydDarcsGet = "darcs get " ++ hydRepoUrl
hydraCommands = listTxtCommands
[ "cd hydra/src/"
, "autoreconf"
, "./configure"
, "make"
, "make install"
]
hydUserComms = listTxtCommands
[ "cd hydra/src/"
, "autoreconf"
, "./configure ==prefix ${HOME}/install/"
, "make"
, "make install"
]
listTxtCommands = map ( (: []) . mkBold . (: []) . Hxt.txt )
mkCommandLine s = mkUlist [ mkListItem [ mkBold [ Hxt.txt s ] ] ]
{-
<!--
<h3>Hydra from source</h3>
-->
-}
--------
-- Couple of help type synonyms
-- type MakeElem a n = ArrowXml a => [ a n XmlTree ] -> a n XmlTree
-- type MakeElemWith a n = ArrowXml a =>
-- [ a n XmlTree ] -> [ a n XmlTree ] -> a n XmlTree
--------
mkCenter :: ArrowXml a => [ a n XmlTree ] -> a n XmlTree
mkCenter = mkelem "center" []
mkTable :: ArrowXml a => [ a n XmlTree ] -> [ a n XmlTree ] -> a n XmlTree
mkTable = mkelem "table"
mkRow :: ArrowXml a => [ a n XmlTree ] -> [ a n XmlTree ] -> a n XmlTree
mkRow = mkelem "tr"
mkCell :: ArrowXml a => [ a n XmlTree ] -> [ a n XmlTree ] -> a n XmlTree
mkCell = mkelem "td"
mkH1 :: ArrowXml a => [a n XmlTree] -> a n XmlTree
mkH1 = mkelem "h1" []
mkH2 :: ArrowXml a => [a n XmlTree] -> a n XmlTree
mkH2 = mkelem "h2" []
mkH3 :: ArrowXml a => [a n XmlTree] -> a n XmlTree
mkH3 = mkelem "h3" []
mkH4 :: ArrowXml a => [a n XmlTree] -> a n XmlTree
mkH4 = mkelem "h4" []
mkH5 :: ArrowXml a => [a n XmlTree] -> a n XmlTree
mkH5 = mkelem "h5" []
mkH6 :: ArrowXml a => [a n XmlTree] -> a n XmlTree
mkH6 = mkelem "h6" []
mkItalics :: ArrowXml a => [ a n XmlTree ] -> a n XmlTree
mkItalics = mkelem "i" []
mkBold :: ArrowXml a => [ a n XmlTree ] -> a n XmlTree
mkBold = mkelem "b" []
mkFont :: ArrowXml a => [ a n XmlTree ] -> [ a n XmlTree ] -> a n XmlTree
mkFont = mkelem "font"
mkFontifiedText :: ArrowXml a => String -> String -> String -> a n XmlTree
mkFontifiedText colour size text =
mkFont [ sattr "color" colour
, sattr "size" size
]
[ Hxt.txt text ]
mkParagraph :: ArrowXml a => [ a n XmlTree ] -> a n XmlTree
mkParagraph = mkelem "p" []
mkLink :: ArrowXml a => String -> [ a n XmlTree ] -> a n XmlTree
mkLink url = mkelem "a" [ sattr "href" url ]
mkUlist :: ArrowXml a => [ a n XmlTree ] -> a n XmlTree
mkUlist = mkelem "ul" []
mkOlist :: ArrowXml a => [ a n XmlTree ] -> a n XmlTree
mkOlist = mkelem "ol" []
mkListItem :: ArrowXml a => [ a n XmlTree ] -> a n XmlTree
mkListItem = mkelem "li" []
|
allanderek/ipclib
|
web/ipc_web_gen/IpcWeb.hs
|
gpl-2.0
| 22,718 | 0 | 14 | 8,370 | 4,399 | 2,271 | 2,128 | 418 | 2 |
module QLogic.Examples (Lantern(..)
, module QLogic.Poset.Examples
, lanternLogic
, boolean3Logic
, booleanLogic
, concreteLogic
, evenSubsets
, threeBoxLogic)
where
import QLogic
import QLogic.Poset.Examples
import QLogic.Poset.Generic
import QLogic.Poset.ConcretePoset
import QLogic.Utils
import Data.IntSet (IntSet, fromList, isSubsetOf, difference)
-- = Examples
-- |Chinese lantern logic
lanternLogic :: QLogic (Poset Lantern) Lantern
lanternLogic = fromPOrdStruct lanternPoset lanternOcmpl
where
lanternOcmpl Zero = One
lanternOcmpl X0 = X1
lanternOcmpl X1 = X0
lanternOcmpl Y0 = Y1
lanternOcmpl Y1 = Y0
lanternOcmpl One = Zero
boolean3Logic :: QLogic (Poset Boolean3) Boolean3
boolean3Logic = fromPOrdStruct boolean3Poset b3ortho
where
b3ortho Empty = S123
b3ortho S0 = S12
b3ortho S1 = S02
b3ortho S2 = S01
b3ortho S01 = S2
b3ortho S02 = S1
b3ortho S12 = S0
b3ortho S123 = Empty
-- |Boolean logic (subsets of sample space)
booleanLogic :: [Int] -> QLogic ConcretePosetInt IntSet
booleanLogic space = fromPOrdStruct (booleanPoset space) booleanOcmpl
where
spaceSet = fromList space
booleanOcmpl = difference spaceSet
concreteLogic :: [Int] -> [[Int]] -> QLogic ConcretePosetInt IntSet
concreteLogic space els = fromPOrdStruct (ConcretePosetInt elems) booleanOcmpl
where
elems = map fromList els
spaceSet = fromList space
booleanOcmpl = difference spaceSet
evenSubsets :: Int -> QLogic ConcretePosetInt IntSet
evenSubsets n = concreteLogic space $ filter (even . length) $ subsets space
where
space = [0..n-1]
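-- For example (a sketch, assuming 'subsets' from QLogic.Utils enumerates all
-- sublists):
--
-- > evenSubsets 2 -- the concrete logic over [0,1] with elements [] and [0,1]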
data ThreeBox = TZero | A0 | A1 | B0 | B1 | C0 | C1 | A0C0 | A1C0 | B0C0 | B1C0 | TOne deriving (Bounded, Eq, Enum, Ord, Show)
instance POrd ThreeBox where
TZero .<=. _ = True
_ .<=. TOne = True
A0 .<=. C1 = True
A0 .<=. A0C0 = True
A1 .<=. C1 = True
A1 .<=. A1C0 = True
B0 .<=. C1 = True
B0 .<=. B0C0 = True
B1 .<=. C1 = True
B1 .<=. B1C0 = True
C0 .<=. A0C0 = True
C0 .<=. A1C0 = True
C0 .<=. B0C0 = True
C0 .<=. B1C0 = True
a .<=. b = a == b
threeBoxLogic :: QLogic (Poset ThreeBox) ThreeBox
threeBoxLogic = fromPOrdStruct (fromPOrd els) ortho
where
els = [minBound..maxBound] :: [ThreeBox]
ortho TZero = TOne
ortho TOne = TZero
ortho A0 = A1C0
ortho A1 = A0C0
ortho B0 = B1C0
ortho B1 = B0C0
ortho C0 = C1
ortho C1 = C0
ortho A0C0 = A1
ortho A1C0 = A0
ortho B0C0 = B1
ortho B1C0 = B0
-- this is not a logic (sup law does not hold)
-- data Envelope = EZero | L0 | R0 | L1 | R1 | EOne deriving (Bounded, Eq, Enum, Ord, Show)
--
-- instance POrd Envelope where
-- EZero .<=. _ = True
-- _ .<=. EOne = True
-- L0 .<=. L1 = True
-- L0 .<=. R1 = True
-- R0 .<=. L1 = True
-- R0 .<=. R1 = True
-- _ .<=. _ = False
--
-- envelopeLogic :: QLogic Envelope
-- envelopeLogic = fromPOrdStruct (fromPOrd els) ortho
-- where
-- els = [minBound..maxBound] :: [Envelope]
-- ortho EZero = EOne
-- ortho EOne = EZero
-- ortho L0 = R1
-- ortho R0 = L1
-- ortho L1 = R0
-- ortho R1 = L0
|
ttylec/QLogic
|
src/QLogic/Examples.hs
|
gpl-3.0
| 3,622 | 0 | 9 | 1,252 | 856 | 456 | 400 | 76 | 12 |
{-# language TypeFamilies, FlexibleInstances, DeriveFunctor #-}
module Data.Sparse.Internal.SList where
import Data.Sparse.Utils
import Numeric.LinearAlgebra.Class
-- | Sparse list
newtype SList a = SL {unSL :: [(Int, a)]} deriving (Eq, Show, Functor)
emptySL :: SList a
emptySL = SL []
consSL :: (Int, a) -> SList a -> SList a
consSL x (SL xs) = SL (x : xs)
headSL :: SList a -> Maybe (Int, a)
headSL (SL (x:_)) = Just x
headSL (SL []) = Nothing
fromList :: [(Int, a)] -> SList a
fromList = SL
toList :: SList a -> [(Int, a)]
toList = unSL
{-|
NB : unionWith and intersectWith work only if the indices are _sorted_
NB2 : we use the _descending_ order comparison
-}
-- | Inner product between sparse lists
sldot :: (Elt a, Ord i) => [(i, a)] -> [(i, a)] -> a
sldot u v = sum $ intersectWithD pf u v where
pf x y = conj x * y
-- | Vector sum of sparse lists
slsum :: (Ord i, Elt a) => [(i, a)] -> [(i, a)] -> [(i, a)]
slsum = unionWithD (+) 0
-- | `vector-space` instances
instance Elt a => AdditiveGroup (SList a) where
zeroV = SL []
negateV = fmap (* (-1))
u ^+^ v = SL $ slsum (unSL u) (unSL v)
instance Elt a => VectorSpace (SList a) where
type Scalar (SList a) = a
a .* v = fmap (* a) v
instance (AdditiveGroup a, Elt a) => InnerSpace (SList a) where
u <.> v = sldot (unSL u) (unSL v)
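-- Usage sketch for the instances above (when combining two lists, the index
-- ordering convention noted earlier must be respected):
--
-- > negateV (SL [(0, 2.0)]) == SL [(0, -2.0)] -- True
-- > (2.0 :: Double) .* SL [(1, 3.0)] == SL [(1, 6.0)] -- True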
-- instance InnerSpace (SList Double) where
-- u <.> v = inner (unSV u) (unSV v)
-- instance InnerSpace (SList (Complex Double)) where
-- u <.> v = innerC (unSV u) (unSV v)
-- test data
l1, l2 :: [(Int, Double)]
l1 = [(0, pi), (2, pi), (3, 5.4) ]
l2 = [(1, exp 1), (2, 3.4)]
-- l1c :: [(Int, Complex Double)]
-- l1c = zip ii $ zipWith (:+) [1..3] [3,2,1] where
-- ii = [0, 2, 5]
-- sl1c = SL l1c
-- helpers
-- sortIndices :: [(IM.Key, a)] -> [(IM.Key, a)]
-- sortIndices = IM.toList . IM.fromList
|
ocramz/sparse-linear-algebra
|
src/Data/Sparse/Internal/SList.hs
|
gpl-3.0
| 1,853 | 0 | 9 | 426 | 678 | 382 | 296 | 33 | 1 |