code (string, 5–1.03M chars) | repo_name (string, 5–90 chars) | path (string, 4–158 chars) | license (15 classes) | size (int64, 5–1.03M) | n_ast_errors (int64, 0–53.9k) | ast_max_depth (int64, 2–4.17k) | n_whitespaces (int64, 0–365k) | n_ast_nodes (int64, 3–317k) | n_ast_terminals (int64, 1–171k) | n_ast_nonterminals (int64, 1–146k) | loc (int64, -1–37.3k) | cyclomatic_complexity (int64, -1–1.31k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -Wall #-}
module Reporting.Error.Type where
import qualified Data.Map as Map
import qualified Data.Maybe as Maybe
import Text.PrettyPrint.ANSI.Leijen
( Doc, (<>), (<+>), colon, dullyellow
, fillSep, hang, indent, text, underline, vcat
)
import qualified AST.Type as Type
import qualified AST.Variable as Var
import qualified Reporting.Error.Helpers as Help
import qualified Reporting.Region as Region
import qualified Reporting.Render.Type as RenderType
import qualified Reporting.Report as Report
-- ERRORS
data Error
= Mismatch Mismatch
| BadMain Type.Canonical
| InfiniteType String Type.Canonical
data Mismatch = MismatchInfo
{ _hint :: Hint
, _leftType :: Type.Canonical
, _rightType :: Type.Canonical
, _reason :: Maybe Reason
}
data Reason
= MessyFields [String] [String]
| IntFloat
| TooLongComparableTuple Int
| BadVar (Maybe VarType) (Maybe VarType)
data VarType
= Number
| Comparable
| Appendable
| CompAppend
| Rigid (Maybe String)
data Hint
= CaseBranch Int Region.Region
| Case
| IfCondition
| IfBranches
| MultiIfBranch Int Region.Region
| If
| List
| ListElement Int Region.Region
| BinopLeft Var.Canonical Region.Region
| BinopRight Var.Canonical Region.Region
| Binop Var.Canonical
| Function (Maybe Var.Canonical)
| UnexpectedArg (Maybe Var.Canonical) Int Int Region.Region
| FunctionArity (Maybe Var.Canonical) Int Int Region.Region
| BadTypeAnnotation String
| Instance String
| Literal String
| Pattern Pattern
| Shader
| Range
| Lambda
| Record
data Pattern
= PVar String
| PAlias String
| PData String
| PRecord
-- TO REPORT
toReport :: RenderType.Localizer -> Error -> Report.Report
toReport localizer err =
case err of
Mismatch info ->
mismatchToReport localizer info
InfiniteType name overallType ->
infiniteTypeToReport localizer name overallType
BadMain tipe ->
Report.report
"BAD MAIN TYPE"
Nothing
"The 'main' value has an unsupported type."
( Help.stack
[ Help.reflowParagraph $
"I need an Element, Html, (Signal Element), or (Signal Html) so I can render it\
\ on screen, but you gave me:"
, indent 4 (RenderType.toDoc localizer tipe)
]
)
-- TYPE MISMATCHES
mismatchToReport :: RenderType.Localizer -> Mismatch -> Report.Report
mismatchToReport localizer (MismatchInfo hint leftType rightType maybeReason) =
let
report =
Report.report "TYPE MISMATCH"
cmpHint leftWords rightWords extraHints =
comparisonHint localizer leftType rightType leftWords rightWords
( Maybe.maybeToList (reasonToString =<< maybeReason)
++ map toHint extraHints
)
in
case hint of
CaseBranch branchNumber region ->
report
(Just region)
( "The " ++ ordinalPair branchNumber
++ " branches of this `case` produce different types of values."
)
( cmpHint
("The " ++ Help.ordinalize (branchNumber -1) ++ " branch has this type:")
("But the " ++ Help.ordinalize branchNumber ++ " is:")
[ "All branches in a `case` must have the same type. So no matter\
\ which one we take, we always get back the same type of value."
]
)
Case ->
report
Nothing
( "All the branches of this case-expression are consistent, but the overall\n"
++ "type does not match how it is used elsewhere."
)
( cmpHint
"The `case` evaluates to something of type:"
"Which is fine, but the surrounding context wants it to be:"
[]
)
IfCondition ->
report
Nothing
"This condition does not evaluate to a boolean value, True or False."
( cmpHint
"You have given me a condition with this type:"
"But I need it to be:"
[ "Elm does not have \"truthiness\" such that ints and strings and lists\
\ are automatically converted to booleans. Do that conversion explicitly."
]
)
IfBranches ->
report
Nothing
"The branches of this `if` produce different types of values."
( cmpHint
"The `then` branch has type:"
"But the `else` branch is:"
[ "These need to match so that no matter which branch we take, we\
\ always get back the same type of value."
]
)
MultiIfBranch branchNumber region ->
report
(Just region)
( "The " ++ ordinalPair branchNumber
++ " branches of this `if` produce different types of values."
)
( cmpHint
("The " ++ Help.ordinalize (branchNumber - 1) ++ " branch has this type:")
("But the "++ Help.ordinalize branchNumber ++ " is:")
[ "All the branches of an `if` need to match so that no matter which\
\ one we take, we get back the same type of value overall."
]
)
If ->
report
Nothing
"All the branches of this `if` are consistent, but the overall\
\ type does not match how it is used elsewhere."
( cmpHint
"The `if` evaluates to something of type:"
"Which is fine, but the surrounding context wants it to be:"
[]
)
ListElement elementNumber region ->
report
(Just region)
("The " ++ ordinalPair elementNumber ++ " elements are different types of values.")
( cmpHint
("The " ++ Help.ordinalize (elementNumber - 1) ++ " element has this type:")
("But the "++ Help.ordinalize elementNumber ++ " is:")
[ "All elements should be the same type of value so that we can\
\ iterate through the list without running into unexpected values."
]
)
List ->
report
Nothing
( "All the elements in this list are the same type, but the overall\n"
++ "type does not match how it is used elsewhere."
)
( cmpHint
"The list has type:"
"Which is fine, but the surrounding context wants it to be:"
[]
)
BinopLeft op region ->
report
(Just region)
("The left argument of " ++ prettyName op ++ " is causing a type mismatch.")
( cmpHint
(prettyName op ++ " is expecting the left argument to be a:")
"But the left argument is:"
(binopHint op leftType rightType)
)
BinopRight op region ->
report
(Just region)
("The right argument of " ++ prettyName op ++ " is causing a type mismatch.")
( cmpHint
(prettyName op ++ " is expecting the right argument to be a:")
"But the right argument is:"
( binopHint op leftType rightType
++
[ "I always figure out the type of the left argument first and if it is\
\ acceptable on its own, I assume it is \"correct\" in subsequent checks.\
\ So the problem may actually be in how the left and right arguments interact."
]
)
)
Binop op ->
report
Nothing
( "The two arguments to " ++ prettyName op ++
" are fine, but the overall type of this expression\
\ does not match how it is used elsewhere."
)
( cmpHint
"The result of this binary operation is:"
"Which is fine, but the surrounding context wants it to be:"
[]
)
Function maybeName ->
report
Nothing
( "The return type of " ++ funcName maybeName ++ " is being used in unexpected ways."
)
( cmpHint
"The function results in this type of value:"
"Which is fine, but the surrounding context wants it to be:"
[]
)
UnexpectedArg maybeName 1 1 region ->
report
(Just region)
("The argument to " ++ funcName maybeName ++ " is causing a mismatch.")
( cmpHint
(Help.capitalize (funcName maybeName) ++ " is expecting the argument to be:")
"But it is:"
[]
)
UnexpectedArg maybeName index _totalArgs region ->
report
(Just region)
( "The " ++ Help.ordinalize index ++ " argument to " ++ funcName maybeName
++ " is causing a mismatch."
)
( cmpHint
( Help.capitalize (funcName maybeName) ++ " is expecting the "
++ Help.ordinalize index ++ " argument to be:"
)
"But it is:"
( if index == 1 then
[]
else
[ "I always figure out the type of arguments from left to right. If an argument\
\ is acceptable when I check it, I assume it is \"correct\" in subsequent checks.\
\ So the problem may actually be in how previous arguments interact with the "
++ Help.ordinalize index ++ "."
]
)
)
FunctionArity maybeName 0 actual region ->
let
arg =
if actual == 1 then "an argument" else show actual ++ " arguments"
preHint =
case maybeName of
Nothing ->
"You are giving " ++ arg ++ " to something that is not a function!"
Just name ->
prettyName name ++ " is not a function, but you are giving it " ++ arg ++ "!"
in
report
(Just region)
preHint
(text "Maybe you forgot some parentheses? Or a comma?")
FunctionArity maybeName expected actual region ->
let
s = if expected == 1 then "" else "s"
in
report
(Just region)
( Help.capitalize (funcName maybeName) ++ " is expecting " ++ show expected
++ " argument" ++ s ++ ", but was given " ++ show actual ++ "."
)
(text "Maybe you forgot some parentheses? Or a comma?")
BadTypeAnnotation name ->
report
Nothing
("The type annotation for " ++ Help.functionName name ++ " does not match its definition.")
( cmpHint
"The type annotation is saying:"
"But I am inferring that the definition has this type:"
[]
)
Instance name ->
report
Nothing
(Help.functionName name ++ " is being used in an unexpected way.")
( cmpHint
("Based on its definition, " ++ Help.functionName name ++ " has this type:")
"But you are trying to use it as:"
[]
)
Literal name ->
report
Nothing
( "This " ++ name ++ " value is being used as if it is some other type of value."
)
( cmpHint
("The " ++ name ++ " definitely has this type:")
("But it is being used as:")
[]
)
Pattern patErr ->
let
thing =
case patErr of
PVar name -> "variable `" ++ name ++ "`"
PAlias name -> "alias `" ++ name ++ "`"
PData name -> "tag `" ++ name ++ "`"
PRecord -> "a record"
in
report
Nothing
( Help.capitalize thing ++ " is causing problems in this pattern match."
)
( cmpHint
"This pattern matches things of type:"
"But the values it will actually be trying to match are:"
[]
)
Shader ->
report
Nothing
"There is some problem with this GLSL shader."
( cmpHint
"The shader block has this type:"
"Which is fine, but the surrounding context wants it to be:"
[]
)
Range ->
report
Nothing
"The low and high members of this list range are not the same type of value."
( cmpHint
"The low end of the range has type:"
"But the high end is:"
[]
)
Lambda ->
report
Nothing
"This anonymous function is being used in an unexpected way."
( cmpHint
"The anonymous function has type:"
"But you are trying to use it as:"
[]
)
Record ->
report
Nothing
"This record is being used in an unexpected way."
( cmpHint
"The record has type:"
"But you are trying to use it as:"
[]
)
comparisonHint
:: RenderType.Localizer
-> Type.Canonical
-> Type.Canonical
-> String
-> String
-> [Doc]
-> Doc
comparisonHint localizer leftType rightType leftWords rightWords finalHints =
let
(leftDoc, rightDoc) =
RenderType.diffToDocs localizer leftType rightType
in
Help.stack $
[ Help.reflowParagraph leftWords
, indent 4 leftDoc
, Help.reflowParagraph rightWords
, indent 4 rightDoc
]
++
finalHints
-- BINOP HINTS
binopHint :: Var.Canonical -> Type.Canonical -> Type.Canonical -> [String]
binopHint op leftType rightType =
let
leftString =
show (RenderType.toDoc Map.empty leftType)
rightString =
show (RenderType.toDoc Map.empty rightType)
in
if Var.is ["Basics"] "+" op && elem "String" [leftString, rightString] then
[ "To append strings in Elm, you need to use the (++) operator, not (+). "
++ "<http://package.elm-lang.org/packages/elm-lang/core/latest/Basics#++>"
]
else if Var.is ["Basics"] "/" op && elem "Int" [leftString, rightString] then
[ "The (/) operator is specifically for floating point division, and (//) is\
\ for integer division. You may need to do some conversions between ints and\
\ floats to get both arguments matching the division operator you want."
]
else
[]
-- MISMATCH HELPERS
ordinalPair :: Int -> String
ordinalPair number =
Help.ordinalize (number -1 ) ++ " and " ++ Help.ordinalize number
prettyName :: Var.Canonical -> String
prettyName (Var.Canonical _ opName) =
Help.functionName opName
funcName :: Maybe Var.Canonical -> String
funcName maybeVar =
case maybeVar of
Nothing ->
"this function"
Just var ->
"function " ++ prettyName var
-- MISMATCH REASONS
flipReason :: Reason -> Reason
flipReason reason =
case reason of
MessyFields leftOnly rightOnly ->
MessyFields rightOnly leftOnly
IntFloat ->
IntFloat
TooLongComparableTuple len ->
TooLongComparableTuple len
BadVar left right ->
BadVar right left
reasonToString :: Reason -> Maybe Doc
reasonToString reason =
let
go msg =
Just (toHint msg)
in
case reason of
MessyFields leftOnly rightOnly ->
do let typos = Help.findPotentialTypos leftOnly rightOnly
_ <- Help.vetTypos typos
misspellingMessage typos
IntFloat ->
go
"Elm does not automatically convert between Ints and Floats. Use\
\ `toFloat` and `round` to do specific conversions.\
\ <http://package.elm-lang.org/packages/elm-lang/core/latest/Basics#toFloat>"
TooLongComparableTuple len ->
go $
"Although tuples are comparable, this is currently only supported\
\ for tuples with 6 or fewer entries, not " ++ show len ++ "."
BadVar (Just Comparable) _ ->
go "Only ints, floats, chars, strings, lists, and tuples are comparable."
BadVar (Just Appendable) _ ->
go "Only strings, text, and lists are appendable."
BadVar (Just CompAppend) _ ->
go "Only strings and lists are both comparable and appendable."
BadVar (Just (Rigid _)) (Just (Rigid _)) ->
go doubleRigidError
BadVar (Just (Rigid _)) _ ->
go singleRigidError
BadVar _ (Just (Rigid _)) ->
go singleRigidError
BadVar _ _ ->
Nothing
singleRigidError :: String
singleRigidError =
"A type annotation is too generic. You can probably just switch to the\
\ type I inferred. These issues can be subtle though, so read more about it. "
++ Help.hintLink "type-annotations"
doubleRigidError :: String
doubleRigidError =
"A type annotation is clashing with itself or with a sub-annotation.\
\ This can be particularly tricky, so read more about it. "
++ Help.hintLink "type-annotations"
hintDoc :: Doc
hintDoc =
underline (text "Hint") <> colon
toHint :: String -> Doc
toHint str =
fillSep (hintDoc : map text (words str))
misspellingMessage :: [(String,String)] -> Maybe Doc
misspellingMessage typos =
if null typos then
Nothing
else
let
maxLen =
maximum (map (length . fst) typos)
in
Just $ hang 4 $ vcat $
toHint "I compared the record fields and found some potential typos."
: text ""
: map (pad maxLen) typos
pad :: Int -> (String, String) -> Doc
pad maxLen (leftField, rightField) =
text (replicate (maxLen - length leftField) ' ')
<> dullyellow (text leftField)
<+> text "<->"
<+> dullyellow (text rightField)
-- INFINITE TYPES
infiniteTypeToReport
:: RenderType.Localizer
-> String
-> Type.Canonical
-> Report.Report
infiniteTypeToReport localizer name overallType =
Report.report
"INFINITE TYPE"
Nothing
( "I am inferring a weird self-referential type for " ++ Help.functionName name
)
( Help.stack
[ Help.reflowParagraph $
"Here is my best effort at writing down the type. You will see ? and ∞ for\
\ parts of the type that repeat something already printed out infinitely."
, indent 4 (RenderType.toDoc localizer overallType)
, Help.reflowParagraph $
"Usually staring at the type is not so helpful in these cases, so definitely\
\ read the debugging hints for ideas on how to figure this out: "
++ Help.hintLink "infinite-type"
]
)
| laszlopandy/elm-compiler | src/Reporting/Error/Type.hs | bsd-3-clause | 18,809 | 0 | 23 | 6,691 | 3,273 | 1,664 | 1,609 | 439 | 31 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Filter operators for JSON values added to PostgreSQL 9.4
module Database.Persist.Postgresql.JSON
( (@>.)
, (<@.)
, (?.)
, (?|.)
, (?&.)
, Value()
) where
import Data.Aeson (FromJSON, ToJSON, Value, encode, eitherDecodeStrict)
import qualified Data.ByteString.Lazy as BSL
import Data.Proxy (Proxy)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding as TE (encodeUtf8)
import Database.Persist (EntityField, Filter(..), PersistValue(..), PersistField(..), PersistFilter(..))
import Database.Persist.Sql (PersistFieldSql(..), SqlType(..))
import Database.Persist.Types (FilterValue(..))
infix 4 @>., <@., ?., ?|., ?&.
-- | This operator checks inclusion of the JSON value
-- on the right hand side in the JSON value on the left
-- hand side.
--
-- === __Objects__
--
-- An empty Object matches any object
--
-- @
-- {} \@> {} == True
-- {"a":1,"b":false} \@> {} == True
-- @
--
-- Any key-value will be matched top-level
--
-- @
-- {"a":1,"b":{"c":true"}} \@> {"a":1} == True
-- {"a":1,"b":{"c":true"}} \@> {"b":1} == False
-- {"a":1,"b":{"c":true"}} \@> {"b":{}} == True
-- {"a":1,"b":{"c":true"}} \@> {"c":true} == False
-- {"a":1,"b":{"c":true"}} \@> {"b":{c":true}} == True
-- @
--
-- === __Arrays__
--
-- An empty Array matches any array
--
-- @
-- [] \@> [] == True
-- [1,2,"hi",false,null] \@> [] == True
-- @
--
-- Any array has to be a sub-set.
-- Any object or array will also be compared as being a subset of.
--
-- @
-- [1,2,"hi",false,null] \@> [1] == True
-- [1,2,"hi",false,null] \@> [null,"hi"] == True
-- [1,2,"hi",false,null] \@> ["hi",true] == False
-- [1,2,"hi",false,null] \@> ["hi",2,null,false,1] == True
-- [1,2,"hi",false,null] \@> [1,2,"hi",false,null,{}] == False
-- @
--
-- Arrays and objects inside arrays match the same way they'd
-- be matched as being on their own.
--
-- @
-- [1,"hi",[false,3],{"a":[null]}] \@> [{}] == True
-- [1,"hi",[false,3],{"a":[null]}] \@> [{"a":[]}] == True
-- [1,"hi",[false,3],{"a":[null]}] \@> [{"b":[null]}] == False
-- [1,"hi",[false,3],{"a":[null]}] \@> [[]] == True
-- [1,"hi",[false,3],{"a":[null]}] \@> [[3]] == True
-- [1,"hi",[false,3],{"a":[null]}] \@> [[true,3]] == False
-- @
--
-- A regular value has to be a member
--
-- @
-- [1,2,"hi",false,null] \@> 1 == True
-- [1,2,"hi",false,null] \@> 5 == False
-- [1,2,"hi",false,null] \@> "hi" == True
-- [1,2,"hi",false,null] \@> false == True
-- [1,2,"hi",false,null] \@> "2" == False
-- @
--
-- An object will never match with an array
--
-- @
-- [1,2,"hi",[false,3],{"a":null}] \@> {} == False
-- [1,2,"hi",[false,3],{"a":null}] \@> {"a":null} == False
-- @
--
-- === __Other values__
--
-- For any other JSON values the `(\@>.)` operator
-- functions like an equivalence operator.
--
-- @
-- "hello" \@> "hello" == True
-- "hello" \@> \"Hello" == False
-- "hello" \@> "h" == False
-- "hello" \@> {"hello":1} == False
-- "hello" \@> ["hello"] == False
--
-- 5 \@> 5 == True
-- 5 \@> 5.00 == True
-- 5 \@> 1 == False
-- 5 \@> 7 == False
-- 12345 \@> 1234 == False
-- 12345 \@> 2345 == False
-- 12345 \@> "12345" == False
-- 12345 \@> [1,2,3,4,5] == False
--
-- true \@> true == True
-- true \@> false == False
-- false \@> true == False
-- true \@> "true" == False
--
-- null \@> null == True
-- null \@> 23 == False
-- null \@> "null" == False
-- null \@> {} == False
-- @
--
-- @since 2.8.2
(@>.) :: EntityField record Value -> Value -> Filter record
(@>.) field val = Filter field (FilterValue val) $ BackendSpecificFilter " @> "
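-- A minimal usage sketch (illustrative only; the @Item@ entity and its
-- @ItemJson@ 'Value' field are hypothetical, and 'object' / '.=' come from
-- "Data.Aeson", which callers would import themselves):
--
-- > -- select rows whose JSON column contains the object {"a": 1}
-- > selectList [ItemJson @>. object ["a" .= (1 :: Int)]] []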
-- | Same as '@>.' except the inclusion check is reversed.
-- i.e. is the JSON value on the left hand side included
-- in the JSON value of the right hand side.
--
-- @since 2.8.2
(<@.) :: EntityField record Value -> Value -> Filter record
(<@.) field val = Filter field (FilterValue val) $ BackendSpecificFilter " <@ "
-- | This operator takes a column and a string to find a
-- top-level key/field in an object.
--
-- @column ?. string@
--
-- N.B. This operator might have some unexpected interactions
-- with non-object values. Please reference the examples.
--
-- === __Objects__
--
-- @
-- {"a":null} ? "a" == True
-- {"test":false,"a":500} ? "a" == True
-- {"b":{"a":[]}} ? "a" == False
-- {} ? "a" == False
-- {} ? "{}" == False
-- {} ? "" == False
-- {"":9001} ? "" == True
-- @
--
-- === __Arrays__
--
-- This operator will match an array if the string to be matched
-- is an element of that array, but nothing else.
--
-- @
-- ["a"] ? "a" == True
-- [["a"]] ? "a" == False
-- [9,false,"1",null] ? "1" == True
-- [] ? "[]" == False
-- [{"a":true}] ? "a" == False
-- @
--
-- === __Other values__
--
-- This operator functions like an equivalence operator on strings only.
-- Any other value does not match.
--
-- @
-- "a" ? "a" == True
-- "1" ? "1" == True
-- "ab" ? "a" == False
-- 1 ? "1" == False
-- null ? "null" == False
-- true ? "true" == False
-- 1.5 ? "1.5" == False
-- @
--
-- @since 2.10.0
(?.) :: EntityField record Value -> Text -> Filter record
(?.) = jsonFilter " ?? "
-- | This operator takes a column and a list of strings to
-- test whether ANY of the elements of the list are top
-- level fields in an object.
--
-- @column ?|. list@
--
-- /N.B. An empty list __will never match anything__. Also, this/
-- /operator might have some unexpected interactions with/
-- /non-object values. Please reference the examples./
--
-- === __Objects__
--
-- @
-- {"a":null} ?| ["a","b","c"] == True
-- {"test":false,"a":500} ?| ["a","b","c"] == True
-- {} ?| ["a","{}"] == False
-- {"b":{"a":[]}} ?| ["a","c"] == False
-- {"b":{"a":[]},"test":null} ?| [] == False
-- @
--
-- === __Arrays__
--
-- This operator will match an array if __any__ of the elements
-- of the list are matching string elements of the array.
--
-- @
-- ["a"] ?| ["a","b","c"] == True
-- [["a"]] ?| ["a","b","c"] == False
-- [9,false,"1",null] ?| ["a","false"] == False
-- [] ?| ["a","b","c"] == False
-- [] ?| [] == False
-- [{"a":true}] ?| ["a","b","c"] == False
-- [null,4,"b",[]] ?| ["a","b","c"] == True
-- @
--
-- === __Other values__
--
-- This operator functions much like an equivalence operator
-- on strings only. If a string matches with __any__ element of
-- the given list, the comparison matches. No other values match.
--
-- @
-- "a" ?| ["a","b","c"] == True
-- "1" ?| ["a","b","1"] == True
-- "ab" ?| ["a","b","c"] == False
-- 1 ?| ["a","1"] == False
-- null ?| ["a","null"] == False
-- true ?| ["a","true"] == False
-- "a" ?| [] == False
-- @
--
-- @since 2.10.0
(?|.) :: EntityField record Value -> [Text] -> Filter record
(?|.) field = jsonFilter " ??| " field . PostgresArray
-- | This operator takes a column and a list of strings to
-- test whether ALL of the elements of the list are top
-- level fields in an object.
--
-- @column ?&. list@
--
-- /N.B. An empty list __will match anything__. Also, this/
-- /operator might have some unexpected interactions with/
-- /non-object values. Please reference the examples./
--
-- === __Objects__
--
-- @
-- {"a":null} ?& ["a"] == True
-- {"a":null} ?& ["a","a"] == True
-- {"test":false,"a":500} ?& ["a"] == True
-- {"test":false,"a":500} ?& ["a","b"] == False
-- {} ?& ["{}"] == False
-- {"b":{"a":[]}} ?& ["a"] == False
-- {"b":{"a":[]},"c":false} ?& ["a","c"] == False
-- {"a":1,"b":2,"c":3,"d":4} ?& ["b","d"] == True
-- {} ?& [] == True
-- {"b":{"a":[]},"test":null} ?& [] == True
-- @
--
-- === __Arrays__
--
-- This operator will match an array if __all__ of the elements
-- of the list are matching string elements of the array.
--
-- @
-- ["a"] ?& ["a"] == True
-- ["a"] ?& ["a","a"] == True
-- [["a"]] ?& ["a"] == False
-- ["a","b","c"] ?& ["a","b","d"] == False
-- [9,"false","1",null] ?& ["1","false"] == True
-- [] ?& ["a","b"] == False
-- [{"a":true}] ?& ["a"] == False
-- ["a","b","c","d"] ?& ["b","c","d"] == True
-- [null,4,{"test":false}] ?& [] == True
-- [] ?& [] == True
-- @
--
-- === __Other values__
--
-- This operator functions much like an equivalence operator
-- on strings only. If a string matches with all elements of
-- the given list, the comparison matches.
--
-- @
-- "a" ?& ["a"] == True
-- "1" ?& ["a","1"] == False
-- "b" ?& ["b","b"] == True
-- "ab" ?& ["a","b"] == False
-- 1 ?& ["1"] == False
-- null ?& ["null"] == False
-- true ?& ["true"] == False
-- 31337 ?& [] == True
-- true ?& [] == True
-- null ?& [] == True
-- @
--
-- @since 2.10.0
(?&.) :: EntityField record Value -> [Text] -> Filter record
(?&.) field = jsonFilter " ??& " field . PostgresArray
jsonFilter :: PersistField a => Text -> EntityField record Value -> a -> Filter record
jsonFilter op field a = Filter field (UnsafeValue a) $ BackendSpecificFilter op
-----------------
-- AESON VALUE --
-----------------
instance PersistField Value where
toPersistValue = toPersistValueJsonB
fromPersistValue = fromPersistValueJsonB
instance PersistFieldSql Value where
sqlType = sqlTypeJsonB
-- FIXME: PersistText might be a bit more efficient,
-- but needs testing/profiling before changing it.
-- (When entering into the DB the type isn't as important as fromPersistValue)
toPersistValueJsonB :: ToJSON a => a -> PersistValue
toPersistValueJsonB = PersistDbSpecific . BSL.toStrict . encode
fromPersistValueJsonB :: FromJSON a => PersistValue -> Either Text a
fromPersistValueJsonB (PersistText t) =
case eitherDecodeStrict $ TE.encodeUtf8 t of
Left str -> Left $ fromPersistValueParseError "FromJSON" t $ T.pack str
Right v -> Right v
fromPersistValueJsonB (PersistByteString bs) =
case eitherDecodeStrict bs of
Left str -> Left $ fromPersistValueParseError "FromJSON" bs $ T.pack str
Right v -> Right v
fromPersistValueJsonB x = Left $ fromPersistValueError "FromJSON" "string or bytea" x
-- Constraints on the type might not be necessary,
-- but better to leave them in.
sqlTypeJsonB :: (ToJSON a, FromJSON a) => Proxy a -> SqlType
sqlTypeJsonB _ = SqlOther "JSONB"
fromPersistValueError :: Text -- ^ Haskell type, should match Haskell name exactly, e.g. "Int64"
-> Text -- ^ Database type(s), should appear different from Haskell name, e.g. "integer" or "INT", not "Int".
-> PersistValue -- ^ Incorrect value
-> Text -- ^ Error message
fromPersistValueError haskellType databaseType received = T.concat
[ "Failed to parse Haskell type `"
, haskellType
, "`; expected "
, databaseType
, " from database, but received: "
, T.pack (show received)
, ". Potential solution: Check that your database schema matches your Persistent model definitions."
]
fromPersistValueParseError :: (Show a)
=> Text -- ^ Haskell type, should match Haskell name exactly, e.g. "Int64"
-> a -- ^ Received value
-> Text -- ^ Additional error
-> Text -- ^ Error message
fromPersistValueParseError haskellType received err = T.concat
[ "Failed to parse Haskell type `"
, haskellType
, "`, but received "
, T.pack (show received)
, " | with error: "
, err
]
newtype PostgresArray a = PostgresArray [a]
instance PersistField a => PersistField (PostgresArray a) where
toPersistValue (PostgresArray ts) = PersistArray $ toPersistValue <$> ts
fromPersistValue (PersistArray as) = PostgresArray <$> traverse fromPersistValue as
fromPersistValue wat = Left $ fromPersistValueError "PostgresArray" "array" wat
| naushadh/persistent | persistent-postgresql/Database/Persist/Postgresql/JSON.hs | mit | 12,559 | 0 | 10 | 3,289 | 1,290 | 835 | 455 | 82 | 3 |
import Control.Monad (foldM)
folder f a xs = foldM f a xs >> return ()
| mpickering/hlint-refactor | tests/examples/Monad18.hs | bsd-3-clause | 41 | 0 | 7 | 11 | 29 | 13 | 16 | 1 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-
A simple HTTP server that serves static source and data files
for tests, and additionally supports some dynamic responses:
-}
module Server (startServer) where
import Control.Concurrent
import qualified Control.Exception as E
import Control.Monad
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wai
import qualified Network.Wai as W
import qualified Network.Wai.Application.Static as Static
import qualified Network.Wai.Handler.Warp as Warp
import qualified Network.Wai.Handler.WebSockets as WaiWS
import qualified Network.Wai.Parse as NWP
import Network.Socket
import qualified Network.WebSockets as WS
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified Data.Text.Lazy.Encoding as TLE
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Lazy as BL
import Data.Int
import Text.Read
import Prelude hiding (FilePath)
import Filesystem.Path
{-
Start the test server with static file root path on the next
available port number. Returns the port number
-}
startServer :: FilePath -> IO Int
startServer path = do
s <- socket AF_INET Stream defaultProtocol
bind s (SockAddrInet aNY_PORT iNADDR_ANY)
listen s 4
forkIO $
Warp.runSettingsSocket Warp.defaultSettings s
(WaiWS.websocketsOr WS.defaultConnectionOptions
handleWebSocket
(handleReq path))
fromIntegral <$> socketPort s
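-- A sketch of how a test driver might start the server (the "test/data"
-- root and the use of Filesystem.Path.CurrentOS.decodeString are
-- assumptions, not part of this module):
--
-- > import qualified Filesystem.Path.CurrentOS as FP
-- >
-- > main :: IO ()
-- > main = do
-- >   port <- startServer (FP.decodeString "test/data")
-- >   putStrLn ("test server listening on port " ++ show port)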
handleReq :: FilePath -> Network.Wai.Application
handleReq path req resp
| ["empty.html"] <- pi = handleEmpty req resp
-- | ("runmain.js":_) <- rpi = handleRunMain runMain req resp
| ("pong":_) <- pi = handlePong req resp
| ("status":_) <- pi = handleStatus req resp
| ("close":_) <- pi = handleClose req resp
| ("truncate":_) <- pi = handleTruncate req resp
| ("stream":_) <- pi = handleStream req resp
| otherwise = handleStatic path req resp
where
rpi = reverse pi
pi = Network.Wai.pathInfo req
handleStatic :: FilePath -> Network.Wai.Application
handleStatic path = Static.staticApp (Static.defaultFileServerSettings path)
handleEmpty :: Network.Wai.Application
handleEmpty req resp =
let d = "<html><head></head><body></body></html>"
l = BL.toStrict . B.toLazyByteString . B.int64Dec . BL.length $ d
in resp $ W.responseLBS HTTP.status200
[ ("Content-Type" , "text/html")
, ("Content-Length", l)
]
d
-- /**/runmain.js
-- serve a customized webdriver runner
{-
handleRunMain :: BL.ByteString -> Network.Wai.Application
handleRunMain runMain req resp =
resp $ W.responseLBS HTTP.status200
[ ("Content-Type", "application/javascript")
, ("Content-Length", BL.toStrict . B.toLazyByteString .
B.int64Dec . BL.length $ runMain)
]
runMain
-}
-- /pong: respond with same data as GET or POST data argument
handlePong :: Network.Wai.Application
handlePong req resp
| W.requestMethod req == HTTP.methodGet = f (queryString' req)
| W.requestMethod req == HTTP.methodPost =
NWP.parseRequestBody NWP.lbsBackEnd req >>= f . fst
| otherwise = invalidMethod resp
where
f q = let d = maybe "pong" BL.fromStrict (lookup "data" q)
in respondWith resp HTTP.status200 q (BL.length d) d
-- /status/CODE: respond with status code, reply with POST body
-- or data argument, default body if none
handleStatus :: Network.Wai.Application
handleStatus req resp
| W.requestMethod req == HTTP.methodGet = f (queryString' req)
| W.requestMethod req == HTTP.methodPost =
NWP.parseRequestBody NWP.lbsBackEnd req >>= f . fst
| otherwise = invalidMethod resp
where
s | (_:code:_) <- Network.Wai.pathInfo req
, Just c0 <- readMaybeT code = HTTP.mkStatus c0 "Status"
| otherwise = HTTP.status200
f q = let d = maybe "pong" BL.fromStrict (lookup "data" q)
in respondWith resp s q (BL.length d) d
-- /close/DELAY: close the connection without a response after DELAY ms
handleClose :: Network.Wai.Application
handleClose req respond
| (_:delay:_) <- W.pathInfo req
, Just ms <- readMaybeT delay = f ms
| otherwise = f 0
where
f d = do
threadDelay (d*1000)
-- fixme check that this closes the connection
respond $ W.responseLBS (error "no status") [] ""
-- /truncate/BYTES: claims to reply with 2*BYTES response, but
-- closes the connection after sending BYTES bytes
-- default value: 32kiB with 64kiB content length
handleTruncate :: Network.Wai.Application
handleTruncate req resp
| W.requestMethod req == HTTP.methodGet = f (queryString' req)
| W.requestMethod req == HTTP.methodPost =
NWP.parseRequestBody NWP.lbsBackEnd req >>= f . fst
| otherwise = invalidMethod resp
where
l | (_:bytes:_) <- W.pathInfo req
, Just c0 <- readMaybeT bytes = c0
| otherwise = 32768
d = "abcdefghijklmnopqrstuvwxyz1234567890"
f q = respondWith resp HTTP.status200 q (2*l) (BL.take l $ BL.cycle d)
-- /stream/CHUNKSIZE: sends an infinite stream of chunks of size
-- CHUNKSIZE, use delay (ms) for the delay
-- between chunks
handleStream :: Network.Wai.Application
handleStream req resp
| W.requestMethod req == HTTP.methodGet = f (queryString' req)
| W.requestMethod req == HTTP.methodPost =
NWP.parseRequestBody NWP.lbsBackEnd req >>= f . fst
| otherwise = invalidMethod resp
where
chunkSize | (_:cs0:_) <- W.pathInfo req
, Just cs <- readMaybeT cs0 = cs
| otherwise = 32678
d = B.lazyByteString $
BL.take chunkSize (BL.cycle "abcdefghijklmnopqrstuvwxyz1234567890")
hdrs = [("Content-Type", "text/plain")]
f q = do
let delay = maybe (return ()) threadDelay
(readMaybeB =<< lookup "delay" q)
resp $ W.responseStream HTTP.status200 hdrs $ \write flush ->
forever (write d >> flush >> delay)
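-- WebSocket upgrade requests (any path): a numeric text message n triggers
-- n binary test messages for positive n, |n| text test messages for
-- negative n, and a connection close for 0.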
handleWebSocket :: WS.ServerApp
handleWebSocket pending = do
putStrLn "accepting WebSocket request"
conn <- WS.acceptRequest pending
let handleMessages = forever $ do
d <- WS.receiveDataMessage conn
case d of
WS.Text t -> do
putStrLn "received text message"
case reads . TL.unpack . TLE.decodeUtf8 $ t of
[(i, [])] -> case i of
0 -> do
putStrLn "closing connection"
WS.sendClose conn (""::T.Text)
_ | i < 0 -> replicateM_ (negate i) $
WS.sendDataMessage conn (WS.Text "TestTextMessage")
_ -> replicateM_ i $
WS.sendDataMessage conn (WS.Binary "TestBinaryMessage")
_ -> putStrLn "received non-numeric message"
WS.Binary bs ->
putStrLn "received binary message"
handleConnectionException :: WS.ConnectionException -> IO ()
handleConnectionException = print
handleMessages `E.catch` handleConnectionException
----
respondWith :: (W.Response -> IO W.ResponseReceived) -> HTTP.Status -> [(ByteString, ByteString)] -> Int64 -> BL.ByteString -> IO W.ResponseReceived
respondWith respond status query contentLength content = do
maybe (return ()) threadDelay (readMaybeB =<< lookup "delay" query)
let ct = fromMaybe "text/plain" (lookup "content-type" query)
hdrs = [ ("Content-Type", ct)
, ("Content-Length", BL.toStrict . B.toLazyByteString . B.int64Dec $ contentLength)
]
respond (W.responseLBS status hdrs content)
queryString' :: W.Request -> [(ByteString, ByteString)]
queryString' = mapMaybe sequence . W.queryString
readMaybeB :: Read a => ByteString -> Maybe a
readMaybeB = readMaybeT . TE.decodeUtf8With TE.lenientDecode
readMaybeT :: Read a => Text -> Maybe a
readMaybeT = readMaybe . T.unpack
invalidMethod :: (W.Response -> IO W.ResponseReceived) -> IO W.ResponseReceived
invalidMethod respond = respond $
W.responseLBS HTTP.methodNotAllowed405 [] "Method not allowed"
| ryantrinkle/ghcjs | test/Server.hs | mit | 8,755 | 0 | 29 | 2,369 | 2,257 | 1,151 | 1,106 | 156 | 5 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Util.Themes
-- Copyright : (C) 2007 Andrea Rossato
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : unportable
--
-- A (hopefully) growing collection of themes for decorated layouts.
--
-----------------------------------------------------------------------------
module XMonad.Util.Themes
( -- * Usage
-- $usage
listOfThemes
, ppThemeInfo
, xmonadTheme
, smallClean
, robertTheme
, deiflTheme
, oxymor00nTheme
, donaldTheme
, wfarrTheme
, kavonForestTheme
, kavonLakeTheme
, kavonPeacockTheme
, kavonVioGreenTheme
, kavonBluesTheme
, kavonAutumnTheme
, kavonFireTheme
, kavonChristmasTheme
, ThemeInfo (..)
) where
import XMonad.Layout.Decoration
-- $usage
-- This module stores some user contributed themes which can be used
-- with decorated layouts (such as Tabbed). (Note that these themes
-- only apply to decorated layouts, such as those found in
-- "XMonad.Layout.Tabbed" and "XMonad.Layout.DecorationMadness"; they
-- do not apply to xmonad as a whole.)
--
-- If you want to use one of them with one of your decorated layouts,
-- you need to substitute defaultTheme with, for instance, (theme
-- smallClean).
--
-- Here is an example:
--
-- > import XMonad
-- > import XMonad.Util.Themes
-- > import XMonad.Layout.Tabbed
-- >
-- > myLayout = tabbed shrinkText (theme smallClean)
-- >
-- > main = xmonad defaultConfig {layoutHook = myLayout}
--
-- If you have a theme you would like to share, adding it to this
-- module is very easy.
--
-- You can use 'xmonadTheme' or 'smallClean' as a template.
--
-- At the present time only the 'themeName' field is used. But please
-- provide all the other information, which will be used at a later
-- time.
--
-- Please, remember to add your theme to the list of exported
-- functions, and to the 'listOfThemes'.
--
-- Thanks for your contribution!
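--
-- A hypothetical contribution, shown only as a template (pick your own
-- name, author, and colours):
--
-- > myTheme :: ThemeInfo
-- > myTheme =
-- >     newTheme { themeName        = "myTheme"
-- >              , themeAuthor      = "Your Name"
-- >              , themeDescription = "A short description"
-- >              , theme            = defaultTheme { decoHeight = 14 }
-- >              }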
data ThemeInfo =
TI { themeName :: String
, themeAuthor :: String
, themeDescription :: String
, theme :: Theme
}
newTheme :: ThemeInfo
newTheme = TI "" "" "" defaultTheme
ppThemeInfo :: ThemeInfo -> String
ppThemeInfo t = themeName t <> themeDescription t <> "by" <> themeAuthor t
where "" <> x = x
x <> y = x ++ " - " ++ y
listOfThemes :: [ThemeInfo]
listOfThemes = [ xmonadTheme
, smallClean
, deiflTheme
, oxymor00nTheme
, robertTheme
, donaldTheme
, wfarrTheme
, kavonForestTheme
, kavonLakeTheme
, kavonPeacockTheme
, kavonVioGreenTheme
, kavonBluesTheme
, kavonAutumnTheme
, kavonFireTheme
, kavonChristmasTheme
]
-- | The default xmonad theme, by David Roundy.
xmonadTheme :: ThemeInfo
xmonadTheme =
newTheme { themeName = "xmonadTheme"
, themeAuthor = "David Roundy"
, themeDescription = "The default xmonad theme"
, theme = defaultTheme
}
-- | Small decorations with a Ion3 remembrance, by Andrea Rossato.
smallClean :: ThemeInfo
smallClean =
newTheme { themeName = "smallClean"
, themeAuthor = "Andrea Rossato"
, themeDescription = "Small decorations with a Ion3 remembrance"
, theme = defaultTheme { activeColor = "#8a999e"
, inactiveColor = "#545d75"
, activeBorderColor = "white"
, inactiveBorderColor = "grey"
, activeTextColor = "white"
, inactiveTextColor = "grey"
, decoHeight = 14
}
}
-- | Don's preferred colors - from DynamicLog...;)
donaldTheme :: ThemeInfo
donaldTheme =
newTheme { themeName = "donaldTheme"
, themeAuthor = "Andrea Rossato"
, themeDescription = "Don's preferred colors - from DynamicLog...;)"
, theme = defaultTheme { activeColor = "#2b4f98"
, inactiveColor = "#cccccc"
, activeBorderColor = "#2b4f98"
, inactiveBorderColor = "#cccccc"
, activeTextColor = "white"
, inactiveTextColor = "black"
, decoHeight = 16
}
}
-- | From Robert Manea's prompt theme.
robertTheme :: ThemeInfo
robertTheme =
newTheme { themeName = "robertTheme"
, themeAuthor = "Andrea Rossato"
, themeDescription = "From Robert Manea's prompt theme"
, theme = defaultTheme { activeColor = "#aecf96"
, inactiveColor = "#111111"
, activeBorderColor = "#aecf96"
, inactiveBorderColor = "#111111"
, activeTextColor = "black"
, inactiveTextColor = "#d5d3a7"
, fontName = "-*-profont-*-*-*-*-11-*-*-*-*-*-iso8859"
, decoHeight = 16
}
}
-- | deifl\'s Theme, by deifl.
deiflTheme :: ThemeInfo
deiflTheme =
newTheme { themeName = "deiflTheme"
, themeAuthor = "deifl"
, themeDescription = "deifl's Theme"
, theme = defaultTheme { inactiveBorderColor = "#708090"
, activeBorderColor = "#5f9ea0"
, activeColor = "#000000"
, inactiveColor = "#333333"
, inactiveTextColor = "#888888"
, activeTextColor = "#87cefa"
, fontName = "-xos4-terminus-*-*-*-*-12-*-*-*-*-*-*-*"
, decoHeight = 15
}
}
-- | oxymor00n\'s theme, by Tom Rauchenwald.
oxymor00nTheme :: ThemeInfo
oxymor00nTheme =
newTheme { themeName = "oxymor00nTheme"
, themeAuthor = "Tom Rauchenwald"
, themeDescription = "oxymor00n's theme"
, theme = defaultTheme { inactiveBorderColor = "#000"
, activeBorderColor = "aquamarine3"
, activeColor = "aquamarine3"
, inactiveColor = "DarkSlateGray4"
, inactiveTextColor = "#222"
, activeTextColor = "#222"
-- This font can be found in the package ttf-alee
-- on debian-systems
, fontName = "-*-Bandal-*-*-*-*-12-*-*-*-*-*-*-*"
, decoHeight = 15
, urgentColor = "#000"
, urgentTextColor = "#63b8ff"
}
}
wfarrTheme :: ThemeInfo
wfarrTheme =
newTheme { themeName = "wfarrTheme"
, themeAuthor = "Will Farrington"
, themeDescription = "A nice blue/black theme."
, theme = defaultTheme { activeColor = "#4c7899"
, inactiveColor = "#333333"
, activeBorderColor = "#285577"
, inactiveBorderColor = "#222222"
, activeTextColor = "#ffffff"
, inactiveTextColor = "#888888"
, fontName = "-*-fixed-medium-r-*--10-*-*-*-*-*-iso8859-1"
, decoHeight = 12
}
}
-- | Forest colours, by Kathryn Andersen
kavonForestTheme :: ThemeInfo
kavonForestTheme =
newTheme { themeName = "kavonForestTheme"
, themeAuthor = "Kathryn Andersen"
, themeDescription = "Forest colours"
, theme = defaultTheme { activeColor = "#115422"
, activeBorderColor = "#1a8033"
, activeTextColor = "white"
, inactiveColor = "#543211"
, inactiveBorderColor = "#804c19"
, inactiveTextColor = "#ffcc33"
}
}
-- | Lake (blue/green) colours, by Kathryn Andersen
kavonLakeTheme :: ThemeInfo
kavonLakeTheme =
newTheme { themeName = "kavonLakeTheme"
, themeAuthor = "Kathryn Andersen"
, themeDescription = "Lake (blue/green) colours"
, theme = defaultTheme { activeColor = "#001166"
, activeBorderColor = "#1f3999"
, activeTextColor = "white"
, inactiveColor = "#09592a"
, inactiveBorderColor = "#198044"
, inactiveTextColor = "#73e6a3"
}
}
-- | Peacock colours, by Kathryn Andersen
kavonPeacockTheme :: ThemeInfo
kavonPeacockTheme =
newTheme { themeName = "kavonPeacockTheme"
, themeAuthor = "Kathryn Andersen"
, themeDescription = "Peacock colours"
, theme = defaultTheme { activeColor = "#190f4c"
, activeBorderColor = "#2b1980"
, activeTextColor = "white"
, inactiveColor = "#225173"
, inactiveBorderColor = "#2a638c"
, inactiveTextColor = "#8fb2cc"
}
}
-- | Violet-Green colours, by Kathryn Andersen
kavonVioGreenTheme :: ThemeInfo
kavonVioGreenTheme =
newTheme { themeName = "kavonVioGreenTheme"
, themeAuthor = "Kathryn Andersen"
, themeDescription = "Violet-Green colours"
, theme = defaultTheme { activeColor = "#37174c"
, activeBorderColor = "#333399"
, activeTextColor = "white"
, inactiveColor = "#174c17"
, inactiveBorderColor = "#336633"
, inactiveTextColor = "#aaccaa"
}
}
-- | Blue colours, by Kathryn Andersen
kavonBluesTheme :: ThemeInfo
kavonBluesTheme =
newTheme { themeName = "kavonBluesTheme"
, themeAuthor = "Kathryn Andersen"
, themeDescription = "Blue colours"
, theme = defaultTheme { activeColor = "#000066"
, activeBorderColor = "#111199"
, activeTextColor = "white"
, inactiveColor = "#9999ee"
, inactiveBorderColor = "#6666cc"
, inactiveTextColor = "black"
}
}
-- | Christmas colours, by Kathryn Andersen
kavonChristmasTheme :: ThemeInfo
kavonChristmasTheme =
newTheme { themeName = "kavonChristmasTheme"
, themeAuthor = "Kathryn Andersen"
, themeDescription = "Christmas (green + red) colours"
, theme = defaultTheme { activeColor = "#660000"
, activeBorderColor = "#990000"
, activeTextColor = "white"
, inactiveColor = "#006600"
, inactiveBorderColor = "#003300"
, inactiveTextColor = "#99bb99"
}
}
-- | Autumn colours, by Kathryn Andersen
kavonAutumnTheme :: ThemeInfo
kavonAutumnTheme =
newTheme { themeName = "kavonAutumnTheme"
, themeAuthor = "Kathryn Andersen"
, themeDescription = "Autumn (brown + red) colours"
, theme = defaultTheme { activeColor = "#660000"
, activeBorderColor = "#990000"
, activeTextColor = "white"
, inactiveColor = "#542d11"
, inactiveBorderColor = "#804d1A"
, inactiveTextColor = "#ffcc33"
}
}
-- | Fire colours, by Kathryn Andersen
kavonFireTheme :: ThemeInfo
kavonFireTheme =
newTheme { themeName = "kavonFireTheme"
, themeAuthor = "Kathryn Andersen"
, themeDescription = "Fire (orange + red) colours"
, theme = defaultTheme { activeColor = "#660000"
, activeBorderColor = "#990000"
, activeTextColor = "white"
, inactiveColor = "#ff8000"
, inactiveBorderColor = "#d9b162"
, inactiveTextColor = "black"
}
}
| adinapoli/xmonad-contrib | XMonad/Util/Themes.hs | bsd-3-clause | 15,650 | 0 | 8 | 7,897 | 1,462 | 959 | 503 | 220 | 2 |
import System.Environment
main :: IO ()
main = do
[arg] <- getArgs
env <- getEnvironment
putStrLn "Running Background App."
putStrLn $ " Args: " ++ show arg
putStrLn $ "Environment: " ++ show env
putStrLn "Stopping Background App."
| telser/keter | incoming/foo1_0/worker.hs | mit | 265 | 0 | 8 | 71 | 78 | 35 | 43 | 9 | 1 |
{-# LANGUAGE ApplicativeDo #-}
f :: Int -> IO Int
f x = do
y <- return (x + 1)
return (y * 2)
| sdiehl/ghc | testsuite/tests/ghci.debugger/scripts/break029.hs | bsd-3-clause | 99 | 0 | 10 | 29 | 52 | 25 | 27 | 5 | 1 |
module T4007 where
f :: IO ()
f = sequence_ (replicate 10 (putStrLn "yes"))
| urbanslug/ghc | testsuite/tests/perf/compiler/T4007.hs | bsd-3-clause | 78 | 0 | 9 | 16 | 36 | 19 | 17 | 3 | 1 |
-- Test for trac #1042
import Control.Exception
import Data.Int
main :: IO ()
main = do print ((minBound :: Int) `div` (-1)) `myCatch` print
print ((minBound :: Int8) `div` (-1)) `myCatch` print
print ((minBound :: Int16) `div` (-1)) `myCatch` print
print ((minBound :: Int32) `div` (-1)) `myCatch` print
print ((minBound :: Int64) `div` (-1)) `myCatch` print
myCatch :: IO a -> (ArithException -> IO a) -> IO a
myCatch = catch
| urbanslug/ghc | testsuite/tests/numeric/should_run/numrun013.hs | bsd-3-clause | 476 | 0 | 12 | 118 | 221 | 127 | 94 | 10 | 1 |
-- | Gearman specific stuff
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Vaultaire.Collector.Nagios.Perfdata.Gearman where
import Vaultaire.Collector.Nagios.Perfdata.Process
import Vaultaire.Collector.Nagios.Perfdata.Types
import Control.Concurrent
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Monad.State
import Crypto.Cipher.AES
import qualified Data.ByteString as S
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Lazy.Char8 as L
import System.Log.Logger
import Data.Nagios.Perfdata
import System.Gearman.Connection
import System.Gearman.Worker
import Vaultaire.Collector.Common.Process
import Vaultaire.Collector.Common.Types
gearmanProcessDatum :: CollectorOpts NagiosOptions -> CollectorState NagiosState -> WorkerFunc
gearmanProcessDatum o@(_, NagiosOptions{..}) s@(_, NagiosState{..}) Job{..} =
case clearBytes collectorAES jobData of
Left e -> liftIO $ do
errorM "Gearman.gearmanProcessDatum" $ concat ["error decoding: ", show e, " data: ", show jobData]
return $ Left . Just $ L.pack e
Right checkResult -> do
liftIO $ debugM "Gearman.gearmanProcessDatum" $ "Null trimmed data: " ++ (show . trimNulls) checkResult
case perfdataFromGearmanResult checkResult of
Left err -> liftIO $ do
errorM "Gearman.gearmanProcessDatum" $ "Error parsing check result: " ++ err
return $ Left $ Just (L.pack err)
Right datum -> do
liftIO $ debugM "Gearman.gearmanProcessDatum" $ "Got datum: " ++ show datum
_ <- runCollector' o s (return ()) $ processDatum datum
return $ Right "done"
where
clearBytes k d = decodeJob k $ L.toStrict d
trimNulls :: S.ByteString -> S.ByteString
trimNulls = S.reverse . S.dropWhile (0 ==) . S.reverse
-- | Decodes a job's data packet using Base 64
decodeJob :: Maybe AES -> S.ByteString -> Either String S.ByteString
decodeJob k d = case B64.decode d of
Right d' -> Right $ maybeDecrypt k d'
Left e -> Left e
-- | Possible decrypts payload (based on whether key is given)
maybeDecrypt :: Maybe AES -> S.ByteString -> S.ByteString
maybeDecrypt aes ciphertext = case aes of
Nothing -> ciphertext -- Nothing to do, we assume the input is already in cleartext.
Just k -> decryptECB k ciphertext
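-- Illustrative round trip, assuming no encryption key is configured:
--
-- > decodeJob Nothing (B64.encode "{\"foo\":1}") == Right "{\"foo\":1}"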
-- | Sets up the gearman worker daemon and runs a work loop
setupGearman :: Nagios ()
setupGearman = do
o <- ask
s@(_, NagiosState{..}) <- get
let workFunc = gearmanProcessDatum o s
(CommonOpts{..}, opts@NagiosOptions{..}) <- ask
disconnectErrorBox <- liftIO newEmptyMVar
setupConnection disconnectErrorBox workFunc opts
liftIO $ forever $ do
err <- liftIO $ takeMVar disconnectErrorBox
warningM "Gearman.setupGearman" $ concat ["Worker thread disconnected from gearmanServer with: ", err, " starting new connection"]
setupConnection disconnectErrorBox workFunc opts
where
setupConnection box workFunc NagiosOptions{..} = liftIO $ forkIO $ runGearman optGearmanHost optGearmanPort $ do
err <- work [(L.pack optFunctionName, workFunc, Nothing)]
liftIO $ putMVar box err
| anchor/vaultaire-collector-nagios | lib/Vaultaire/Collector/Nagios/Perfdata/Gearman.hs | mit | 3,503 | 0 | 20 | 905 | 867 | 446 | 421 | 62 | 3 |
module Main where
sum' :: (Num a) => [a] -> a
sum' xs = foldl (\acc x -> acc + x) 0 xs
sum2 :: (Num a) => [a] -> a
sum2 = foldl (+) 0
| rglew/lyah | foldl.hs | mit | 137 | 0 | 8 | 39 | 87 | 49 | 38 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | This module handles all of the scans requested by the user
module Scans (attachScanEvents, initScanState, scanShape,
ScanState(step,rotations,top,bottom,offset,choice),
scansReady, populateTable,dropScan,updateTitle,toFile,MouseState) where
import Data.IORef
import Data.List (delete,intercalate)
import Haste
import Haste.DOM
import Haste.JSON
import Haste.Events
import Haste.Graphics.Canvas
import Control.Monad (forM,forM_,(>=>))
import Text.Printf
import Prelude hiding (head, tail, init, last, read,(!!))
import Safe (headMay,atMay)
import JSON
data Scan = Scan {start :: Point,
stop :: Point,
title :: String}
deriving (Show, Eq)
instance JSONable Scan where
toJSON s = Dict [("title",Str . toJSString$ title s),
("points",Arr . map toJSON $ [start s,stop s])]
fromJSON d@(Dict _) = Scan <$> (getJArr d "points" >>= headMay >>= fromJSON)<*>(getJArr d "points" >>= flip atMay 1 >>= fromJSON) <*> ((d ~> "title") >>= fromJSONStr)
fromJSON _ = Nothing
fromJSONStr :: JSON -> Maybe String
fromJSONStr (Str x) = Just (toString x)
fromJSONStr _ = Nothing
getJArr :: JSON -> JSString -> Maybe [JSON]
getJArr d k = case d ~> k of
Nothing -> Nothing
Just (Arr x) -> Just x
Just _ -> Nothing
-- | Whether the user is currently performing a drag or leaving the mouse free
data MouseState = Free | Dragging
deriving (Show,Eq)
instance JSONable MouseState where
toJSON = Str . toJSString . show
fromJSON (Str x) = case fromJSStr x of
"Dragging" -> Just Dragging
"Free" -> Just Free
_ -> Nothing
fromJSON _ = Nothing
data Frame = Top | Bottom
deriving (Eq, Show, Read)
instance JSONable Frame where
toJSON =Str . toJSString .show
fromJSON (Str x) = case fromJSStr x of
"Top" -> Just Top
"Bottom" -> Just Bottom
_ -> Nothing
fromJSON _ = Nothing
-- | The complete state of the user's scanning selections
data ScanState = ScanState {mouse :: MouseState, -- ^ whether a new
-- scan is currently
-- being created.
scans :: [Scan], -- ^ The scans that the
-- user has requested.
top :: Double, -- ^ The Y offset of the
-- upper frame
bottom :: Double, -- ^ The Y offset of the
-- lower frame
offset :: Double, -- ^ The X offset of the
-- frames
choice :: Frame, -- ^ Which frame position
-- holds the sample.
step :: Double, -- ^ The Scan step size in mm
rotations :: [Double]} -- ^ The rotation
-- angles that we
-- wish to measure
deriving (Eq,Show)
instance JSONable ScanState where
toJSON s = Dict . zip ["mouse","scans","top","bottom","offset","choice","step","rotations"] $ [toJSON $ mouse s,toJSON $ scans s, toJSON $ top s, toJSON $ bottom s, toJSON $ offset s, toJSON $ choice s, toJSON $ step s, toJSON $ rotations s]
fromJSON d = ScanState <$> (d ~~> "mouse")
<*> (d ~~> "scans")
<*> ((d ~> "top") >>= fromJSON)
<*> ((d ~> "bottom") >>= fromJSON)
<*> ((d ~> "offset") >>= fromJSON)
<*> ((d ~> "choice") >>= fromJSON)
<*> defaultStep d
<*> ((d ~> "rotations") >>= fromJSON)
defaultStep :: JSON -> Maybe Double
defaultStep d =
case d ~> "step" of
Just x -> fromJSON x
Nothing -> Just 0.1 -- Default step size from V0.1
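-- | Initial state: no scans, sample in the Top frame, a 0.5 mm step, and
-- rotation angles of 0 to 50 degrees in 5 degree increments (stored in
-- radians).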
defaultScanState :: ScanState
defaultScanState = ScanState Free [] 0 50 0 Top 0.5 (map (*(pi/180)) [0,5..50])
-- | Creates a reference to a set of scans
initScanState :: IO (IORef ScanState)
initScanState = newIORef defaultScanState
makeFree :: ScanState -> ScanState
makeFree st = st{mouse=Free}
-- | Registers actions on the scan canvas
attachScanEvents :: IORef ScanState -- ^ A reference to the global
-- state of the scan
-> Canvas -- ^ The canvas being registered
-> IO () -- ^ A generic update to perform after any event
-> IO ()
attachScanEvents scanState can action = do
_ <- onEvent can MouseDown $ mouseDown action scanState
_ <- onEvent can MouseUp $ mouseUp action scanState
_ <- onEvent can MouseMove $ mouseMove action scanState
return ()
mouseUp :: IO () -> IORef ScanState -> MouseData -> IO ()
mouseUp action state m = do
modifyIORef' state $ makeFree . updateHead m
action
mouseMove :: IO () -> IORef ScanState -> MouseData -> IO ()
mouseMove action state m = do
modifyIORef' state $ updateHead m
action
updateHead :: MouseData -> ScanState -> ScanState
updateHead m st
| mouse st == Free = st
| null (scans st) = st
| otherwise =
let
s:ss = scans st
in
st{scans= axisScan (start s) (floatPair $ mouseCoords m):ss}
axisScan :: Point -> Point -> Scan
axisScan p p2 = Scan p (ending p p2) ""
where
ending (xa,ya) (xb,yb) =
if abs (yb - ya) > abs (xb - xa)
then (xa, yb)
else (xb, ya)
mouseDown :: IO () -> IORef ScanState -> MouseData -> IO ()
mouseDown action state m = do
modifyIORef' state $ \x -> let p = floatPair (mouseCoords m)
in startDrag p x
action
startDrag :: Point -> ScanState -> ScanState
startDrag p st = st{mouse=Dragging,scans=Scan p p "":scans st}
-- | Returns a picture with the scans coloured Magenta
scanShape :: ScanState -> Picture ()
scanShape st = lineWidth 1 . color (RGB 255 0 255) . stroke $ forM_ (scans st) (\(Scan a b _) -> line a b)
floatPair :: (Int, Int) -> Point
floatPair (x,y) = (fromIntegral x, fromIntegral y)
type Killer = Scan -> IO ()
type Changer = Elem -> Scan -> IO ()
-- | Add a table to the HTML document which contains the scans
populateTable :: Changer -- ^ An action which updates the a scan title
-- in the global state with the value in an
-- element
-> Killer -- ^ An action which removes a scan from the
-- global state
-> ScanState -- ^ The current state of the scan
-> Elem -- ^ where to place the table
-> IO ()
populateTable c k st e = do
clearChildren e
header <- makeTableHeader
appendChild e header
_ <- forM (reverse $ scans st) (makeScanRow c k st >=> appendChild e)
return ()
makeTableHeader :: IO Elem
makeTableHeader = do
hs <- mapM makeTableHeader' ["x1","y1","x2","y2","frames","time (minutes)","title","Delete"]
newElem "tr" `with` [children hs]
makeTableHeader' :: String -> IO Elem
makeTableHeader' x = do
txt <- newTextElem x
newElem "th" `with` [children [txt]]
makeTableRow :: (Show a) => [a] -> IO Elem
makeTableRow xs = do
texts <- mapM makeTableCell xs
let cell tx = with (newElem "td") [children [tx]]
cells <- mapM cell texts
with (newElem "tr") [children cells]
makeTableCell :: Show a => a -> IO Elem
makeTableCell x = do
txt <- newTextElem $ show x
with (newElem "td") [children [txt]]
makeScanRow :: Changer -> Killer -> ScanState -> Scan -> IO Elem
makeScanRow c k st sc@(Scan (xa, ya) (xb, yb) t) = do
let toReal = (/900) . (*25)
row <- makeTableRow [toReal xa, toReal ya, toReal xb, toReal yb,
fromIntegral $ getFrameCount (step st) sc,
fromIntegral . round . (*(fromIntegral . length $ rotations st)) . (*(3.5/60)) . fromIntegral
. getFrameCount (step st) $ sc]
titleLabel <- makeTitleLabel t
deleteButton <- makeDeleteButton
appendChild row =<< inCell titleLabel
appendChild row deleteButton
_ <- onEvent deleteButton Click $ const (k sc)
_ <- onEvent titleLabel Change $ const (c titleLabel sc)
return row
inCell :: Elem -> IO Elem
inCell t = newElem "td" `with` [children [t]]
makeTitleLabel :: String -> IO Elem
makeTitleLabel s = newElem "input" `with` [attr "type" =: "text",
attr "value" =: s]
makeDeleteButton :: IO Elem
makeDeleteButton = do
icon <- newElem "span" `with` [attr "class" =: "glyphicon glyphicon-remove"]
newElem "button" `with` [attr "class" =: "btn btn-danger",
children [icon]]
-- | Given a generic continuation action and a reference to the global
-- scan state, creates a function which will remove a given scan from
-- the state and perform the update continuation.
dropScan :: IO () -> IORef ScanState -> Killer
dropScan action scanState s = do
modifyIORef' scanState (\x -> x{scans = delete s $scans x})
action
-- | Given a generic continuation action and a reference to the global
-- scan state, creates a function which will update any chosen scan
-- with the value of a form element
updateTitle :: IO () -> IORef ScanState -> Changer
updateTitle action scanState label scan = do
l <- getProp label "value"
modifyIORef' scanState (fixScanState scan (\x -> x{title=l}))
action
when :: (a -> Bool) -> (a->a) -> [a] -> [a]
when _ _ [] = []
when test f (x:xs) = if test x
then f x:when test f xs
else x:when test f xs
fixScanState :: Scan -> (Scan->Scan) -> ScanState -> ScanState
fixScanState scan f s =
let ss = scans s
in s{scans=when (==scan) f ss}
newline :: String
newline = "\r\n"
-- | Turns a ScanState into a script macro for SPEC
toFile :: ScanState -> String
toFile s = intercalate (newline ++ newline) (map (scanRot s) (rotations s))
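-- For example (illustrative; 'defaultScanState' is this module's initial
-- state), a single named scan produces one "umv sar" block per rotation
-- angle, each followed by the move and acquisition commands for that scan:
--
-- > toFile defaultScanState { scans = [Scan (450, 0) (450, 900) "sample"] }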
scanRot :: ScanState -> Double -> String
scanRot s angle = "umv sar " ++ show (round $ angle*180/pi) ++ newline ++ (intercalate newline . map (fileLineScan s angle) . reverse . scans $ s)
data ScanDir = Horizontal | Vertical
fileLineScan :: ScanState -> Double -> Scan -> String
fileLineScan s angle sc@(Scan (xa, _) (xb, _) _)
| xa == xb = scanCommand Vertical s sc angle
| otherwise = scanCommand Horizontal s sc angle
getFrameCount :: Double -> Scan -> Int
getFrameCount stepSize (Scan (xa, ya) (xb, yb) _)
| xa == xb = getSteps stepSize ya yb
| otherwise = getSteps stepSize xa xb
getSteps :: Double -> Double -> Double -> Int
getSteps stepSize begin end = round (abs (toMM (end-begin)) / stepSize ) :: Int
-- | Convert pixel coordinates to real ones
toMM :: Double -> Double
toMM x = x*frameSize/imageSize
where
frameSize = 25 -- The size of the frame in mm
imageSize = 900 -- The size of the image in pixels
-- | Number of seconds to sleep between runs in a scan
sleep :: Double
sleep = 0
-- | Number of dark runs to perform on each scan.
ndark :: Int
ndark = 1
-- | Exposure time
time :: Double
time = 0.04
x1 :: ScanState -> Scan -> Double -> Double
x1 s (Scan (x,_) _ _) angle = offset s + 12.5 + (toMM x-12.5)* cos angle
x2 :: ScanState -> Scan -> Double -> Double
x2 s (Scan _ (x,_) _) angle = offset s + 12.5 + (toMM x-12.5)* cos angle
y1 :: ScanState -> Scan -> Double
y1 s (Scan (_,y) _ _) = case choice s of
Top -> top s + toMM y
Bottom -> bottom s + toMM y
y2 :: ScanState -> Scan -> Double
y2 s (Scan _ (_,y) _) = case choice s of
Top -> top s + toMM y
Bottom -> bottom s + toMM y
z1 :: ScanState -> Scan -> Double -> Double
z1 _ (Scan (x,_) _ _) angle = (toMM x-12.5)* sin angle
z2 :: ScanState -> Scan -> Double -> Double
z2 _ (Scan _ (x,_) _) angle = (toMM x-12.5)* sin angle
showDouble :: Double -> String
showDouble = printf "%.3f"
scanCommand :: ScanDir -> ScanState -> Scan -> Double -> String
scanCommand Vertical s scan angle =
let moveString = "umv sah " ++ showDouble (x1 s scan angle) ++ " tmp2 " ++ showDouble (z1 s scan angle)
scanString = unwords
["ccdtrans sav", showDouble $ y1 s scan, showDouble $ y2 s scan,
show $ getFrameCount (step s) scan, show time, show sleep, "\"" ++ title scan ++ "\"",
show ndark, "1"]
in moveString ++ newline ++ scanString
scanCommand Horizontal s scan angle =
let moveString = "umv sav " ++ showDouble (y1 s scan)
begin = x1 s scan angle
end = x2 s scan angle
zbegin = z1 s scan angle
zend = z2 s scan angle
n = getFrameCount (step s) scan
scanString = "for(i=0;i<=" ++ show n ++ ";i+=1)" ++ newline
++ "{" ++ newline
++ " y = " ++ showDouble begin ++ "+i*"
++ showDouble ((end-begin)/fromIntegral n) ++ newline
++ " x = " ++ showDouble zbegin ++ "+i*"
++ showDouble ((zend-zbegin)/fromIntegral n) ++ newline
++ " umv sah y" ++ newline
++ " umv tmp2 x" ++ newline
++ unwords [" ccdacq_nodark",show time,"\"" ++ title scan ++ "\""] ++ newline
++ "}" ++ newline
in moveString ++ newline ++ scanString
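-- Added sketch (inferred from the code above, not from project documentation):
-- a vertical scan produces two SPEC lines of the shape
--   umv sah <x> tmp2 <z>
--   ccdtrans sav <y1> <y2> <frames> <time> <sleep> "<title>" <ndark> 1
-- while a horizontal scan produces an "umv sav <y>" move followed by a SPEC
-- for-loop that steps the sah and tmp2 motors once per frame and calls
-- ccdacq_nodark with the exposure time and the scan title.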
-- | Determines whether the user has provided enough information to write the script file.
scansReady :: ScanState -> Bool
scansReady s
| null (scans s) = False
| any invalidTitle . map title . scans $ s = False
| null (rotations s) = False
| otherwise = True
invalidTitle :: String -> Bool
invalidTitle "" = True
invalidTitle t = ' ' `elem` t
|
rprospero/PhotoAlign
|
Scans.hs
|
mit
| 14,033 | 1 | 32 | 4,415 | 4,596 | 2,368 | 2,228 | 279 | 3 |
module Functors where
import Control.Lens
-- items from chapter 12 of Programming in Haskell 2nd Ed by Graham Hutton
inc :: [Int] -> [Int]
inc [] = []
inc (n:ns) = n+1 : inc ns
sqr :: [Int] -> [Int]
sqr [] = []
sqr (n:ns) = n^2 : sqr ns
map' :: (a -> b) -> [a] -> [b]
map' _ [] = []
map' f (x:xs) = f x : map' f xs
inc' = map' (+1)
--class Functor f where
-- fmap :: (a -> b) -> f a -> f b
--instance Functor [] where
-- fmap = map
data Maybe' a = Nothing_ | Just_ a
deriving Show
instance Functor Maybe' where
fmap _ Nothing_ = Nothing_
fmap g (Just_ x) = Just_ (g x)
data Tree a =
Leaf a | Node (Tree a) (Tree a)
deriving Show
instance Functor Tree where
fmap g (Leaf x) = Leaf (g x)
fmap g (Node l r) = Node (fmap g l) (fmap g r)
--instance Functor IO where
-- -- fmap (a -> b) -> f a -> f b
-- -- fmap (a -> b) -> IO a -> IO b
-- fmap g mx = do { x <- mx; return (g x)}
inc'' :: Functor f => f Int -> f Int
inc'' = fmap (+1)
--class Functor f => Applicative f where
-- pure :: a -> f a
-- (<*>) :: f (a -> b) -> f a -> f b
--instance Applicative Maybe where
-- -- pure :: a -> fa
-- pure = Just
-- Nothing <*> _ = Nothing
-- (Just g) <*> mx = fmap g mx
prods :: [Int] -> [Int] -> [Int]
prods xs ys = [ x * y | x <- xs, y <- ys]
prods2 :: [Int] -> [Int] -> [Int]
prods2 xs ys = pure (*) <*> xs <*> ys
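-- Added examples (a sketch, not from the book): the user-defined functors
-- above can be mapped over with the same code as lists.
incMaybe' :: Maybe' Int -> Maybe' Int
incMaybe' = fmap (+1)
incTree :: Tree Int -> Tree Int
incTree = fmap (+1)
-- λ> incMaybe' (Just_ 2)
-- Just_ 3
-- λ> prods2 [1,2] [3,4]
-- [3,4,6,8]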
|
brodyberg/LearnHaskell
|
CaesarCypher.hsproj/Functors.hs
|
mit
| 1,404 | 0 | 8 | 421 | 510 | 278 | 232 | 29 | 1 |
module Main where
import Control.Applicative
import Control.Concurrent.MVar
import Control.Concurrent
import Data.Time
import System.Environment
main = do
mv <- newEmptyMVar
start <- getCurrentTime
loop mv =<< read . head <$> getArgs
end <- getCurrentTime
putStrLn $ "creation time: " ++ show (diffUTCTime end start)
putMVar mv 0
v <- takeMVar mv
fin <- getCurrentTime
putStrLn $ "Var Value: " ++ (show v) ++ " Message time: " ++ show (diffUTCTime fin end)
loop :: MVar Int -> Int -> IO ()
loop mv n | n <= 0 = return ()
| otherwise = do forkIO $ do
m <- takeMVar mv
putMVar mv $! m+1
loop mv (n-1)
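-- Usage sketch (added note; the executable name is hypothetical):
--   ./threads 100000
-- forks that many threads, each of which waits to take the shared MVar and
-- put back its value plus one, and prints the creation and messaging times.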
|
nlim/haskell-playground
|
src/Threads.hs
|
mit
| 717 | 0 | 13 | 226 | 258 | 121 | 137 | 22 | 1 |
module Example where
import Data.Monoid
import Control.Monad
import Types
import Drum
import Play
import Dseq
beat1 = mconcat [ sequence_ [hi, hi, hi, hi],
sequence_ [bd, sn, bd, sn] ]
wassup = n2 bd >> n8 bd >> n4 sn >> n2 bd >> n8 bd >> n4 bd >> n2 sn
-- Trap Beat
-- Tempo: 210
h8 = replicateM_ 8 (n8 hi)
h12 = replicateM_ 12 (n8 hi)
trill = replicateM_ 8 (n16 hi)
hats = h8 >> trill >> h12 >> trill >> n4 hi >> n4 hi
trap = hats <> (n1 bd >> n1 sn >> n1 bd >> n1 sn)
-- Today Was A Good Day - Ice Cube
-- Tempo: 160
icecube :: Song
icecube = dseq BassDrum1 8 "7... .... 7... .... 7... .... 7.77 .7.."
<> dseq BassDrum2 8 ".... 7... .... 7... .... 7... .... 7..."
<> dseq SnareDrum2 8 ".... 4... .... 4... .... 4... .... 4..."
<> dseq ClosedHihat 8 "7.7. 7.77 .77. 7.77 7.7. 7.77 .77. ...."
<> dseq OpenHihat 8 ".... .... .... .... .... .... .... .7.."
-- House Beat
-- Tempo: 260
house = dseq MidTom1 8 "9... .9.. 9... 9..."
<> dseq Claves 8 ".9.. .... .9.. ...."
<> dseq ClosedHihat 8 "9... 9... 9... 9..."
<> dseq BassDrum1 8 "..9. .... 9.9. ...."
hMoreHats = house <> dseq ClosedHihat 8 "..9. ..9. ..9. ..9."
hAllHats = hMoreHats <> dseq ClosedHihat 8 ".9.9 .9.9 .9.9 .9.9"
houseSong = sequence_ $ replicate 4 house
++ replicate 4 hMoreHats
++ replicate 4 hAllHats
++ replicate 4 house
-- Amen Brother
-- Tempo: 210
amen = dseq RideCymbal1 8 "7.7. 7.7. 7.7. 7.7."
<> dseq SnareDrum1 8 ".... 7..7 .7.. 7..7"
<> dseq BassDrum1 8 "7.7. .... ..77 ...."
-- The Funky Drummer
-- Tempo: 210
funky = dseq OpenHihat 8 ".... ...7 .... .7.."
<> dseq ClosedHihat 8 "7777 777. 7777 7.77"
<> dseq SnareDrum1 8 ".... 7..7 .7.7 7..7"
<> dseq BassDrum1 8 "7.7. ..7. ..7. .7.."
-- Impeach The President
-- Tempo: 210
impeach = dseq OpenHihat 8 ".... .... ..7. ...."
<> dseq ClosedHihat 8 "7.7. 7.77 7... 7.7."
<> dseq SnareDrum1 8 ".... 7... .... 7..."
<> dseq BassDrum1 8 "7... ...7 7... ..7."
-- When The Levee Breaks
-- Tempo: 210
levee = dseq OpenHihat 8 "7.7. 7.7. 7.7. 7.7."
<> dseq SnareDrum1 8 ".... 7... .... 7..."
<> dseq BassDrum1 8 "77.. ...7 ..77 ...."
-- Cold Sweat
-- Tempo: 210
sweat = dseq RideCymbal1 8 "7.7. 7.7. 7.7. 7.7."
<> dseq SnareDrum1 8 ".... 7..7 .... 7..7"
<> dseq BassDrum1 8 "7... .... 7.7. ...."
-- Billie Jean
-- Tempo: 240
billie = dseq ClosedHihat 8 "7.7. 7.7. 7.7. 7.7."
<> dseq SnareDrum1 8 ".... 7... .... 7..."
<> dseq BassDrum1 8 "7... .... 7... ...."
-- Walk This Way
-- Tempo: 210
walk = dseq ClosedHihat 8 "..7. 7.7. 7.7. 7.7."
<> dseq OpenHihat 8 "7... .... .... ...."
<> dseq SnareDrum1 8 ".... 7... .... 7..."
<> dseq BassDrum1 8 "7... ...7 7.7. ...."
-- Ashley's Roachclip
-- Tempo: 210
ashley = dseq ClosedHihat 8 "7.7. 7.7. 7... 7.7."
<> dseq OpenHihat 8 ".... .... ..7. ...."
<> dseq Tambourine 8 "7.7. 7.7. 7.7. 7.7."
<> dseq SnareDrum1 8 ".... 7... .... 7..."
<> dseq BassDrum1 8 "7.7. ..7. .77. ...."
-- Energy
-- Tempo: 180
energy = dseq ClosedHihat 8 "7777 7777 7777 7777"
<> dseq SnareDrum1 8 ".... 7... .... 7..."
<> dseq OpenHihat 8 ".... 7... .... 7..."
<> dseq BassDrum1 8 "7..7 ..77 ..7. .7.."
energyFlare = (dseq ClosedHihat 8 "7777 7777 7777 77" >> dseq ClosedHihat 16 "777")
<> dseq SnareDrum1 8 ".... 7... .... 7..."
<> dseq OpenHihat 8 ".... 7... .... 7..."
<> dseq BassDrum1 8 "7..7 ..77 ..7. .7.."
energySong = replicateM_ 3 energy >> energyFlare
|
reedrosenbluth/Djembe
|
src/Example.hs
|
mit
| 3,677 | 0 | 11 | 1,011 | 883 | 439 | 444 | 69 | 1 |
module GHCJS.DOM.SQLTransactionCallback (
) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/SQLTransactionCallback.hs
|
mit
| 52 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGAElement
(getTarget, SVGAElement(..), gTypeSVGAElement) where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAElement.target Mozilla SVGAElement.target documentation>
getTarget :: (MonadDOM m) => SVGAElement -> m SVGAnimatedString
getTarget self
= liftDOM ((self ^. js "target") >>= fromJSValUnchecked)
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/SVGAElement.hs
|
mit
| 1,265 | 0 | 10 | 138 | 345 | 224 | 121 | 21 | 1 |
{-|
Module : Text.LParse.TokenStream
Description : Underlying data structure for sequential parsing
Copyright : (c) Marcus Völker, 2017-2018
License : MIT
Maintainer : [email protected]
This module contains the `TokenStream` class, an abstraction of lists, similar to `Traversable`, but geared for use with LParse
-}
module Text.LParse.TokenStream where
import Data.Either
import Data.Maybe
import Data.Traversable
import Prelude hiding (filter,zip,zipWith,drop)
-- | `TokenStream` abstracts a list, i.e., something that has a next element to process and a rest afterwards
class (Functor t, Foldable t) => TokenStream t where
-- | `top` gives the next element to process. Similar to `head`
top :: t a -> a
-- | `rest` gives what is left after processing `top`. Similar to `tail`
rest :: t a -> t a
-- | `nil` gives the empty `TokenStream`. Similar to `[]`
nil :: t a
-- | `cons` prepends an element to the `TokenStream`. Similar to `(:)`
cons :: a -> t a -> t a
instance TokenStream [] where
top = head
rest = tail
nil = []
cons = (:)
instance TokenStream Maybe where
top = fromJust
rest = const Nothing
nil = Nothing
cons a _ = Just a
instance TokenStream (Either a) where
top = head . rights . return
rest x = if isLeft x then x else nil
nil = Left undefined
cons a _ = Right a
-- | `TokenStream` version of `drop`
drop :: (TokenStream s) => Int -> s a -> s a
drop 0 x = x
drop n x = rest $ drop (n-1) x
-- | `TokenStream` version of `zip`
zip :: (TokenStream s) => s a -> s b -> s (a,b)
zip = zipWith (,)
-- | `TokenStream` version of `zipWith`
zipWith :: (TokenStream s) => (a -> b -> c) -> s a -> s b -> s c
zipWith f l r | null l || null r = nil
| otherwise = f (top l) (top r) `cons` zipWith f (rest l) (rest r)
-- | `TokenStream` version of `filter`
filter :: (TokenStream s) => (a -> Bool) -> s a -> s a
filter c x | null x = nil
| c (top x) = top x `cons` filter c (rest x)
| otherwise = filter c (rest x)
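-- Added usage sketch (GHCi examples over the instances above, with this
-- module's filter/zip/drop in scope instead of the Prelude versions):
-- λ> filter even [1,2,3,4 :: Int]
-- [2,4]
-- λ> zip [1,2,3 :: Int] "abc"
-- [(1,'a'),(2,'b'),(3,'c')]
-- λ> top (drop 2 "abcde")
-- 'c'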
|
MarcusVoelker/LParse
|
src/Text/LParse/TokenStream.hs
|
mit
| 2,078 | 0 | 10 | 529 | 617 | 318 | 299 | 37 | 1 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module BinaryTree2 where
-- An experiment to make a binary tree where the values are stored at the leaves, not the nodes.
-- The trouble is that it makes insertion problematic, as there is no way to tell where to insert values in the tree.
-- So the experiment was aborted.
data BinaryTree2 a =
Leaf a
| Node (BinaryTree2 a) (BinaryTree2 a)
deriving (Eq, Ord, Show)
--insert' :: Ord a => a -> BinaryTree2 a -> BinaryTree2 a
--insert' b (Leaf a)
-- | b == a = Leaf a
-- | b < a = Node (insert' b left) a right
-- | b > a = Node left a (insert' b right)
--
--Node Leaf b Leaf
--insert' b (Node left a right)
-- | b == a = Node left a right
-- | b < a = Node (insert' b left) a right
-- | b > a = Node left a (insert' b right)
|
NickAger/LearningHaskell
|
HaskellProgrammingFromFirstPrinciples/Chapter11.hsproj/BinaryTree2.hs
|
mit
| 764 | 0 | 8 | 182 | 63 | 42 | 21 | 6 | 0 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.HTMLTableSectionElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.HTMLTableSectionElement
#else
module Graphics.UI.Gtk.WebKit.DOM.HTMLTableSectionElement
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.HTMLTableSectionElement
#else
import Graphics.UI.Gtk.WebKit.DOM.HTMLTableSectionElement
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/HTMLTableSectionElement.hs
|
mit
| 500 | 0 | 5 | 39 | 33 | 26 | 7 | 4 | 0 |
import System.Environment
import Data.List
main = do
args <- getArgs
progName <- getProgName
putStrLn "The arguments are:"
  mapM_ putStrLn args
putStrLn "The program name is:"
putStrLn progName
|
fabriceleal/learn-you-a-haskell
|
09/args.hs
|
mit
| 223 | 0 | 7 | 58 | 56 | 24 | 32 | 9 | 1 |
-- Note: we should only depend on libraries that ship with GHC for this. No
-- external dependencies!
import Control.Monad (when)
import Data.List (concat, isPrefixOf)
import Data.Version (Version, parseVersion)
import Prelude (Bool (..), FilePath, elem, error,
filter, fmap, getLine, lines,
mapM_, null, putStr, putStrLn,
return, show, snd, unwords, ($),
(++), (.), (/=), (<), (==),
(>>=))
import System.Directory (doesFileExist,
getAppUserDataDirectory,
getDirectoryContents, removeFile)
import System.Environment (getArgs, getExecutablePath)
import System.Exit (ExitCode (ExitSuccess))
import System.FilePath (splitExtension, takeDirectory,
takeExtension, takeFileName, (</>))
import System.IO (IO, hFlush, stdout)
import System.Process (rawSystem, readProcess)
import Text.ParserCombinators.ReadP (readP_to_S)
main :: IO ()
main = do
args <- getArgs
putStrLn $ show args
binPath <- fmap takeDirectory getExecutablePath
let sevenz = binPath </> "7z.exe"
getDirectoryContents binPath
>>= mapM_ (un7z binPath sevenz . (binPath </>))
mapM_ (handleArg sevenz) args
removeOldCabal (binPath </> "cabal.exe")
handleArg
:: FilePath -- ^ 7z.exe
-> FilePath -- ^ command line argument
-> IO ()
handleArg sevenz arg = do
putStrLn $ show (sevenz, arg, base, ext)
case ext of
".7z" -> un7z base sevenz arg
".xz" -> do
un7z (takeDirectory base) sevenz arg
handleArg sevenz base
".tar" -> un7z (takeDirectory base) sevenz arg
_ -> error $ "handleArg: " ++ show (sevenz, arg, base, ext)
where
(base, ext) = splitExtension arg
un7z :: FilePath -- ^ dest path
-> FilePath -- ^ 7z.exe
-> FilePath -- ^ to be unpacked
-> IO ()
un7z destPath sevenz =
go
where
exts = [".7z", ".xz", ".tar", ".zip"]
go fp = when (ext `elem` exts) $ do
putStrLn $ "Decompressing " ++ fp ++ " to " ++ destPath
ec <- rawSystem sevenz
(concat [ [ "x"
, "-o" ++ destPath
, "-y"
, fp ]
, [ "stack.exe" | "stack-" `isPrefixOf` (takeFileName fp) ]])
removeFile fp
when (ec /= ExitSuccess)
$ error $ "Could not decompress: " ++ fp
where
ext = takeExtension fp
removeOldCabal :: FilePath -- ^ new cabal
-> IO ()
removeOldCabal newCabal = do
cabalDir <- getAppUserDataDirectory "cabal"
let oldCabal = cabalDir </> "bin/cabal.exe"
exists <- doesFileExist oldCabal
when exists $ do
oldVersion <- getCabalVersion oldCabal
newVersion <- getCabalVersion newCabal
when (oldVersion < newVersion) $ do
putStrLn "You have an older version of cabal-install at:"
putStrLn oldCabal
putStr "It is recommended that you remove it. Shall I do that for you now? (y/n) "
hFlush stdout
let loop = do
s <- getLine
case s of
"y" -> return True
"n" -> return False
_ -> do
putStr "Invalid response, please enter y or n: "
hFlush stdout
loop
toDelete <- loop
when toDelete $ removeFile oldCabal
getCabalVersion :: FilePath -> IO Version
getCabalVersion fp = do
str <- fmap (unwords . lines) $ readProcess fp ["--numeric-version"] ""
case filter (null . snd) $ readP_to_S parseVersion str of
[(v, "")] -> return v
_ -> error $ "Incorrect version: " ++ show (fp, str)
|
fpco/minghc
|
minghc-post-install.hs
|
mit
| 4,306 | 0 | 23 | 1,779 | 1,045 | 548 | 497 | 93 | 4 |
module E10 where
data MyType = TypeOne String
| TypeTwo String
definition :: TypeOne -> Bool
definition (TypeOne aString) = True
{-
-}
|
pascal-knodel/haskell-craft
|
Examples/· Errors/E10.hs
|
mit
| 173 | 0 | 7 | 61 | 42 | 24 | 18 | 5 | 1 |
{-# OPTIONS -fallow-overlapping-instances #-}
{- arch-tag: Object tests main file
Copyright (C) 2005 John Goerzen <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
module Objectstest(tests) where
import Test.HUnit
import Python.Objects
import Foreign.C.Types
import Python.Types
import Data.List
import Python.Interpreter
f msg inp code exp = TestLabel msg $ TestCase $ do pyo <- toPyObject inp
r <- code pyo
exp @=? r
test_base =
[
f "showPyObject" (5::CInt) showPyObject "<type 'int'>: 5"
]
test_lists =
[
f "empty" ([]::[CInt]) fromPyObject ([]::[CInt])
,f "repr empty" ([]::[CInt]) reprOf "[]"
,f "some cints" [1::CInt, 2, 3] fromPyObject [1::CInt, 2, 3]
,f "some cints repr" [1::CInt, 2, 3] reprOf "[1, 2, 3]"
,f "strings" ["foo", "bar"] fromPyObject ["foo", "bar"]
,f "strings repr" ["foo", "bar"] reprOf "['foo', 'bar']"
]
test_al =
[
f "emptypyo" ([]::[(PyObject, PyObject)]) fromPyObject
([]::[(PyObject, PyObject)])
,f "cint to cint" [(1::CInt, 2::CInt), (3, 4)]
(\x -> fromPyObject x >>= return . sort)
[(1::CInt, 2::CInt), (3, 4)]
]
test_functions =
[
f "typestr" (5::CInt) (\x -> typeOf x >>= strOf) "<type 'int'>"
,f "repr" ["foo", "bar"] reprOf "['foo', 'bar']"
]
test_strings =
[
f "empty" ([]::String) fromPyObject ([]::String)
,f "basic" "foo" fromPyObject "foo"
,f "dquotes" "foo\"" fromPyObject "foo\""
,f "squotes" "foo'" fromPyObject "foo'"
,f "embedded null" "foo\0bar" fromPyObject "foo\0bar"
,f "null only" "\0" fromPyObject "\0"
,f "quotes" "\"'\"" fromPyObject "\"'\""
]
test_ints =
[
f "0L" (0::CLong) fromPyObject (0::CLong)
,f "-5L" (-5::CLong) fromPyObject (-5::CLong)
,f "5L" (5::CLong) fromPyObject (5::CLong)
,f "max long" (maxBound::CLong) fromPyObject (maxBound::CLong)
,f "min long" (minBound::CLong) fromPyObject (minBound::CLong)
,f "0i" (0::CInt) fromPyObject (0::CInt)
,f "-5i" (-5::CInt) fromPyObject (-5::CInt)
,f "5i" (5::CInt) fromPyObject (5::CInt)
,f "min int" (minBound::CInt) fromPyObject (minBound::CInt)
,f "max int" (maxBound::CInt) fromPyObject (maxBound::CInt)
,f "long/int" (12345::CLong) fromPyObject (12345::CInt)
,f "int/long" (12354::CInt) fromPyObject (12354::CInt)
,f "repr max" (maxBound::CLong) reprOf (show (maxBound::CLong))
,f "str min" (minBound::CLong) strOf (show (minBound::CLong))
]
test_longs =
[
f "0" (0::Integer) fromPyObject (0::Integer)
,f "-5" (-5::Integer) fromPyObject (-5::Integer)
,f "5" (5::Integer) fromPyObject (5::Integer)
,f "2^384" ((2 ^ 384)::Integer) fromPyObject ((2 ^ 384)::Integer)
,f "2^384*-1" (( 2 ^ 384 * (-1))::Integer) fromPyObject ((2 ^ 384 * (-1))::Integer)
,f "str 2^384" ((2 ^ 384)::Integer) strOf (show ((2 ^ 384)::Integer))
]
test_doubles =
[
f "0" (0::CDouble) fromPyObject (0::CDouble)
,f "-5" (-5::CDouble) fromPyObject (-5::CDouble)
,f "5.1234" (5.1234::CDouble) fromPyObject (5.1234::CDouble)
,f "str 5.1234" (5.1234::CDouble) strOf "5.1234"
,f "2^384" ((2^384)::CDouble) fromPyObject ((2^384)::CDouble)
,f "2^384*-1" ((2^384 * (-1)::CDouble)) fromPyObject ((2^384 * (-1)::CDouble))
,f "1/(2^384)" ((1 / (2 ^ 384))::CDouble) fromPyObject
((1 / (2 ^ 384))::CDouble)
]
test_dicts =
[
f "empty" ([]::[(String, String)]) fromPyObject ([]::[(String, String)])
,f "one s" [("foo", "bar")] fromPyObject [("foo", "bar")]
,f "mult s" [("foo", "bar"), ("quux", "baz")]
(\x -> fromPyObject x >>= return . sort)
[("foo", "bar"), ("quux", "baz")]
,f "s2i" [("foo", 1::CLong), ("quux", 2)]
(\x -> fromPyObject x >>= return . sort)
[("foo", 1::CLong), ("quux", 2)]
]
test_call =
[
TestCase $ do func <- pyRun_String "repr" Py_eval_input []
r <- pyObject_CallHs func [5::Integer] ([]::[(String, String)])
"5L" @=? r
]
test_dir =
[
TestCase $ do dv <- toPyObject ([]::String) >>= dirPyObject
assertBool "replace" $ "replace" `elem` dv
assertBool "rindex" $ "rindex" `elem` dv
]
test_attr =
[
TestCase $ do pyImport "md5"
md5 <- pyRun_String "md5.md5()" Py_eval_input []
fupdate <- getattr md5 "update"
fhexdigest <- getattr md5 "hexdigest"
pyObject_RunHs fupdate ["hi"] noKwParms
pyObject_RunHs fupdate ["there"] noKwParms
r <- pyObject_CallHs fhexdigest noParms noKwParms
"a8b767bb9cf0938dc7f40603f33987e5" @=? r
,TestCase $ do pyImport "md5"
md5 <- pyRun_String "md5.md5()" Py_eval_input []
runMethodHs md5 "update" ["hi"] noKwParms
runMethodHs md5 "update" ["there"] noKwParms
r <- callMethodHs md5 "hexdigest" noParms noKwParms
"a8b767bb9cf0938dc7f40603f33987e5" @=? r
]
tests = TestList [TestLabel "base" (TestList test_base),
TestLabel "lists/tuples" (TestList test_lists),
TestLabel "al" (TestList test_al),
TestLabel "functions" (TestList test_functions),
TestLabel "strings" (TestList test_strings),
TestLabel "ints" (TestList test_ints),
TestLabel "longs" (TestList test_longs),
TestLabel "doubles" (TestList test_doubles),
TestLabel "dir" (TestList test_dir),
TestLabel "call" (TestList test_call),
TestLabel "attr" (TestList test_attr),
TestLabel "dict" (TestList test_dicts)
]
|
jgoerzen/missingpy
|
testsrc/Objectstest.hs
|
gpl-2.0
| 6,618 | 0 | 13 | 1,827 | 2,119 | 1,177 | 942 | 124 | 1 |
-- DivideVencerasOrdRapida.hs
-- Divide and conquer: quicksort.
-- José A. Alonso Jiménez https://jaalonso.github.com
-- =====================================================================
module Tema_23.DivideVencerasOrdRapida where
-- One of the implementations must be chosen
import Tema_23.DivideVenceras
-- import I1M.DivideVenceras
-- (ordenaRapida xs) is the list obtained by sorting xs with the
-- quicksort procedure. For example,
-- λ> ordenaRapida [3,1,4,1,5,9,2,8]
-- [1,1,2,3,4,5,8,9]
ordenaRapida :: Ord a => [a] -> [a]
ordenaRapida = divideVenceras ind id divide combina
where
ind xs = length xs <= 1
divide (x:xs) = [[ y | y<-xs, y<=x],
[ y | y<-xs, y>x] ]
divide [] = []
combina (x:_) [l1,l2] = l1 ++ [x] ++ l2
combina _ _ = error "Imposible"
|
jaalonso/I1M-Cod-Temas
|
src/Tema_23/DivideVencerasOrdRapida.hs
|
gpl-2.0
| 887 | 0 | 10 | 215 | 192 | 107 | 85 | 10 | 3 |
module Tema_23c_BEE_Reinas_Spec (main, spec) where
import Tema_23.BEE_Reinas
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "valida" $ do
it "e1" $
valida [(1,1)] (2,2) `shouldBe` False
it "e2" $
valida [(1,1)] (2,3) `shouldBe` True
describe "sucesoresNR" $
it "e1" $
sucesoresNR (1,4,[]) `shouldBe`
[(2,4,[(1,1)]),(2,4,[(1,2)]),(2,4,[(1,3)]),(2,4,[(1,4)])]
describe "buscaEE_NR 8" $
it "e1" $
buscaEE_NR 8 `shouldBe`
[(1,1),(2,5),(3,8),(4,6),(5,3),(6,7),(7,2),(8,4)]
describe "nSolucionesNR" $
it "e1" $
nSolucionesNR 8 `shouldBe` 92
|
jaalonso/I1M-Cod-Temas
|
test/Tema_23c_BEE_Reinas_Spec.hs
|
gpl-2.0
| 654 | 0 | 14 | 158 | 377 | 220 | 157 | 23 | 1 |
import Multiple
main :: IO ()
main = readLn >>= print . solve
solve :: Integer -> Integer
solve p = f 3 + f 5 - f 15
where f = multiples_under p
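-- Added note (assuming multiples_under p n gives the sum of the multiples of
-- n strictly below p): solve uses inclusion-exclusion, adding the sums for 3
-- and 5 and subtracting the sum for 15, which would otherwise be counted
-- twice; e.g. solve 10 = 18 + 5 - 0 = 23.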
|
NorfairKing/project-euler
|
001/haskell/solution.hs
|
gpl-2.0
| 159 | 1 | 7 | 48 | 75 | 35 | 40 | 6 | 1 |
-- -*- mode: haskell -*-
{-# LANGUAGE TemplateHaskell #-}
module NPDA.Property where
import NPDA.Type
import Condition
import Autolib.Reporter
import Autolib.Reporter.Type
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data Property = Det
| Sane
| Accept_by Acceptance_Mode
deriving ( Eq, Ord, Typeable )
data Acceptance_Mode = Empty_Stack
| Final_States
deriving ( Eq, Ord, Typeable )
$(derives [makeReader, makeToDoc] [''Property])
$(derives [makeReader, makeToDoc] [''Acceptance_Mode])
|
florianpilz/autotool
|
src/NPDA/Property.hs
|
gpl-2.0
| 556 | 0 | 9 | 111 | 143 | 82 | 61 | 18 | 0 |
module Main where {
import Salsa20;
import qualified Data.ByteString.Lazy as Lazy;
main :: IO();
-- Sadly, the normal String putStr fails here due to Unicode.
main = do {
fi <- Lazy.getContents;
let { (key, salt) = Lazy.splitAt 32 fi };
  {-^ We expect exactly 32 + 24 bytes of input -}
Lazy.putStr $ Lazy.pack $ xsalsa (Lazy.unpack key) (Lazy.unpack salt);
-- Approximately 310 million years to consume all 2^64 blocks until
-- the block counter overflows.
};
-- 1200 times slower than the C version.
}
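-- Usage sketch (added note; file names are hypothetical):
--   head -c 56 /dev/urandom > key_and_nonce
--   ./xsalsa < key_and_nonce | head -c 1024 > keystream
-- i.e. feed exactly 32 key bytes followed by 24 nonce bytes on stdin and
-- truncate the resulting keystream to however many bytes are needed.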
|
kenta2/yescrypt
|
Xsalsa.hs
|
gpl-3.0
| 498 | 0 | 11 | 87 | 117 | 70 | 47 | 8 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
{-# LANGUAGE EmptyDataDecls, MultiParamTypeClasses #-}
module LevelTools.EditType
(
EditType (..),
) where
import MyPrelude
import Game
import Game.LevelPuzzleMode.LevelPuzzleWorld
import LevelTools.SemiContent
import LevelTools.SemiRoom
data EditType =
TypeEmpty |
TypeDotPlain |
TypeDotBonus |
TypeDotTele |
TypeDotFinish |
TypeWall
deriving Show
|
karamellpelle/grid
|
designer/source/LevelTools/EditType.hs
|
gpl-3.0
| 1,155 | 0 | 5 | 231 | 82 | 60 | 22 | 17 | 0 |
-- This is the alternate command line argument parser frontend. It takes
-- arguments compatible with the reference parser for COP5555. It is compatible
-- with class requirements. If you're actually looking to use the program, I'd
-- recommend the GNU-style frontend. It's better.
module Main (main) where
import System.Environment
import Control.Monad
import OptionHandler
optUsageInfo :: String
optUsageInfo =
unlines [ "Usage: hs-rpal [OPTION...] FILE"
, " -version: Shows the version number"
, " -l: Print the raw program to stdout"
, " -ast: Print the Abstract Syntax Tree to stdout"
, " -st: Print the partially standardized AST to stdout"
, " -fst: Print the fully standardized AST to stdout"
, " -lex: Print all the tokens to stdout"
, " -control: Print all the control structures to stdout"
, " -noout: Skip evaluation (useful with -ast, -lex, etc)"
]
-- Processes arguments prefixed with a dash
optParseArg :: Opt -> String -> Opt
optParseArg o "version" = o { optVersion = True }
optParseArg o "l" = o { optListing = True }
optParseArg o "ast" = o { optAst = True }
optParseArg o "st" = o { optPartialSt = True }
optParseArg o "fst" = o { optFullSt = True }
optParseArg o "lex" = o { optLex = True }
optParseArg o "control" = o { optControl = True }
optParseArg o "noout" = o { optQuiet = True }
optParseArg _ arg = error $ "Unrecognized option: '-" ++ arg ++ "'\n"
++ optUsageInfo
optParse :: Opt -> [String] -> Opt
optParse o (argHead : argTail) =
case argHead :: String of
'-' : a -> optParse (optParseArg o a) argTail
a -> optParse (o {optFile = Just a }) argTail
optParse o [] = o
main :: IO ()
main = (liftM $ optParse optDefaults) getArgs >>= optProcess
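-- Example invocation (added note, derived from the usage text above):
--   hs-rpal -ast -noout prog.rpal
-- prints the abstract syntax tree of prog.rpal to stdout and skips evaluation.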
|
bgw/hs-rpal
|
src/hsRpalCompat.hs
|
gpl-3.0
| 1,965 | 0 | 12 | 597 | 382 | 212 | 170 | 34 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module THVersion where
import Language.Haskell.TH
|
jplLloyd/pointwise-composition
|
src/Data/THVersion.hs
|
gpl-3.0
| 86 | 0 | 4 | 12 | 11 | 8 | 3 | 3 | 0 |
module Engine.Graphics.Assets.ImageLoader(loadImage) where
import Codec.Picture
import Codec.Picture.Types
import System.Exit (exitFailure)
loadImage :: FilePath -> IO (Image PixelRGBA8)
loadImage fp =
do res <- readPng fp
case res of
Left err ->
do putStrLn $ "error loading image: " ++ fp
print err
exitFailure
Right rawImg -> return $ convertImg rawImg
convertImg :: DynamicImage -> Image PixelRGBA8
convertImg (ImageRGBA8 img) = img
convertImg (ImageY8 img) = promoteImage img
convertImg (ImageYA8 img) = promoteImage img
convertImg (ImageRGB8 img) = promoteImage img
convertImg _ = error "Wrong filetype, use PNG with RGBA8"
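-- Added usage sketch (the asset path is hypothetical):
-- playerTexture :: IO (Image PixelRGBA8)
-- playerTexture = loadImage "assets/textures/player.png"
-- A non-PNG file or a read failure prints the decoder error and exits.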
|
halvorgb/AO2D
|
src/Engine/Graphics/Assets/ImageLoader.hs
|
gpl-3.0
| 752 | 0 | 13 | 211 | 210 | 103 | 107 | 19 | 2 |
module Core.Square (
square
) where
square :: Int -> Int
square x = x * x
|
adarqui/ToyBox
|
haskell/Core/src/Core/Square.hs
|
gpl-3.0
| 76 | 0 | 5 | 19 | 32 | 18 | 14 | 4 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.MapsEngine.Layers.UnPublish
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Unpublish a layer asset.
--
-- /See:/ <https://developers.google.com/maps-engine/ Google Maps Engine API Reference> for @mapsengine.layers.unpublish@.
module Network.Google.Resource.MapsEngine.Layers.UnPublish
(
-- * REST Resource
LayersUnPublishResource
-- * Creating a Request
, layersUnPublish
, LayersUnPublish
-- * Request Lenses
, lupId
) where
import Network.Google.MapsEngine.Types
import Network.Google.Prelude
-- | A resource alias for @mapsengine.layers.unpublish@ method which the
-- 'LayersUnPublish' request conforms to.
type LayersUnPublishResource =
"mapsengine" :>
"v1" :>
"layers" :>
Capture "id" Text :>
"unpublish" :>
QueryParam "alt" AltJSON :>
Post '[JSON] PublishResponse
-- | Unpublish a layer asset.
--
-- /See:/ 'layersUnPublish' smart constructor.
newtype LayersUnPublish = LayersUnPublish'
{ _lupId :: Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'LayersUnPublish' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lupId'
layersUnPublish
:: Text -- ^ 'lupId'
-> LayersUnPublish
layersUnPublish pLupId_ =
LayersUnPublish'
{ _lupId = pLupId_
}
-- | The ID of the layer.
lupId :: Lens' LayersUnPublish Text
lupId = lens _lupId (\ s a -> s{_lupId = a})
instance GoogleRequest LayersUnPublish where
type Rs LayersUnPublish = PublishResponse
type Scopes LayersUnPublish =
'["https://www.googleapis.com/auth/mapsengine"]
requestClient LayersUnPublish'{..}
= go _lupId (Just AltJSON) mapsEngineService
where go
= buildClient
(Proxy :: Proxy LayersUnPublishResource)
mempty
|
rueshyna/gogol
|
gogol-maps-engine/gen/Network/Google/Resource/MapsEngine/Layers/UnPublish.hs
|
mpl-2.0
| 2,670 | 0 | 13 | 641 | 303 | 186 | 117 | 49 | 1 |
{-
passman
Copyright (C) 2018-2021 Jonathan Lamothe
<[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this program. If not, see
<https://www.gnu.org/licenses/>.
-}
module Spec.PWHasService (tests) where
import qualified Data.Map as M
import System.Random (mkStdGen, StdGen)
import Test.HUnit (Test (..), (~?=))
import Password
tests :: Test
tests = TestLabel "pwHasService" $ TestList $ map test'
[ ( "empty database", "foo", newPWDatabase, False )
, ( "in database", "foo", database, True )
, ( "not found", "quux", database, False )
]
test' :: (String, String, PWDatabase, Bool) -> Test
test' (label, x, db, expect) = TestLabel label $
pwHasService x db ~?= expect
database :: M.Map String PWData
database = M.fromList
[ ( "foo", foo )
, ( "bar", bar )
, ( "baz", baz )
]
foo :: PWData
g' :: StdGen
(foo, g') = newPWData g
bar :: PWData
g'' :: StdGen
(bar, g'') = newPWData g'
baz :: PWData
(baz, _) = newPWData g''
g :: StdGen
g = mkStdGen 1
--jl
|
jlamothe/passman
|
test/Spec/PWHasService.hs
|
lgpl-3.0
| 1,553 | 0 | 8 | 310 | 316 | 188 | 128 | 28 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PackageImports #-}
{-
Copyright 2018 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Internal.Text
( Text
, fromString
, toString
, fromCWText
, toCWText
, (<>)
, numberOfCharacters
, numberOfWords
, numberOfLines
, lines
, unlines
, words
, unwords
, characters
, printed
, joined
, joinedWith
, lowercase
, uppercase
, startsWith
, endsWith
, substitution
, substitutions
) where
import Data.List (foldl')
import Data.Maybe
import Numeric
import qualified "base" Prelude as P
import "base" Prelude (Bool, String, (.), length, map, show)
import qualified Data.JSString as J
import qualified Data.JSString.Text as J
import qualified Data.Text as T
import Internal.Num
import Internal.Truth
newtype Text = T
{ unT :: J.JSString
} deriving (P.Eq)
{-# RULES
"equality/text" forall (x :: Text) . (==) x = (P.==) x
#-}
fromString :: String -> Text
fromString = T . J.pack
toString :: Text -> String
toString = J.unpack . unT
fromCWText :: Text -> T.Text
fromCWText = J.textFromJSString . unT
toCWText :: T.Text -> Text
toCWText = T . J.textToJSString
infixr 6 <>
(<>) :: Text -> Text -> Text
T a <> T b = T (J.append a b)
numberOfCharacters :: Text -> Number
numberOfCharacters = fromInt . J.length . unT
numberOfWords :: Text -> Number
numberOfWords = fromInt . length . J.words . unT
numberOfLines :: Text -> Number
numberOfLines = fromInt . length . J.lines . unT
lines :: Text -> [Text]
lines = map T . J.lines . unT
unlines :: [Text] -> Text
unlines = T . J.unlines . map unT
words :: Text -> [Text]
words = map T . J.words . unT
unwords :: [Text] -> Text
unwords = T . J.unwords . map unT
characters :: Text -> [Text]
characters = map (T . J.singleton) . J.unpack . unT
printed :: Number -> Text
printed = T . J.pack . show
joined :: [Text] -> Text
joined = T . J.concat . map unT
joinedWith :: ([Text], Text) -> Text
joinedWith (ts, T sep) = T (J.intercalate sep (map unT ts))
lowercase :: Text -> Text
lowercase = T . J.toLower . unT
uppercase :: Text -> Text
uppercase = T . J.toUpper . unT
startsWith :: (Text, Text) -> Truth
startsWith (T a, T b) = J.isPrefixOf b a
endsWith :: (Text, Text) -> Truth
endsWith (T a, T b) = J.isSuffixOf b a
-- | Gives the result of replacing one piece of text with another.
--
-- For example, @substitution("How do you do?", "do", "be")@ is equal to
-- @"How be you be?"@.
substitution :: (Text, Text, Text) -> Text
substitution (T text, T from, T to) = T (J.replace from to text)
-- | Gives the result of performing many substitutions in a piece of
-- text. This is commonly used to build text to show in a program,
-- as in this example:
--
-- substitutions("Lives: [lives] of 3 Score: [score]",
-- [("[lives]", printed(lives)),
-- ("[score]", printed(score))])
substitutions :: (Text, [(Text, Text)]) -> Text
substitutions (T text, replacements) =
T (foldl' (\a (T b, T c) -> J.replace b c a) text replacements)
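-- Added examples (a sketch of the functions above, written as they would be
-- used from CodeWorld code, where text literals are of type Text):
-- joinedWith(["a","b","c"], "-") is equal to "a-b-c"
-- startsWith("haskell", "has") is equal to True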
|
tgdavies/codeworld
|
codeworld-base/src/Internal/Text.hs
|
apache-2.0
| 3,631 | 0 | 11 | 804 | 963 | 548 | 415 | 87 | 1 |
-- | read-write lock specialized for using LMDB with MDB_NOLOCK option
--
module Database.VCache.RWLock
( RWLock
, newRWLock
, withRWLock
, withRdOnlyLock
) where
import Control.Monad
import Control.Exception
import Control.Concurrent.MVar
import Data.IORef
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
-- | RWLock
--
-- VCache uses LMDB with the MDB_NOLOCK option, mostly because I don't
-- want to deal with the whole issue of OS-bound threads or a limit on the
-- number of concurrent readers. Without locks, we essentially have one
-- valid snapshot. The writer can begin dismantling earlier snapshots
-- as needed to allocate pages.
--
-- RWLock essentially enforces this sort of frame-buffer concurrency.
data RWLock = RWLock
{ rwlock_frames :: !(MVar FB)
, rwlock_writer :: !(MVar ()) -- enforce single writer
}
data FB = FB !F !F
type F = IORef Frame
data Frame = Frame
{ frame_reader_next :: {-# UNPACK #-} !Int
, frame_readers :: !IntSet
, frame_onClear :: ![IO ()] -- actions to perform
}
frame0 :: Frame
frame0 = Frame 1 IntSet.empty []
newRWLock :: IO RWLock
newRWLock = liftM2 RWLock (newMVar =<< newF2) newEmptyMVar where
newF2 :: IO FB
newF2 = liftM2 FB newF newF
newF :: IO F
newF = newIORef frame0
withWriterMutex :: RWLock -> IO a -> IO a
withWriterMutex l = bracket_ getLock dropLock where
getLock = putMVar (rwlock_writer l) ()
dropLock = takeMVar (rwlock_writer l)
{-# INLINE withWriterMutex #-}
-- | Grab the current read-write lock for the duration of
-- an underlying action. This may wait on older readers.
withRWLock :: RWLock -> IO a -> IO a
withRWLock l action = withWriterMutex l $ do
oldFrame <- rotateReaderFrames l
mvWait <- newEmptyMVar
onFrameCleared oldFrame (putMVar mvWait ())
takeMVar mvWait
action
-- rotate a fresh reader frame, and grab the oldest.
-- Thus should only be performed while holding the writer lock.
rotateReaderFrames :: RWLock -> IO F
rotateReaderFrames l = mask_ $ do
let var = rwlock_frames l
f0 <- newF
(FB f1 f2) <- takeMVar var
putMVar var (FB f0 f1)
return f2
--
-- NOTE: Each of these 'frames' actually contains readers of two
-- transactions. Alignment between LMDB transactions and VCache
-- RWLock isn't exact.
--
-- Each write lock will rotate reader frames just once:
--
-- (f1,f2) → (f0,f1) returning f2
--
-- Writer is working on LMDB frame N.
--
-- f0 will have readers for frame N-1 and (after commit) for N.
-- f1 will have readers for frame N-2 and some for N-1.
-- f2 will have readers for frame N-3 and some for N-2.
--
-- LMDB guarantees that the data pages for frames N-1 and N-2 are
-- intact. However, frame N-3 will be dismantled while building
-- frame N. Thus, we must wait for f2 readers to finish before we
-- begin the writer N transaction.
--
-- If we assume short-running readers and long-running writers, it
-- is rare that the writer ever needs to wait on readers. Readers
-- never need to wait on the writer. This assumption is achieved by
-- batching writes in VCache.
--
-- perform some action when a frame is cleared
-- performs immediately, if possible.
onFrameCleared :: F -> IO () -> IO ()
onFrameCleared f action = atomicModifyIORef f addAction >>= id where
addAction frame =
let bAlreadyClear = IntSet.null (frame_readers frame) in
if bAlreadyClear then (frame0,action) else
let onClear' = action : frame_onClear frame in
let frame' = frame { frame_onClear = onClear' } in
(frame', return ())
-- | Grab a read-only lock for the duration of some IO action.
--
-- Readers never need to wait on the writer.
withRdOnlyLock :: RWLock -> IO a -> IO a
withRdOnlyLock l = bracket (newReader l) releaseReader . const
newtype Reader = Reader { releaseReader :: IO () }
-- obtains a reader handle; returns function to release reader.
newReader :: RWLock -> IO Reader
newReader l = mask_ $ do
let var = rwlock_frames l
fb@(FB f _) <- takeMVar var
r <- atomicModifyIORef f addReader
putMVar var fb
return (Reader (delReader f r))
addReader :: Frame -> (Frame, Int)
addReader f =
let r = frame_reader_next f in
let rdrs' = IntSet.insert r (frame_readers f) in
let f' = f { frame_reader_next = (r + 1)
, frame_readers = rdrs' } in
(f', r)
delReader :: F -> Int -> IO ()
delReader f r = atomicModifyIORef f del >>= sequence_ where
del frm =
let rdrs' = IntSet.delete r (frame_readers frm) in
if IntSet.null rdrs' then (frame0, frame_onClear frm) else
let frm' = frm { frame_readers = rdrs' } in
(frm', [])
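-- Added usage sketch (commentary, not part of the original module): the
-- single writer wraps each LMDB write transaction in 'withRWLock', which
-- first waits for readers of the oldest frame to finish, while readers wrap
-- their snapshot reads in 'withRdOnlyLock' and never block on the writer.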
|
bitemyapp/haskell-vcache
|
hsrc_lib/Database/VCache/RWLock.hs
|
bsd-2-clause
| 4,694 | 0 | 17 | 1,057 | 1,033 | 545 | 488 | 91 | 2 |
-- | This module exports the templates for automatic instance deriving of "Transformation.Shallow" type classes. The most
-- common way to use it would be
--
-- > import qualified Transformation.Shallow.TH
-- > data MyDataType f' f = ...
-- > $(Transformation.Shallow.TH.deriveFunctor ''MyDataType)
--
{-# Language CPP, TemplateHaskell #-}
-- Adapted from https://wiki.haskell.org/A_practical_Template_Haskell_Tutorial
module Transformation.Shallow.TH (deriveAll, deriveFunctor, deriveFoldable, deriveTraversable)
where
import Control.Applicative (liftA2)
import Control.Monad (replicateM)
import Data.Functor.Compose (Compose(getCompose))
import Data.Functor.Const (Const(getConst))
import Data.Maybe (fromMaybe)
import Data.Monoid (Monoid, (<>))
import Language.Haskell.TH
import Language.Haskell.TH.Syntax (BangType, VarBangType, getQ, putQ)
import qualified Transformation
import qualified Transformation.Shallow
data Deriving = Deriving { _constructor :: Name, _variable :: Name }
deriveAll :: Name -> Q [Dec]
deriveAll ty = foldr f (pure []) [deriveFunctor, deriveFoldable, deriveTraversable]
where f derive rest = (<>) <$> derive ty <*> rest
deriveFunctor :: Name -> Q [Dec]
deriveFunctor typeName = do
t <- varT <$> newName "t"
(instanceType, cs) <- reifyConstructors typeName
let shallowConstraint ty = conT ''Transformation.Shallow.Functor `appT` t `appT` ty
baseConstraint ty = conT ''Transformation.At `appT` t `appT` ty
(constraints, dec) <- genShallowmap shallowConstraint baseConstraint instanceType cs
sequence [instanceD (cxt $ appT (conT ''Transformation.Transformation) t : map pure constraints)
(shallowConstraint instanceType)
[pure dec]]
deriveFoldable :: Name -> Q [Dec]
deriveFoldable typeName = do
t <- varT <$> newName "t"
m <- varT <$> newName "m"
(instanceType, cs) <- reifyConstructors typeName
let shallowConstraint ty = conT ''Transformation.Shallow.Foldable `appT` t `appT` ty
baseConstraint ty = conT ''Transformation.At `appT` t `appT` ty
(constraints, dec) <- genFoldMap shallowConstraint baseConstraint instanceType cs
sequence [instanceD (cxt (appT (conT ''Transformation.Transformation) t :
appT (appT equalityT (conT ''Transformation.Codomain `appT` t))
(conT ''Const `appT` m) :
appT (conT ''Monoid) m : map pure constraints))
(shallowConstraint instanceType)
[pure dec]]
deriveTraversable :: Name -> Q [Dec]
deriveTraversable typeName = do
t <- varT <$> newName "t"
m <- varT <$> newName "m"
f <- varT <$> newName "f"
(instanceType, cs) <- reifyConstructors typeName
let shallowConstraint ty = conT ''Transformation.Shallow.Traversable `appT` t `appT` ty
baseConstraint ty = conT ''Transformation.At `appT` t `appT` ty
(constraints, dec) <- genTraverse shallowConstraint baseConstraint instanceType cs
sequence [instanceD (cxt (appT (conT ''Transformation.Transformation) t :
appT (appT equalityT (conT ''Transformation.Codomain `appT` t))
(conT ''Compose `appT` m `appT` f) :
appT (conT ''Applicative) m : map pure constraints))
(shallowConstraint instanceType)
[pure dec]]
substitute :: Type -> Q Type -> Q Type -> Q Type
substitute resultType = liftA2 substitute'
where substitute' instanceType argumentType =
substituteVars (substitutions resultType instanceType) argumentType
substitutions (AppT t1 (VarT name1)) (AppT t2 (VarT name2)) = (name1, name2) : substitutions t1 t2
substitutions _t1 _t2 = []
substituteVars subs (VarT name) = VarT (fromMaybe name $ lookup name subs)
substituteVars subs (AppT t1 t2) = AppT (substituteVars subs t1) (substituteVars subs t2)
substituteVars _ t = t
reifyConstructors :: Name -> Q (TypeQ, [Con])
reifyConstructors ty = do
(TyConI tyCon) <- reify ty
(tyConName, tyVars, _kind, cs) <- case tyCon of
DataD _ nm tyVars kind cs _ -> return (nm, tyVars, kind, cs)
NewtypeD _ nm tyVars kind c _ -> return (nm, tyVars, kind, [c])
_ -> fail "deriveApply: tyCon may not be a type synonym."
#if MIN_VERSION_template_haskell(2,17,0)
let (KindedTV tyVar _ (AppT (AppT ArrowT StarT) StarT) : _) = reverse tyVars
instanceType = foldl apply (conT tyConName) (reverse $ drop 1 $ reverse tyVars)
apply t (PlainTV name _) = appT t (varT name)
apply t (KindedTV name _ _) = appT t (varT name)
#else
let (KindedTV tyVar (AppT (AppT ArrowT StarT) StarT) : _) = reverse tyVars
instanceType = foldl apply (conT tyConName) (reverse $ drop 1 $ reverse tyVars)
apply t (PlainTV name) = appT t (varT name)
apply t (KindedTV name _) = appT t (varT name)
#endif
putQ (Deriving tyConName tyVar)
return (instanceType, cs)
genShallowmap :: (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Type -> [Con] -> Q ([Type], Dec)
genShallowmap shallowConstraint baseConstraint instanceType cs = do
(constraints, clauses) <- unzip <$> mapM (genShallowmapClause shallowConstraint baseConstraint instanceType) cs
return (concat constraints, FunD '(Transformation.Shallow.<$>) clauses)
genFoldMap :: (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Type -> [Con] -> Q ([Type], Dec)
genFoldMap shallowConstraint baseConstraint instanceType cs = do
(constraints, clauses) <- unzip <$> mapM (genFoldMapClause shallowConstraint baseConstraint instanceType) cs
return (concat constraints, FunD 'Transformation.Shallow.foldMap clauses)
genTraverse :: (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Type -> [Con] -> Q ([Type], Dec)
genTraverse shallowConstraint baseConstraint instanceType cs = do
(constraints, clauses) <- unzip
<$> mapM (genTraverseClause genTraverseField shallowConstraint baseConstraint instanceType) cs
return (concat constraints, FunD 'Transformation.Shallow.traverse clauses)
genShallowmapClause :: (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Type -> Con -> Q ([Type], Clause)
genShallowmapClause shallowConstraint baseConstraint _instanceType (NormalC name fieldTypes) = do
t <- newName "t"
fieldNames <- replicateM (length fieldTypes) (newName "x")
let pats = [varP t, parensP (conP name $ map varP fieldNames)]
constraintsAndFields = zipWith newField fieldNames fieldTypes
newFields = map (snd <$>) constraintsAndFields
body = normalB $ appsE $ conE name : newFields
newField :: Name -> BangType -> Q ([Type], Exp)
newField x (_, fieldType) = genShallowmapField (varE t) fieldType shallowConstraint baseConstraint (varE x) id
constraints <- (concat . (fst <$>)) <$> sequence constraintsAndFields
(,) constraints <$> clause pats body []
genShallowmapClause shallowConstraint baseConstraint _instanceType (RecC name fields) = do
t <- newName "t"
x <- newName "x"
let body = normalB $ recConE name $ (snd <$>) <$> constraintsAndFields
constraintsAndFields = map newNamedField fields
newNamedField :: VarBangType -> Q ([Type], (Name, Exp))
newNamedField (fieldName, _, fieldType) =
((,) fieldName <$>)
<$> genShallowmapField (varE t) fieldType shallowConstraint baseConstraint (appE (varE fieldName) (varE x)) id
constraints <- (concat . (fst <$>)) <$> sequence constraintsAndFields
(,) constraints <$> clause [varP t, x `asP` recP name []] body []
genShallowmapClause shallowConstraint baseConstraint instanceType
(GadtC [name] fieldTypes (AppT resultType (VarT tyVar))) =
do Just (Deriving tyConName _tyVar) <- getQ
putQ (Deriving tyConName tyVar)
genShallowmapClause (shallowConstraint . substitute resultType instanceType)
(baseConstraint . substitute resultType instanceType)
instanceType (NormalC name fieldTypes)
genShallowmapClause shallowConstraint baseConstraint instanceType
(RecGadtC [name] fields (AppT resultType (VarT tyVar))) =
do Just (Deriving tyConName _tyVar) <- getQ
putQ (Deriving tyConName tyVar)
genShallowmapClause (shallowConstraint . substitute resultType instanceType)
(baseConstraint . substitute resultType instanceType)
instanceType (RecC name fields)
genShallowmapClause shallowConstraint baseConstraint instanceType (ForallC _vars _cxt con) =
genShallowmapClause shallowConstraint baseConstraint instanceType con
genFoldMapClause :: (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Type -> Con -> Q ([Type], Clause)
genFoldMapClause shallowConstraint baseConstraint _instanceType (NormalC name fieldTypes) = do
t <- newName "t"
fieldNames <- replicateM (length fieldTypes) (newName "x")
let pats = [varP t, conP name (map varP fieldNames)]
constraintsAndFields = zipWith newField fieldNames fieldTypes
body | null fieldNames = [| mempty |]
| otherwise = foldr1 append $ (snd <$>) <$> constraintsAndFields
append a b = [| $(a) <> $(b) |]
newField :: Name -> BangType -> Q ([Type], Exp)
newField x (_, fieldType) = genFoldMapField (varE t) fieldType shallowConstraint baseConstraint (varE x) id
constraints <- (concat . (fst <$>)) <$> sequence constraintsAndFields
(,) constraints <$> clause pats (normalB body) []
genFoldMapClause shallowConstraint baseConstraint _instanceType (RecC name fields) = do
t <- newName "t"
x <- newName "x"
let body | null fields = [| mempty |]
| otherwise = foldr1 append $ (snd <$>) <$> constraintsAndFields
constraintsAndFields = map newField fields
append a b = [| $(a) <> $(b) |]
newField :: VarBangType -> Q ([Type], Exp)
newField (fieldName, _, fieldType) =
genFoldMapField (varE t) fieldType shallowConstraint baseConstraint (appE (varE fieldName) (varE x)) id
constraints <- (concat . (fst <$>)) <$> sequence constraintsAndFields
(,) constraints <$> clause [varP t, x `asP` recP name []] (normalB body) []
genFoldMapClause shallowConstraint baseConstraint instanceType
(GadtC [name] fieldTypes (AppT resultType (VarT tyVar))) =
do Just (Deriving tyConName _tyVar) <- getQ
putQ (Deriving tyConName tyVar)
genFoldMapClause (shallowConstraint . substitute resultType instanceType)
(baseConstraint . substitute resultType instanceType)
instanceType (NormalC name fieldTypes)
genFoldMapClause shallowConstraint baseConstraint instanceType
(RecGadtC [name] fields (AppT resultType (VarT tyVar))) =
do Just (Deriving tyConName _tyVar) <- getQ
putQ (Deriving tyConName tyVar)
genFoldMapClause (shallowConstraint . substitute resultType instanceType)
(baseConstraint . substitute resultType instanceType)
instanceType (RecC name fields)
genFoldMapClause shallowConstraint baseConstraint instanceType (ForallC _vars _cxt con) =
genFoldMapClause shallowConstraint baseConstraint instanceType con
type GenTraverseFieldType = Q Exp -> Type -> (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Exp -> (Q Exp -> Q Exp)
-> Q ([Type], Exp)
genTraverseClause :: GenTraverseFieldType -> (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Type -> Con
-> Q ([Type], Clause)
genTraverseClause genField shallowConstraint baseConstraint _instanceType (NormalC name fieldTypes) = do
t <- newName "t"
fieldNames <- replicateM (length fieldTypes) (newName "x")
let pats = [varP t, parensP (conP name $ map varP fieldNames)]
constraintsAndFields = zipWith newField fieldNames fieldTypes
newFields = map (snd <$>) constraintsAndFields
body | null fieldTypes = [| pure $(conE name) |]
| otherwise = fst $ foldl apply (conE name, False) newFields
apply (a, False) b = ([| $(a) <$> $(b) |], True)
apply (a, True) b = ([| $(a) <*> $(b) |], True)
newField :: Name -> BangType -> Q ([Type], Exp)
newField x (_, fieldType) = genField (varE t) fieldType shallowConstraint baseConstraint (varE x) id
constraints <- (concat . (fst <$>)) <$> sequence constraintsAndFields
(,) constraints <$> clause pats (normalB body) []
genTraverseClause genField shallowConstraint baseConstraint _instanceType (RecC name fields) = do
f <- newName "f"
x <- newName "x"
let constraintsAndFields = map newNamedField fields
body | null fields = [| pure $(conE name) |]
| otherwise = fst (foldl apply (conE name, False) $ map (snd . snd <$>) constraintsAndFields)
apply (a, False) b = ([| $(a) <$> $(b) |], True)
apply (a, True) b = ([| $(a) <*> $(b) |], True)
newNamedField :: VarBangType -> Q ([Type], (Name, Exp))
newNamedField (fieldName, _, fieldType) =
((,) fieldName <$>)
<$> genField (varE f) fieldType shallowConstraint baseConstraint (appE (varE fieldName) (varE x)) id
constraints <- (concat . (fst <$>)) <$> sequence constraintsAndFields
(,) constraints <$> clause [varP f, x `asP` recP name []] (normalB body) []
genTraverseClause genField shallowConstraint baseConstraint instanceType
(GadtC [name] fieldTypes (AppT resultType (VarT tyVar))) =
do Just (Deriving tyConName _tyVar) <- getQ
putQ (Deriving tyConName tyVar)
genTraverseClause genField
(shallowConstraint . substitute resultType instanceType)
(baseConstraint . substitute resultType instanceType)
instanceType (NormalC name fieldTypes)
genTraverseClause genField shallowConstraint baseConstraint instanceType
(RecGadtC [name] fields (AppT resultType (VarT tyVar))) =
do Just (Deriving tyConName _tyVar) <- getQ
putQ (Deriving tyConName tyVar)
genTraverseClause genField
(shallowConstraint . substitute resultType instanceType)
(baseConstraint . substitute resultType instanceType)
instanceType (RecC name fields)
genTraverseClause genField shallowConstraint baseConstraint instanceType (ForallC _vars _cxt con) =
genTraverseClause genField shallowConstraint baseConstraint instanceType con
genShallowmapField :: Q Exp -> Type -> (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Exp -> (Q Exp -> Q Exp)
-> Q ([Type], Exp)
genShallowmapField trans fieldType shallowConstraint baseConstraint fieldAccess wrap = do
Just (Deriving _ typeVar) <- getQ
case fieldType of
AppT ty a | ty == VarT typeVar ->
(,) <$> ((:[]) <$> baseConstraint (pure a))
<*> (wrap (varE '(Transformation.$) `appE` trans) `appE` fieldAccess)
AppT t1 t2 | t2 == VarT typeVar -> (,) <$> traverse shallowConstraint [pure t1]
<*> appE (wrap [| ($trans Transformation.Shallow.<$>) |]) fieldAccess
AppT t1 t2 | t1 /= VarT typeVar ->
genShallowmapField trans t2 shallowConstraint baseConstraint fieldAccess (wrap . appE (varE '(<$>)))
SigT ty _kind -> genShallowmapField trans ty shallowConstraint baseConstraint fieldAccess wrap
ParensT ty -> genShallowmapField trans ty shallowConstraint baseConstraint fieldAccess wrap
_ -> (,) [] <$> fieldAccess
genFoldMapField :: Q Exp -> Type -> (Q Type -> Q Type) -> (Q Type -> Q Type) -> Q Exp -> (Q Exp -> Q Exp)
-> Q ([Type], Exp)
genFoldMapField trans fieldType shallowConstraint baseConstraint fieldAccess wrap = do
Just (Deriving _ typeVar) <- getQ
case fieldType of
AppT ty a | ty == VarT typeVar ->
(,) <$> ((:[]) <$> baseConstraint (pure a))
<*> (wrap (varE '(.) `appE` varE 'getConst `appE` (varE '(Transformation.$) `appE` trans))
`appE` fieldAccess)
AppT t1 t2 | t2 == VarT typeVar -> (,) <$> traverse shallowConstraint [pure t1]
<*> appE (wrap [| (Transformation.Shallow.foldMap $trans) |]) fieldAccess
AppT t1 t2 | t1 /= VarT typeVar ->
genFoldMapField trans t2 shallowConstraint baseConstraint fieldAccess (wrap . appE (varE 'foldMap))
SigT ty _kind -> genFoldMapField trans ty shallowConstraint baseConstraint fieldAccess wrap
ParensT ty -> genFoldMapField trans ty shallowConstraint baseConstraint fieldAccess wrap
_ -> (,) [] <$> [| mempty |]
genTraverseField :: GenTraverseFieldType
genTraverseField trans fieldType shallowConstraint baseConstraint fieldAccess wrap = do
Just (Deriving _ typeVar) <- getQ
case fieldType of
AppT ty a | ty == VarT typeVar ->
(,) <$> ((:[]) <$> baseConstraint (pure a))
<*> (wrap (varE '(.) `appE` varE 'getCompose `appE` (varE '(Transformation.$) `appE` trans))
`appE` fieldAccess)
AppT t1 t2 | t2 == VarT typeVar -> (,) <$> traverse shallowConstraint [pure t1]
<*> appE (wrap [| (Transformation.Shallow.traverse $trans) |]) fieldAccess
AppT t1 t2 | t1 /= VarT typeVar ->
genTraverseField trans t2 shallowConstraint baseConstraint fieldAccess (wrap . appE (varE 'traverse))
SigT ty _kind -> genTraverseField trans ty shallowConstraint baseConstraint fieldAccess wrap
ParensT ty -> genTraverseField trans ty shallowConstraint baseConstraint fieldAccess wrap
_ -> (,) [] <$> [| pure $fieldAccess |]
|
blamario/grampa
|
deep-transformations/src/Transformation/Shallow/TH.hs
|
bsd-2-clause
| 17,642 | 0 | 23 | 4,150 | 5,883 | 2,977 | 2,906 | 266 | 6 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QPainterPath.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:35
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QPainterPath (
ElementType, eMoveToElement, eLineToElement, eCurveToElement, eCurveToDataElement
)
where
import Foreign.C.Types
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
data CElementType a = CElementType a
type ElementType = QEnum(CElementType Int)
ieElementType :: Int -> ElementType
ieElementType x = QEnum (CElementType x)
instance QEnumC (CElementType Int) where
qEnum_toInt (QEnum (CElementType x)) = x
qEnum_fromInt x = QEnum (CElementType x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> ElementType -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eMoveToElement :: ElementType
eMoveToElement
= ieElementType $ 0
eLineToElement :: ElementType
eLineToElement
= ieElementType $ 1
eCurveToElement :: ElementType
eCurveToElement
= ieElementType $ 2
eCurveToDataElement :: ElementType
eCurveToDataElement
= ieElementType $ 3
|
keera-studios/hsQt
|
Qtc/Enums/Gui/QPainterPath.hs
|
bsd-2-clause
| 2,596 | 0 | 18 | 543 | 629 | 323 | 306 | 58 | 1 |
module Main (main) where
import Test.Framework (defaultMain)
import qualified Database.Redis.Tags.Test.Tags
main :: IO ()
main = defaultMain [
Database.Redis.Tags.Test.Tags.tests
]
|
akaspin/hedis-tags
|
test/Main.hs
|
bsd-2-clause
| 206 | 0 | 7 | 44 | 55 | 35 | 20 | 6 | 1 |
module UniversalSyntax(
VarName, DataConName, Literal,
Type,
var, dataCon, intLit, floatLit, charLit,
getVarName, getDataConName) where
-- This is a module for syntax elements that are the same across all intermediate
-- representations, from the core syntax to the imperative representation
data Type
= TypeCon String Type Type
| TypeVar String
| Integer
| Floating
| Character
deriving (Eq, Ord, Show)
data Literal
= IntLit Int
| FloatLit Double
| CharLit Char
deriving (Eq, Ord, Show)
intLit = IntLit
floatLit = FloatLit
charLit = CharLit
data VarName = VarName String
deriving (Eq, Ord, Show)
var = VarName
getVarName (VarName n) = n
data DataConName = DataConName String
deriving (Eq, Ord, Show)
dataCon = DataConName
getDataConName (DataConName n) = n
|
dillonhuff/AFL
|
src/UniversalSyntax.hs
|
bsd-3-clause
| 832 | 0 | 7 | 191 | 218 | 126 | 92 | 28 | 1 |
{-# LANGUAGE TypeOperators, CPP #-}
#include "macros.h"
LANGUAGE_UNSAFE
module Type.Eq.Higher.Unsafe (module Type.Eq.Unsafe, module Type.Eq.Higher.Unsafe) where
import Type.Eq.Unsafe
import {-# SOURCE #-} Type.Eq.Higher
import Unsafe.Coerce
-- | Very unsafe! The same rules apply as for 'unsafeCoerce'.
unsafeCoercion1 :: f ::~:: g
unsafeCoercion1 = unsafeCoerce Eq1
-- | Very unsafe! The same rules apply as for 'unsafeCoerce'.
unsafeCoercion2 :: m :::~::: n
unsafeCoercion2 = unsafeCoerce Eq2
-- | Very unsafe!
unsafeOuterEq1 :: OuterEq1 m f
unsafeOuterEq1 = unsafeCoerce OuterEq1
-- | Very unsafe!
unsafeInnerEq1 :: InnerEq1 a f
unsafeInnerEq1 = unsafeCoerce InnerEq1
|
glaebhoerl/type-eq
|
Type/Eq/Higher/Unsafe.hs
|
bsd-3-clause
| 679 | 0 | 8 | 98 | 131 | 73 | 58 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
{-| An ST Monad based interface to the CUDD BDD library
This is a straightforward wrapper around the C library. See <http://vlsi.colorado.edu/~fabio/CUDD/> for documentation.
Example usage:
> import Control.Monad.ST
> import Cudd.Imperative
>
> main = do
> res <- stToIO $ withManagerDefaults $ \manager -> do
> v1 <- ithVar manager 0
> v2 <- ithVar manager 1
> conj <- bAnd manager v1 v2
> implies <- lEq manager conj v1
> deref manager conj
> return implies
> print res
-}
module Cudd.Imperative (
DDManager(..),
DDNode(..),
cuddInit,
cuddInitDefaults,
withManager,
withManagerDefaults,
withManagerIO,
withManagerIODefaults,
shuffleHeap,
bZero,
bOne,
ithVar,
bAnd,
bOr,
bNand,
bNor,
bXor,
bXnor,
bNot,
bIte,
bExists,
bForall,
deref,
setVarMap,
varMap,
lEq,
swapVariables,
ref,
largestCube,
makePrime,
support,
supportIndices,
indicesToCube,
computeCube,
nodesToCube,
readSize,
bddToCubeArray,
compose,
andAbstract,
xorExistAbstract,
leqUnless,
equivDC,
xEqY,
debugCheck,
checkKeys,
pickOneMinterm,
toInt,
checkZeroRef,
readInvPerm,
readPerm,
dagSize,
readNodeCount,
readPeakNodeCount,
regular,
readMaxCache,
readMaxCacheHard,
setMaxCacheHard,
readCacheSlots,
readCacheUsedSlots,
cudd_unique_slots,
cudd_cache_slots,
andLimit,
readTree,
newVarAtLevel,
liCompaction,
squeeze,
minimize,
newVar,
vectorCompose,
quit,
readIndex,
printMinterm,
countMintermExact,
checkCube,
Cube,
Prime,
DDGen(..),
genFree,
isGenEmpty,
firstCube,
nextCube,
firstPrime,
nextPrime,
firstNode,
nextNode,
module Cudd.Common
) where
import Foreign hiding (void)
import Foreign.Ptr
import Foreign.C.Types
import Control.Monad.ST
import Control.Monad.ST.Unsafe
import Control.Monad
import Control.Monad.IO.Class
import Data.List
import System.IO.Unsafe
import Cudd.C
import Cudd.MTR
import Cudd.Common
newtype DDManager s u = DDManager {unDDManager :: Ptr CDDManager}
newtype DDNode s u = DDNode {unDDNode :: Ptr CDDNode} deriving (Ord, Eq, Show)
cuddInit :: Int -> Int -> Int -> Int -> Int -> ST s (DDManager s u)
cuddInit numVars numVarsZ numSlots cacheSize maxMemory = unsafeIOToST $ do
cm <- c_cuddInit (fromIntegral numVars) (fromIntegral numVarsZ) (fromIntegral numSlots) (fromIntegral cacheSize) (fromIntegral maxMemory)
return $ DDManager cm
cuddInitDefaults :: ST s (DDManager s u)
cuddInitDefaults = cuddInit 0 0 cudd_unique_slots cudd_cache_slots 0
withManager :: Int -> Int -> Int -> Int -> Int -> (forall u. DDManager s u -> ST s a) -> ST s a
withManager numVars numVarsZ numSlots cacheSize maxMemory f = do
res <- cuddInit numVars numVarsZ numSlots cacheSize maxMemory
f res
withManagerDefaults :: (forall u. DDManager s u -> ST s a) -> ST s a
withManagerDefaults f = do
res <- cuddInitDefaults
f res
withManagerIO :: MonadIO m => Int -> Int -> Int -> Int -> Int -> (forall u. DDManager RealWorld u -> m a) -> m a
withManagerIO numVars numVarsZ numSlots cacheSize maxMemory f = do
res <- liftIO $ stToIO $ cuddInit numVars numVarsZ numSlots cacheSize maxMemory
f res
withManagerIODefaults :: MonadIO m => (forall u. DDManager RealWorld u -> m a) -> m a
withManagerIODefaults f = do
res <- liftIO $ stToIO cuddInitDefaults
f res
shuffleHeap :: DDManager s u -> [Int] -> ST s ()
shuffleHeap (DDManager m) order = unsafeIOToST $
withArrayLen (map fromIntegral order) $ \size ptr -> do
when (sort order /= [0..size-1]) (error "shuffleHeap: order does not contain each variable once")
res1 <- c_cuddBddIthVar m (fromIntegral (size - 1))
when (res1 == nullPtr) (error "shuffleHeap: Failed to resize table")
res2 <- c_cuddShuffleHeap m ptr
when (fromIntegral res2 /= 1) (error "shuffleHeap: Cudd_ShuffleHeap failed")
return ()
toInt :: DDNode s u -> Int
toInt (DDNode n) = fromIntegral $ ptrToIntPtr n
arg0 :: (Ptr CDDManager -> IO (Ptr CDDNode)) -> DDManager s u -> ST s (DDNode s u)
arg0 f (DDManager m) = liftM DDNode $ unsafeIOToST $ f m
arg1 :: (Ptr CDDManager -> Ptr CDDNode -> IO (Ptr CDDNode)) -> DDManager s u -> DDNode s u -> ST s (DDNode s u)
arg1 f (DDManager m) (DDNode x) = liftM DDNode $ unsafeIOToST $ f m x
arg2 :: (Ptr CDDManager -> Ptr CDDNode -> Ptr CDDNode -> IO (Ptr CDDNode)) -> DDManager s u -> DDNode s u -> DDNode s u -> ST s (DDNode s u)
arg2 f (DDManager m) (DDNode x) (DDNode y) = liftM DDNode $ unsafeIOToST $ f m x y
arg3 :: (Ptr CDDManager -> Ptr CDDNode -> Ptr CDDNode -> Ptr CDDNode -> IO (Ptr CDDNode)) -> DDManager s u -> DDNode s u -> DDNode s u -> DDNode s u -> ST s (DDNode s u)
arg3 f (DDManager m) (DDNode x) (DDNode y) (DDNode z) = liftM DDNode $ unsafeIOToST $ f m x y z
bZero, bOne :: DDManager s u -> DDNode s u
bZero (DDManager m) = DDNode $ unsafePerformIO $ c_cuddReadLogicZero m
bOne (DDManager m) = DDNode $ unsafePerformIO $ c_cuddReadOne m
bAnd = arg2 c_cuddBddAnd
bOr = arg2 c_cuddBddOr
bNand = arg2 c_cuddBddNand
bNor = arg2 c_cuddBddNor
bXor = arg2 c_cuddBddXor
bXnor = arg2 c_cuddBddXnor
bIte = arg3 c_cuddBddIte
bExists = arg2 c_cuddBddExistAbstract
bForall = arg2 c_cuddBddUnivAbstract
andAbstract = arg3 c_cuddBddAndAbstract
xorExistAbstract = arg3 c_cuddBddXorExistAbstract
bNot :: DDNode s u -> DDNode s u
bNot (DDNode x) = DDNode $ unsafePerformIO $ c_cuddNotNoRef x
ithVar :: DDManager s u -> Int -> ST s (DDNode s u)
ithVar (DDManager m) i = liftM DDNode $ unsafeIOToST $ c_cuddBddIthVar m (fromIntegral i)
deref :: DDManager s u -> DDNode s u -> ST s ()
deref (DDManager m) (DDNode x) = unsafeIOToST $ c_cuddIterDerefBdd m x
setVarMap :: DDManager s u -> [DDNode s u] -> [DDNode s u] -> ST s ()
setVarMap (DDManager m) xs ys = unsafeIOToST $
withArrayLen (map unDDNode xs) $ \xl xp ->
withArrayLen (map unDDNode ys) $ \yl yp -> do
when (xl /= yl) (error "setVarMap: lengths not equal")
void $ c_cuddSetVarMap m xp yp (fromIntegral xl)
varMap :: DDManager s u -> DDNode s u -> ST s (DDNode s u)
varMap (DDManager m) (DDNode x) = liftM DDNode $ unsafeIOToST $ c_cuddBddVarMap m x
lEq :: DDManager s u -> DDNode s u -> DDNode s u -> ST s Bool
lEq (DDManager m) (DDNode x) (DDNode y) = liftM (==1) $ unsafeIOToST $ c_cuddBddLeq m x y
swapVariables :: DDManager s u -> [DDNode s u] -> [DDNode s u] -> DDNode s u -> ST s (DDNode s u)
swapVariables (DDManager m) nodesx nodesy (DDNode x) = unsafeIOToST $
withArrayLen (map unDDNode nodesx) $ \lx xp ->
withArrayLen (map unDDNode nodesy) $ \ly yp -> do
when (lx /= ly) $ error "CuddExplicitDeref: shift: lengths not equal"
res <- c_cuddBddSwapVariables m x xp yp (fromIntegral lx)
return $ DDNode res
ref :: DDNode s u -> ST s ()
ref (DDNode x) = unsafeIOToST $ cuddRef x
largestCube :: DDManager s u -> DDNode s u -> ST s (DDNode s u, Int)
largestCube (DDManager m) (DDNode x) = unsafeIOToST $
alloca $ \lp -> do
res <- c_cuddLargestCube m x lp
l <- peek lp
return (DDNode res, fromIntegral l)
makePrime :: DDManager s u -> DDNode s u -> DDNode s u -> ST s (DDNode s u)
makePrime = arg2 c_cuddBddMakePrime
support :: DDManager s u -> DDNode s u -> ST s (DDNode s u)
support = arg1 c_cuddSupport
supportIndices :: DDManager s u -> DDNode s u -> ST s [Int]
supportIndices (DDManager m) (DDNode x) = unsafeIOToST $
alloca $ \arrp -> do
sz <- c_cuddSupportIndices m x arrp
aaddr <- peek arrp
res <- peekArray (fromIntegral sz) aaddr
return $ map fromIntegral res
indicesToCube :: DDManager s u -> [Int] -> ST s (DDNode s u)
indicesToCube (DDManager m) indices = unsafeIOToST $
withArrayLen (map fromIntegral indices) $ \sz pt -> do
res <- c_cuddIndicesToCube m pt (fromIntegral sz)
return $ DDNode res
computeCube :: DDManager s u -> [DDNode s u] -> [Bool] -> ST s (DDNode s u)
computeCube (DDManager m) nodes phases = unsafeIOToST $
withArrayLen (map unDDNode nodes) $ \szn ptn ->
withArrayLen (map (fromIntegral . fromBool) phases) $ \szp ptp -> do
when (szn /= szp) $ error "computeCube: lists are different lengths"
res <- c_cuddBddComputeCube m ptn ptp (fromIntegral szn)
return $ DDNode res
nodesToCube :: DDManager s u -> [DDNode s u] -> ST s (DDNode s u)
nodesToCube (DDManager m) nodes = unsafeIOToST $
withArrayLen (map unDDNode nodes) $ \sz pt -> do
res <- c_cuddBddComputeCube m pt nullPtr (fromIntegral sz)
return $ DDNode res
readSize :: DDManager s u -> ST s Int
readSize (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddReadSize m
bddToCubeArray :: DDManager s u -> DDNode s u -> ST s [SatBit]
bddToCubeArray ma@(DDManager m) (DDNode x) = unsafeIOToST $ do
size <- liftM fromIntegral $ c_cuddReadSize m
allocaArray size $ \resptr -> do
c_cuddBddToCubeArray m x resptr
res <- peekArray size resptr
return $ map (toSatBit . fromIntegral) res
compose :: DDManager s u -> DDNode s u -> DDNode s u -> Int -> ST s (DDNode s u)
compose (DDManager m) (DDNode f) (DDNode g) v = liftM DDNode $ unsafeIOToST $ c_cuddBddCompose m f g (fromIntegral v)
arg3Bool :: (Ptr CDDManager -> Ptr CDDNode -> Ptr CDDNode -> Ptr CDDNode -> IO CInt) -> DDManager s u -> DDNode s u -> DDNode s u -> DDNode s u -> ST s Bool
arg3Bool f (DDManager m) (DDNode x) (DDNode y) (DDNode z) = liftM (==1) $ unsafeIOToST $ f m x y z
leqUnless, equivDC :: DDManager s u -> DDNode s u -> DDNode s u -> DDNode s u -> ST s Bool
leqUnless = arg3Bool c_cuddBddLeqUnless
equivDC = arg3Bool c_cuddEquivDC
xEqY :: DDManager s u -> [DDNode s u] -> [DDNode s u] -> ST s (DDNode s u)
xEqY (DDManager m) xs ys = unsafeIOToST $
withArrayLen (map unDDNode xs) $ \xl xp ->
withArrayLen (map unDDNode ys) $ \yl yp -> do
when (xl /= yl) (error "xeqy: lengths not equal")
res <- c_cuddXeqy m (fromIntegral xl) xp yp
return $ DDNode res
debugCheck :: DDManager s u -> ST s Int
debugCheck (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddDebugCheck m
checkKeys :: DDManager s u -> ST s Int
checkKeys (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddCheckKeys m
pickOneMinterm :: DDManager s u -> DDNode s u -> [DDNode s u] -> ST s (DDNode s u)
pickOneMinterm (DDManager m) (DDNode d) vars = unsafeIOToST $
withArrayLen (map unDDNode vars) $ \vl vp -> do
res <- c_cuddBddPickOneMinterm m d vp (fromIntegral vl)
return $ DDNode res
checkZeroRef :: DDManager s u -> ST s Int
checkZeroRef (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddCheckZeroRef m
readInvPerm :: DDManager s u -> Int -> ST s Int
readInvPerm (DDManager m) offs = liftM fromIntegral $ unsafeIOToST $ c_cuddReadInvPerm m (fromIntegral offs)
readPerm :: DDManager s u -> Int -> ST s Int
readPerm (DDManager m) offs = liftM fromIntegral $ unsafeIOToST $ c_cuddReadPerm m (fromIntegral offs)
dagSize :: DDNode s u -> ST s Int
dagSize (DDNode d) = liftM fromIntegral $ unsafeIOToST $ c_cuddDagSize d
readNodeCount :: DDManager s u -> ST s Integer
readNodeCount (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddReadNodeCount m
readPeakNodeCount :: DDManager s u -> ST s Integer
readPeakNodeCount (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddReadPeakNodeCount m
regular :: DDNode s u -> DDNode s u
regular (DDNode x) = DDNode $ unsafePerformIO $ c_wrappedRegular x
readMaxCache :: DDManager s u -> ST s Int
readMaxCache (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddReadMaxCache m
readMaxCacheHard :: DDManager s u -> ST s Int
readMaxCacheHard (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddReadMaxCacheHard m
setMaxCacheHard :: DDManager s u -> Int -> ST s ()
setMaxCacheHard (DDManager m) x = unsafeIOToST $ c_cuddSetMaxCacheHard m (fromIntegral x)
readCacheSlots :: DDManager s u -> ST s Int
readCacheSlots (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddReadCacheSlots m
readCacheUsedSlots :: DDManager s u -> ST s Int
readCacheUsedSlots (DDManager m) = liftM fromIntegral $ unsafeIOToST $ c_cuddReadCacheUsedSlots m
andLimit :: DDManager s u -> DDNode s u -> DDNode s u -> Int -> ST s (Maybe (DDNode s u))
andLimit (DDManager m) (DDNode x) (DDNode y) lim = unsafeIOToST $ do
res <- c_cuddBddAndLimit m x y (fromIntegral lim)
if res==nullPtr then
return Nothing
else do
cuddRef res
return $ Just $ DDNode res
readTree :: DDManager s u -> ST s (MtrNode s)
readTree (DDManager m) = liftM MtrNode $ unsafeIOToST $ c_cuddReadTree m
newVarAtLevel :: DDManager s u -> Int -> ST s (DDNode s u)
newVarAtLevel (DDManager m) level = liftM DDNode $ unsafeIOToST $ c_cuddBddNewVarAtLevel m (fromIntegral level)
liCompaction = arg2 c_cuddBddLICompaction
squeeze = arg2 c_cuddBddSqueeze
minimize = arg2 c_cuddBddMinimize
newVar :: DDManager s u -> ST s (DDNode s u)
newVar (DDManager m) = liftM DDNode $ unsafeIOToST $ c_cuddBddNewVar m
vectorCompose :: DDManager s u -> DDNode s u -> [DDNode s u] -> ST s (DDNode s u)
vectorCompose (DDManager m) (DDNode f) nodes = liftM DDNode $ unsafeIOToST $ withArrayLen (map unDDNode nodes) $ \len ptr -> do
sz <- c_cuddReadSize m
when (fromIntegral sz /= len) (error "vectorCompose: not one entry for each variable in manager")
c_cuddBddVectorCompose m f ptr
quit :: DDManager s u -> ST s ()
quit (DDManager m) = unsafeIOToST $ c_cuddQuit m
readIndex :: DDNode s u -> ST s Int
readIndex (DDNode x) = liftM fromIntegral $ unsafeIOToST $ c_cuddNodeReadIndex x
printMinterm :: DDManager s u -> DDNode s u -> ST s ()
printMinterm (DDManager m) (DDNode x) = unsafeIOToST $ c_cuddPrintMinterm m x
countMintermExact :: DDManager s u -> DDNode s u -> Int -> ST s Integer
countMintermExact (DDManager m) (DDNode x) n = unsafeIOToST $
alloca $ \ sizep -> do
apa <- c_cuddApaCountMinterm m x (fromIntegral n) sizep
size <- fromIntegral <$> peek sizep
digits <- peekArray size apa
c_cuddFreeApaNumber apa
return $ foldl ( \ a d -> a * 2^32 + fromIntegral d ) 0 digits
checkCube :: DDManager s u -> DDNode s u -> ST s Bool
checkCube (DDManager m) (DDNode x) = liftM (==1) $ unsafeIOToST $ c_cuddCheckCube m x
data Cube
data Prime
data Node
data DDGen s u t = DDGen (Ptr CDDGen)
genFree :: DDGen s u t -> ST s ()
genFree (DDGen g) = void $ unsafeIOToST $ c_cuddGenFree g
isGenEmpty :: DDGen s u t -> ST s Bool
isGenEmpty (DDGen g) = liftM (==1) $ unsafeIOToST $ c_cuddIsGenEmpty g
firstCube :: DDManager s u -> DDNode s u -> ST s (Maybe ([SatBit], DDGen s u Cube))
firstCube (DDManager m) (DDNode n) = unsafeIOToST $ do
sz <- c_cuddReadSize m
alloca $ \cubePP ->
alloca $ \valP -> do
gen <- c_cuddFirstCube m n cubePP valP
empty <- c_cuddIsGenEmpty gen
if empty == 1 then do
c_cuddGenFree gen
return Nothing
else do
cubeP <- peek cubePP
cube <- peekArray (fromIntegral sz) cubeP
return $ Just (map (toSatBit . fromIntegral) cube, DDGen gen)
nextCube :: DDManager s u -> DDGen s u Cube -> ST s (Maybe [SatBit])
nextCube (DDManager m) (DDGen g) = unsafeIOToST $ do
sz <- c_cuddReadSize m
alloca $ \cubePP ->
alloca $ \valP -> do
c_cuddNextCube g cubePP valP
empty <- c_cuddIsGenEmpty g
if empty == 1 then do
c_cuddGenFree g
return Nothing
else do
cubeP <- peek cubePP
cube <- peekArray (fromIntegral sz) cubeP
return $ Just $ map (toSatBit . fromIntegral) cube
firstPrime :: DDManager s u -> DDNode s u -> DDNode s u -> ST s (Maybe ([SatBit], DDGen s u Prime))
firstPrime (DDManager m) (DDNode x) (DDNode y) = unsafeIOToST $ do
sz <- c_cuddReadSize m
alloca $ \cubePP -> do
gen <- c_cuddFirstPrime m x y cubePP
empty <- c_cuddIsGenEmpty gen
if empty == 1 then do
c_cuddGenFree gen
return Nothing
else do
cubeP <- peek cubePP
cube <- peekArray (fromIntegral sz) cubeP
return $ Just (map (toSatBit . fromIntegral) cube, DDGen gen)
nextPrime :: DDManager s u -> DDGen s u Prime -> ST s (Maybe [SatBit])
nextPrime (DDManager m) (DDGen g) = unsafeIOToST $ do
sz <- c_cuddReadSize m
alloca $ \cubePP -> do
c_cuddNextPrime g cubePP
empty <- c_cuddIsGenEmpty g
if empty == 1 then do
c_cuddGenFree g
return Nothing
else do
cubeP <- peek cubePP
cube <- peekArray (fromIntegral sz) cubeP
return $ Just $ map (toSatBit . fromIntegral) cube
firstNode :: DDManager s u -> DDNode s u -> ST s (Maybe (DDNode s u, DDGen s u Node))
firstNode (DDManager m) (DDNode x) = unsafeIOToST $ do
alloca $ \nodePP -> do
gen <- c_cuddFirstNode m x nodePP
empty <- c_cuddIsGenEmpty gen
if empty == 1 then do
c_cuddGenFree gen
return Nothing
else do
nodeP <- peek nodePP
return $ Just (DDNode nodeP, DDGen gen)
nextNode :: DDManager s u -> DDGen s u Node -> ST s (Maybe (DDNode s u))
nextNode (DDManager m) (DDGen g) = unsafeIOToST $ do
alloca $ \nodePP -> do
c_cuddNextNode g nodePP
empty <- c_cuddIsGenEmpty g
if empty == 1 then do
c_cuddGenFree g
return Nothing
else do
nodeP <- peek nodePP
return $ Just $ DDNode nodeP
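-- A minimal usage sketch (added for illustration, not part of the original
-- module): enumerate every satisfying cube of a BDD with the generator API
-- above.  The helper name 'allCubes' is ours.  Note that 'firstCube' and
-- 'nextCube' free the generator themselves once it is exhausted, so no
-- explicit 'genFree' is needed on that path.
allCubes :: DDManager s u -> DDNode s u -> ST s [[SatBit]]
allCubes m n = do
    first <- firstCube m n
    case first of
        Nothing        -> return []
        Just (c0, gen) -> go gen [c0]
  where
    go gen acc = do
        next <- nextCube m gen
        case next of
            Nothing -> return (reverse acc)
            Just c  -> go gen (c : acc)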
|
adamwalker/haskell_cudd
|
Cudd/Imperative.hs
|
bsd-3-clause
| 17,777 | 0 | 22 | 4,358 | 6,842 | 3,323 | 3,519 | -1 | -1 |
module Linear.Cassowary.ClSimplexSolver where
-- addConstraint :: ClSimplexSolver -> ClConstraint -> IO ()
-- removeConstraint :: ClSimplexSolver -> ClConstraint -> IO ()
-- addEditVar :: ClSimplexSolver -> ClVariable -> ClStrength -> IO ()
-- removeEditVar :: ClSimplexSolver -> ClVariable -> IO ()
-- beginEdit :: ClSimplexSolver -> IO ()
-- suggestValue :: ClSimplexSolver -> ClVariable -> Double -> IO ()
-- endEdit :: ClSimplexSolver -> IO ()
-- resolve :: ClSimplexSolver -> IO ()
-- -- addPointStays :: ClSimplexSolver -> [(ClVariable, ClVariable)] -> IO () ?
-- setAutoSolve :: ClSimplexSolver -> Boolean -> IO ()
-- isAutoSolving :: ClSimplexSolver -> IO Boolean
-- solve :: ClSimplexSolver -> IO ()
-- reset :: ClSimplexSolver -> IO ()
|
athanclark/cassowary-haskell
|
src/Linear/Cassowary/ClSimplexSolver.hs
|
bsd-3-clause
| 761 | 0 | 3 | 130 | 20 | 18 | 2 | 1 | 0 |
module Util.Sort (quicksort) where
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) = quicksort left ++ [x] ++ quicksort right
where left = [ y | y <- xs, y <= x ]
right = [ y | y <- xs, x < y ]
|
masateruk/haskell-dev-env
|
Util/Sort.hs
|
bsd-3-clause
| 232 | 0 | 9 | 67 | 129 | 70 | 59 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{- |
Description: low-level SMTP communication.
-}
module Network.Mail.SMTP.SMTPRaw (
SMTPRaw(..)
, smtpConnect
, smtpSendCommand
, smtpSendCommandAndWait
, smtpSendRaw
, smtpGetReplyLines
, smtpDisconnect
) where
import qualified Data.ByteString as B
import Data.ByteString.Char8 (pack, unpack)
import Network
import Network.Socket
import Data.Attoparsec.ByteString.Char8
import System.IO
import Network.Mail.SMTP.ReplyLine
import Network.Mail.SMTP.Types
-- | An SMTPRaw has arbitrary push/pull/close methods, and ALWAYS a Handle,
-- but that Handle is not assumed to be the direct means by which we push,
-- pull, or close. This is for STARTTLS support.
data SMTPRaw = SMTPRaw {
smtpPush :: B.ByteString -> IO ()
, smtpPull :: IO B.ByteString
, smtpClose :: IO ()
, smtpHandle :: Handle
}
-- | Try to open an SMTPRaw, taking the server greeting as well.
-- No exception handling is performed.
smtpConnect :: String -> Int -> IO (SMTPRaw, Maybe Greeting)
smtpConnect host port = do
handle <- connectTo host (PortNumber $ fromIntegral port)
greet <- parseWith (B.hGetSome handle 2048) greeting ""
let push = B.hPut handle
let pull = B.hGetSome handle 2048
let close = hClose handle
return $ (SMTPRaw push pull close handle, maybeResult greet)
-- | Send an SMTP command and wait for the reply.
-- You get Nothing in case the reply does not parse.
-- No exception handling is performed.
smtpSendCommandAndWait :: SMTPRaw -> Command -> IO (Maybe [ReplyLine])
smtpSendCommandAndWait smtpraw cmd = do
smtpSendCommand smtpraw cmd
smtpGetReplyLines smtpraw
-- | Send an SMTP command.
-- No exception handling is performed.
smtpSendCommand :: SMTPRaw -> Command -> IO ()
smtpSendCommand smtpraw cmd = do
smtpSendRaw smtpraw (toByteString cmd)
smtpSendRaw smtpraw (pack "\r\n")
-- | Send a raw byte string. Use with care. No exception handling is performed.
smtpSendRaw :: SMTPRaw -> B.ByteString -> IO ()
smtpSendRaw = smtpPush
-- | Try to read ReplyLines from the SMTPRaw.
-- No exception handling is performed.
smtpGetReplyLines :: SMTPRaw -> IO (Maybe [ReplyLine])
smtpGetReplyLines smtpraw = do
replies <- parseWith (smtpPull smtpraw) replyLines ""
return $ maybeResult replies
-- | Close an SMTPRaw handle.
-- Be sure not to use the SMTPRaw after this.
smtpDisconnect :: SMTPRaw -> IO ()
smtpDisconnect = smtpClose
|
avieth/smtp-mail-ng
|
Network/Mail/SMTP/SMTPRaw.hs
|
bsd-3-clause
| 2,500 | 0 | 11 | 509 | 518 | 278 | 240 | 46 | 1 |
{-# LANGUAGE CPP, QuasiQuotes, TemplateHaskell #-}
-- |
-- Template Haskell to generate defaultMain with a list of "Test" from
-- \"doc_test\", \"case_\<somthing\>\", and \"prop_\<somthing\>\".
--
-- An example of source code (Data/MySet.hs):
--
-- > {-| Creating a set from a list. O(N log N)
-- >
-- > >>> empty == fromList []
-- > True
-- > >>> singleton 'a' == fromList ['a']
-- > True
-- > >>> fromList [5,3,5] == fromList [5,3]
-- > True
-- > -}
-- >
-- > fromList :: Ord a => [a] -> RBTree a
-- > fromList = foldl' (flip insert) empty
--
-- An example of test code in the src directory (test/Test.hs):
--
-- > {-# LANGUAGE TemplateHaskell #-}
-- > module Main where
-- >
-- > import Test.Framework.TH.Prime
-- > import Test.Framework.Providers.DocTest
-- > import Test.Framework.Providers.HUnit
-- > import Test.Framework.Providers.QuickCheck2
-- > import Test.QuickCheck2
-- > import Test.HUnit
-- >
-- > import Data.MySet
-- >
-- > main :: IO ()
-- > main = $(defaultMainGenerator)
-- >
-- > doc_test :: DocTests
-- > doc_test = docTest ["../Data/MySet.hs"] ["-i.."]
-- >
-- > prop_toList :: [Int] -> Bool
-- > prop_toList xs = ordered ys
-- > where
-- > ys = toList . fromList $ xs
-- > ordered (x:y:xys) = x <= y && ordered (y:xys)
-- > ordered _ = True
-- >
-- > case_ticket4242 :: Assertion
-- > case_ticket4242 = (valid $ deleteMin $ deleteMin $ fromList [0,2,5,1,6,4,8,9,7,11,10,3]) @?= True
--
-- And run:
--
-- > test% runghc -i.. Test.hs
--
-- "defaultMainGenerator" generates the following:
--
-- > main = do
-- > TestGroup _ doctests <- docTest ["../Data/MySet.hs"] ["-i.."]
-- > defaultMain [
-- > testGroup "Doc tests" doctests
-- > , testGroup "Unit tests" [
-- > testCase "case_ticket4242" case_ticket4242
-- > ]
-- > , testGroup "Property tests" [
-- > testProperty "prop_toList" prop_toList
-- > ]
-- > ]
--
-- Note: examples in the haddock documentation are only used as unit tests at
-- this moment. I hope that properties of QuickCheck2 can also be specified in
-- the haddock documentation in the future. I guess it's the Haskell way of
-- Behavior Driven Development.
module Test.Framework.TH.Prime (
defaultMainGenerator
, DocTests
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Language.Haskell.TH hiding (Match)
import Language.Haskell.TH.Syntax hiding (Match)
import Test.Framework (defaultMain)
import Test.Framework.Providers.API
import Test.Framework.TH.Prime.Parser
----------------------------------------------------------------
-- | Type for \"doc_test\".
type DocTests = IO Test
----------------------------------------------------------------
{-|
Generating defaultMain with a list of "Test" from \"doc_test\",
\"case_\<somthing\>\", and \"prop_\<somthing\>\".
-}
defaultMainGenerator :: ExpQ
defaultMainGenerator = do
defined <- isDefined docTestKeyword
if defined then [|
do TestGroup _ doctests <- $(docTests)
let (unittests, proptests) = $(unitPropTests)
defaultMain [ testGroup "Doc tests" doctests
, testGroup "Unit tests" unittests
, testGroup "Property tests" proptests
]
|] else [|
do let (unittests, proptests) = $(unitPropTests)
defaultMain [ testGroup "Unit tests" unittests
, testGroup "Property tests" proptests
]
|]
----------------------------------------------------------------
-- code from Hiromi Ishii
isDefined :: String -> Q Bool
isDefined n = return False `recover` do
#if MIN_VERSION_template_haskell(2, 11, 0)
VarI (Name _ flavour) _ _ <- reify (mkName n)
#else
VarI (Name _ flavour) _ _ _ <- reify (mkName n)
#endif
modul <- loc_module <$> location
case flavour of
NameG ns _ mdl -> return (ns == VarName && modString mdl == modul)
_ -> return False
----------------------------------------------------------------
docTestKeyword :: String
docTestKeyword = "doc_test"
docTests :: ExpQ
docTests = return $ symbol docTestKeyword
|
kazu-yamamoto/test-framework-th-prime
|
Test/Framework/TH/Prime.hs
|
bsd-3-clause
| 4,160 | 0 | 15 | 934 | 349 | 231 | 118 | 35 | 2 |
module AI.MathTmp
where
import Data.List
-- Math functions copied from Math.Statistics because the whole thing wouldn't compile
mean :: Floating a => [a] -> a
mean x = fst $ foldl' (\(m, n) x -> (m+(x-m)/(n+1),n+1)) (0,0) x
-- mean x = fst $ foldl' (\(!m, !n) x -> (m+(x-m)/(n+1),n+1)) (0,0) x
-- |Arbitrary quantile q of an unsorted list. The quantile /q/ of /N/
-- |data points is the point whose (zero-based) index in the sorted
-- |data set is closest to /q(N-1)/.
quantile :: (Fractional b, Ord b) => Double -> [b] -> b
quantile q = quantileAsc q . sort
-- |As 'quantile' specialized for sorted data
quantileAsc :: (Fractional b, Ord b) => Double -> [b] -> b
quantileAsc _ [] = error "quantile on empty list"
quantileAsc q xs
| q < 0 || q > 1 = error "quantile out of range"
| otherwise = xs !! (quantIndex (length xs) q)
where quantIndex :: Int -> Double -> Int
quantIndex len q = case round $ q * (fromIntegral len - 1) of
idx | idx < 0 -> error "Quantile index too small"
| idx >= len -> error "Quantile index too large"
| otherwise -> idx
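-- Worked example (added for illustration): quantile 0.5 [1,3,2,4] sorts the
-- data to [1,2,3,4]; the index is round (0.5 * (4-1)) = 2, so the result is 3.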
-- |Standard deviation of sample
stddev :: (Floating a) => [a] -> a
stddev xs = sqrt $ var xs
-- |Sample variance
var xs = (var' 0 0 0 xs) / (fromIntegral $ length xs - 1)
where
var' _ _ s [] = s
var' m n s (x:xs) = var' nm (n + 1) (s + delta * (x - nm)) xs
where
delta = x - m
nm = m + delta/(fromIntegral $ n + 1)
|
mikeizbicki/Classification
|
src/AI/MathTmp.hs
|
bsd-3-clause
| 1,562 | 0 | 13 | 482 | 534 | 279 | 255 | 23 | 2 |
{-# LANGUAGE
MultiParamTypeClasses
, TemplateHaskell
, ScopedTypeVariables
, FlexibleInstances
, FlexibleContexts
, UndecidableInstances
#-}
module Spire.Canonical.Checker where
import Control.Monad.Except
import Unbound.LocallyNameless hiding ( Spine )
import Spire.Canonical.Types
import Spire.Canonical.Evaluator
import Spire.Surface.PrettyPrinter
import Spire.Canonical.InitialEnv
----------------------------------------------------------------------
recheckProg :: VProg -> SpireM ()
recheckProg [] = return ()
recheckProg (VDef _ a _A : xs) = do
checkV _A VType
checkV a _A
recheckProg xs
return ()
----------------------------------------------------------------------
checkV :: Value -> Type -> SpireM ()
checkV VTT VUnit = return ()
checkV VTT _ = throwError "Ill-typed!"
checkV VTrue VBool = return ()
checkV VTrue _ = throwError "Ill-typed!"
checkV VFalse VBool = return ()
checkV VFalse _ = throwError "Ill-typed!"
checkV (VQuotes _) VString = return ()
checkV (VQuotes _) _ = throwError "Ill-typed!"
checkV VNil VEnum = return ()
checkV VNil _ = throwError "Ill-typed!"
checkV VUnit VType = return ()
checkV VUnit _ = throwError "Ill-typed!"
checkV VBool VType = return ()
checkV VBool _ = throwError "Ill-typed!"
checkV VString VType = return ()
checkV VString _ = throwError "Ill-typed!"
checkV VEnum VType = return ()
checkV VEnum _ = throwError "Ill-typed!"
checkV VTel VType = return ()
checkV VTel _ = throwError "Ill-typed!"
checkV VType VType = return ()
checkV VType _ = throwError "Ill-typed!"
checkV (VDesc _I) VType = checkV _I VType
checkV (VDesc _I) _ = throwError "Ill-typed!"
checkV (VTag _E) VType = checkV _E VEnum
checkV (VTag _E) _ = throwError "Ill-typed!"
checkV VHere (VTag (VCons l _E)) = return ()
checkV VHere _ = throwError "Ill-typed!"
checkV (VThere t) (VTag (VCons l _E)) = checkV t (VTag _E)
checkV (VThere _) _ = throwError "Ill-typed!"
checkV (VEq _A a _B b) VType = do
checkV _A VType
checkV a _A
checkV _B VType
checkV b _B
checkV (VEq _ _ _ _) _ =
throwError "Ill-typed!"
checkV (VSg _A _B) VType = do
checkV _A VType
checkVExtend _A _B VType
checkV (VSg _A _B) _ =
throwError "Ill-typed!"
checkV (VPi _A _B) VType = do
checkV _A VType
checkVExtend _A _B VType
checkV (VPi _A _B) _ =
throwError "Ill-typed!"
checkV (VFix l _P _I _D p i) VType = do
checkV l VString
checkV _P VType
checkV _I VType
checkV _D $ VDesc _I
checkV p _P
checkV i _I
checkV (VFix l _P _I _D p i) _ =
throwError "Ill-typed!"
checkV (VCons x xs) VEnum = do
checkV x VString
checkV xs VEnum
checkV (VCons x xs) _ =
throwError "Ill-typed!"
checkV (VLam bnd_b) (VPi _A bnd_B) = do
(nm_a , b) <- unbind bnd_b
_B <- bnd_B `sub` vVar nm_a
extendCtx nm_a _A $ checkV b _B
checkV (VLam _) _ =
throwError "Ill-typed!"
checkV (VPair a b) (VSg _A _B) = do
checkV a _A
checkV b =<< _B `sub` a
checkV (VPair _ _) _ =
throwError "Ill-typed!"
checkV VRefl (VEq _A a _B b) = do
unless (_A == _B) $
throwError "Ill-typed!"
unless (a == b) $
throwError "Ill-typed!"
checkV VRefl _ =
throwError "Ill-typed!"
checkV VEmp VTel = return ()
checkV VEmp _ = throwError "Ill-typed!"
checkV (VExt _A _B) VTel = do
checkV _A VType
checkVExtend _A _B VTel
checkV (VExt _A _B) _ =
throwError "Ill-typed!"
checkV (VEnd i) (VDesc _I) = do
checkV i _I
checkV (VEnd i) _ =
throwError "Ill-typed!"
checkV (VRec i _D) (VDesc _I) = do
checkV i _I
checkV _D (VDesc _I)
checkV (VRec i _D) _ =
throwError "Ill-typed!"
checkV (VArg _A _B) (VDesc _I) = do
checkV _A VType
checkVExtend _A _B (VDesc _I)
checkV (VArg _A _B) _ =
throwError "Ill-typed!"
checkV (VInit xs) (VFix l _P _I _D p i) = do
let _X = vBind "i" (\j -> VFix l _P _I _D p j)
checkV xs =<< _D `elim` EFunc _I _X i
checkV (VInit xs) _ =
throwError "Ill-typed!"
checkV x@(VNeut nm fs) _A = do
_A' <- inferN nm fs
unless (_A == _A') $
throwError $ "Ill-typed, checked type not equal to inferred type!\n\n" ++
"Checked type:\n" ++ prettyPrint _A ++
"\nInferred type:\n" ++ prettyPrint _A' ++
"\nValue:\n" ++ prettyPrint x
----------------------------------------------------------------------
inferN :: Nom -> Spine -> SpireM Type
inferN nm Id = lookupType nm
inferN nm (Pipe fs (EApp a)) = do
_AB <- inferN nm fs
case _AB of
VPi _A _B -> do
checkV a _A
_B `sub` a
_ -> throwError "Ill-typed!"
inferN nm (Pipe fs (EElimUnit _P ptt)) = do
checkVExtend VUnit _P VType
let u = VNeut nm fs
checkV u VUnit
_P `sub` u
inferN nm (Pipe fs (EElimBool _P ptrue pfalse)) = do
checkVExtend VBool _P VType
checkV ptrue =<< _P `sub` VTrue
checkV pfalse =<< _P `sub` VFalse
let b = VNeut nm fs
checkV b VBool
_P `sub` b
inferN nm (Pipe fs (EElimEq _A x _P prefl y)) = do
checkV _A VType
checkV x _A
checkVP _A x _P
checkV prefl =<< _P `sub2` (x , VRefl)
checkV y _A
let q = VNeut nm fs
checkV q (VEq _A x _A y)
_P `sub2` (y , q)
where
checkVP :: Type -> Value -> Bind Nom2 Value -> SpireM ()
checkVP _A x bnd_P = do
((y , q) , _P) <- unbind bnd_P
extendCtx y _A $ extendCtx q (VEq _A x _A (vVar y)) $ checkV _P VType
inferN nm (Pipe fs (EElimPair _A _B _P ppair)) = do
checkV _A VType
checkVExtend _A _B VType
checkVExtend (VSg _A _B) _P VType
checkVppair _A _B _P ppair
let ab = VNeut nm fs
checkV ab (VSg _A _B)
_P `sub` ab
where
checkVppair :: Type -> Bind Nom Type -> Bind Nom Type -> Bind Nom2 Value -> SpireM ()
checkVppair _A _B _P bnd_ppair = do
((a , b) , ppair) <- unbind bnd_ppair
_Ba <- _B `sub` vVar a
_Ppair <- _P `sub` VPair (vVar a) (vVar b)
extendCtx a _A $ extendCtx b _Ba $ checkV ppair _Ppair
inferN nm (Pipe fs (EElimEnum _P pnil pcons)) = do
let xs = VNeut nm fs
checkV xs VEnum
checkVExtend VEnum _P VType
checkV pnil =<< _P `sub` VNil
checkVpcons _P pcons
_P `sub` xs
where
checkVpcons :: Bind Nom Type -> Bind Nom3 Value -> SpireM ()
checkVpcons _P bnd_pcons = do
((nm_x , nm_xs , nm_pxs) , pcons) <- unbind bnd_pcons
_Pxs <- _P `sub` vVar nm_xs
_Pcons <- _P `sub` VCons (vVar nm_x) (vVar nm_xs)
extendCtx nm_x VString $ extendCtx nm_xs VEnum $ extendCtx nm_pxs _Pxs $ checkV pcons _Pcons
inferN nm (Pipe fs (EElimTel _P pemp pext)) = do
checkVExtend VTel _P VType
checkV pemp =<< _P `sub` VEmp
checkVpext _P pext
let _T = VNeut nm fs
checkV _T VTel
_P `sub` _T
where
checkVpext :: Bind Nom Type -> Bind Nom3 Value -> SpireM ()
checkVpext _P bnd_pext = do
((_A , _B , pb) , pext) <- unbind bnd_pext
let nm_a = "a"
_Ba <- _P `sub` vApp' _B (var nm_a)
let _PB = VPi (vVar _A) (sbind nm_a _Ba)
_PExt <- _P `sub` VExt (vVar _A) (fbind' _B nm_a)
extendCtx _A VType $ extendCtx _B (vVar _A `vArr` VTel) $ extendCtx pb _PB $ checkV pext _PExt
inferN nm (Pipe fs (EElimDesc _I _P pend prec parg)) = do
let _D = VNeut nm fs
checkV _I VType
checkV _D (VDesc _I)
checkVExtend (VDesc _I) _P VType
checkVpend _I _P pend
checkVprec _I _P prec
checkVparg _I _P parg
_P `sub` _D
where
checkVpend :: Value -> Bind Nom Type -> Bind Nom Value -> SpireM ()
checkVpend _I _P bnd_pend = do
(i , pend) <- unbind bnd_pend
_Pi <- _P `sub` VEnd (vVar i)
extendCtx i _I $ checkV pend _Pi
checkVprec :: Value -> Bind Nom Type -> Bind Nom3 Value -> SpireM ()
checkVprec _I _P bnd_prec = do
((i , _D , pd) , prec) <- unbind bnd_prec
_PD <- _P `sub` (vVar _D)
_PRec <- _P `sub` VRec (vVar i) (vVar _D)
extendCtx i _I $ extendCtx _D (VDesc _I) $ extendCtx pd _PD $ checkV prec _PRec
checkVparg :: Value -> Bind Nom Type -> Bind Nom3 Value -> SpireM ()
checkVparg _I _P bnd_parg = do
((_A , _B , pb) , parg) <- unbind bnd_parg
let nm_a = "a"
_Ba <- _P `sub` vApp' _B (var nm_a)
let _PB = VPi (vVar _A) (sbind nm_a _Ba)
_PArg <- _P `sub` VArg (vVar _A) (fbind' _B nm_a)
extendCtx _A VType $ extendCtx _B (vVar _A `vArr` VDesc _I) $ extendCtx pb _PB $ checkV parg _PArg
inferN nm (Pipe fs (EFunc _I _X i)) = do
checkV _I VType
let _D = VNeut nm fs
checkV _D (VDesc _I)
checkVExtend _I _X VType
checkV i _I
return VType
inferN nm (Pipe fs (EHyps _I _X _M i xs)) = do
checkV _I VType
let _D = VNeut nm fs
checkV _D (VDesc _I)
checkVExtend _I _X VType
checkVM _I _X _M
checkV i _I
checkV xs =<< _D `elim` EFunc _I _X i
return VType
inferN nm (Pipe fs (EProve _I _X _M m i xs)) = do
checkV _I VType
let _D = VNeut nm fs
checkV _D (VDesc _I)
checkVExtend _I _X VType
checkVM _I _X _M
checkVm _I _X _M m
checkV i _I
checkV xs =<< _D `elim` EFunc _I _X i
_D `elim` EHyps _I _X _M i xs
where
checkVm :: Type -> Bind Nom Type -> Bind Nom2 Type -> Bind Nom2 Type -> SpireM ()
checkVm _I _X _M bnd_m = do
((i , x) , m) <- unbind bnd_m
_Xi <- _X `sub` vVar i
_Mix <- _M `sub2` (vVar i , vVar x)
extendCtx i _I $ extendCtx x _Xi $ checkV m _Mix
inferN nm (Pipe fs (EInd l _P _I _D p _M m i)) = do
checkV l VString
checkV _P VType
checkV _I VType
checkV _D (VDesc _I)
checkV p _P
checkVM l _P _I _D p i _M
checkVm l _P _I _D p i _M m
checkV i _I
let x = VNeut nm fs
checkV x (VFix l _P _I _D p i)
_M `sub2` (i , x)
where
checkVM :: Value -> Type -> Type -> Value -> Value -> Value -> Bind Nom2 Type -> SpireM ()
checkVM l _P _I _D p i bnd_M = do
((i , x) , _M) <- unbind bnd_M
let _X = VFix l _P _I _D p (vVar i)
extendCtx i _I $ extendCtx x _X $ checkV _M VType
checkVm :: Value -> Type -> Type -> Value -> Value -> Value -> Bind Nom2 Type -> Bind Nom3 Type -> SpireM ()
checkVm l _P _I _D p i _M bnd_m = do
((i , xs , ihs) , m) <- unbind bnd_m
let _X = vBind "i" (\j -> VFix l _P _I _D p j)
_Xs <- _D `elim` EFunc _I _X (vVar i)
_IHs <- _D `elim` EHyps _I _X _M (vVar i) (vVar xs)
_Mix <- _M `sub2` (vVar i , VInit (vVar xs))
extendCtx i _I $ extendCtx xs _Xs $ extendCtx ihs _IHs $ checkV m _Mix
inferN nm (Pipe fs (EBranches _P)) = do
let _E = VNeut nm fs
checkV _E VEnum
checkVExtend (VTag _E) _P VType
return VType
inferN nm (Pipe fs (ECase _E _P cs)) = do
let t = VNeut nm fs
checkV _E VEnum
checkV t (VTag _E)
checkVExtend (VTag _E) _P VType
checkV cs =<< _E `elim` EBranches _P
_P `sub` t
----------------------------------------------------------------------
checkVM :: Type -> Bind Nom Type -> Bind Nom2 Type -> SpireM ()
checkVM _I _X bnd_M = do
((i , x) , _M) <- unbind bnd_M
_Xi <- _X `sub` vVar i
extendCtx i _I $ extendCtx x _Xi $ checkV _M VType
----------------------------------------------------------------------
checkVExtend :: Type -> Bind Nom Value -> Type -> SpireM ()
checkVExtend _A bnd_b _B = do
(x , b) <- unbind bnd_b
extendCtx x _A $ checkV b _B
----------------------------------------------------------------------
|
spire/spire
|
src/Spire/Canonical/Checker.hs
|
bsd-3-clause
| 11,060 | 0 | 17 | 2,804 | 5,091 | 2,404 | 2,687 | 316 | 2 |
module Parser where
import Control.Applicative
import Control.Monad
import Data.Char
newtype Parser a = Parser (String -> [(a, String)])
apply :: Parser a -> String -> [(a, String)]
apply (Parser f) s = f s
parse :: Parser a -> String -> a
parse m s = one [x | (x, t) <- apply m s, t== ""]
where
one [] = error "no parse"
one [x] = x
one xs | length xs > 1 = error "ambiguous parse"
instance Functor Parser where
fmap = liftM
instance Applicative Parser where
pure = return
(<*>) = ap
instance Monad Parser where
return x = Parser (\s->[(x,s)])
m >>= k = Parser( \s->
[ (y, u) |
(x, t) <-apply m s,
(y, u) <-apply (k x) t
])
instance Alternative Parser where
empty = mzero
(<|>) = mplus
instance MonadPlus Parser where
mzero = Parser (\s-> [])
mplus m n = Parser (\s -> apply m s ++ apply n s)
char :: Parser Char
char = Parser$ \s ->
case s of
[] -> mzero
(x:xs) -> return (x,xs)
--Parser.apply char "aaa"
spot::(Char -> Bool) -> Parser Char
spot f = char >>= \c -> guard (f c) >>= \_->return c
--Parser.apply (spot isDigit) "123"
parseDigit :: Parser Char
parseDigit = spot isDigit
token:: Char -> Parser Char
token c = spot (==c)
--Parser.apply (token 'a') "123"
addsth :: Parser String
--addsth = spot isDigit >>= \a->return (a:[])
addsth = spot isDigit >>= \a->token '+'>>= \b->spot isDigit >>= \c-> return $show a ++ "+" ++ show c
--Parser.apply addsth "1+2ddd"
matchEx2 :: String -> Parser String
matchEx2 (x:xs) = do
y <- token x
ys <- match xs
return $y : ys
matchEx :: String -> Parser String
matchEx s = sequence (map token s)
-- Parser.apply (matchEx "aa") "aa123"
match :: String -> Parser String
match = mapM token
-- Parser.apply (matchEx2 "aa") "aa123"
test :: Parser [String]
test =
return []
--Parser.apply test "123"
star :: Parser a -> Parser [a]
star p = plusEx p `mplus` return []
-- Parser.apply (star $ spot isDigit) "123"
plus :: Parser a -> Parser[a]
plus p =
p >>= \x -> star p >>= \xs -> return $ x : xs
-- Parser.apply (plus $ spot isDigit) "123"
-- parse (plus $ spot isDigit) "123"
plusEx :: Parser a -> Parser [a]
plusEx a = do
x <- a
xs <- star a
return $ x:xs
-- Parser.apply (plusEx $ spot isDigit) "123"
--Parser.apply (star $ token 'a' `mplus` token 'b') "a1234"
parseNat :: Parser Int
parseNat = plus parseDigit >>= \s-> return $ read s
--Parser.apply parseNat "1234"
parseNeg :: Parser Int
parseNeg = token '-' >> parseNat >>= \n -> return $ -n
--Parser.apply parseNat "1234"
parseInt :: Parser Int
parseInt = parseNat `mplus` parseNeg
--Parser.apply parseInt "-1234"
--Parser.apply parseInt "1234"
|
bzhkl/MonadTry
|
LibParser/Parser.hs
|
bsd-3-clause
| 2,792 | 0 | 14 | 727 | 1,026 | 536 | 490 | 70 | 3 |
{-# LANGUAGE BangPatterns #-}
module Network.DNS.Cache.Cache (
CacheRef
, newCacheRef
, lookupCacheRef
, insertCacheRef
, pruneCacheRef
) where
import Control.Applicative ((<$>))
import Data.IORef (newIORef, readIORef, atomicModifyIORef', IORef)
import Data.OrdPSQ (OrdPSQ)
import qualified Data.OrdPSQ as PSQ
import Network.DNS.Cache.Types
type PSQ = OrdPSQ
newtype CacheRef = CacheRef (IORef (PSQ Key Prio Entry))
newCacheRef :: IO CacheRef
newCacheRef = CacheRef <$> newIORef PSQ.empty
lookupCacheRef :: Key -> CacheRef -> IO (Maybe (Prio, Entry))
lookupCacheRef key (CacheRef ref) = PSQ.lookup key <$> readIORef ref
insertCacheRef :: Key -> Prio -> Entry -> CacheRef -> IO ()
insertCacheRef key tim ent (CacheRef ref) =
atomicModifyIORef' ref $ \q -> (PSQ.insert key tim ent q, ())
pruneCacheRef :: Prio -> CacheRef -> IO ()
pruneCacheRef tim (CacheRef ref) =
atomicModifyIORef' ref $ \p -> (snd (PSQ.atMostView tim p), ())
|
kazu-yamamoto/concurrent-dns-cache
|
Network/DNS/Cache/Cache.hs
|
bsd-3-clause
| 957 | 0 | 11 | 162 | 334 | 184 | 150 | 24 | 1 |
-- generated by derive.hs
module Prose.Internal.GraphemeBreakTest where
graphemebreaktest = [
[" "," "],
[" \776"," "],
[" ","\r"],
[" \776","\r"],
[" ","\n"],
[" \776","\n"],
[" ","\SOH"],
[" \776","\SOH"],
[" \768"],
[" \776\768"],
[" \2307"],
[" \776\2307"],
[" ","\4352"],
[" \776","\4352"],
[" ","\4448"],
[" \776","\4448"],
[" ","\4520"],
[" \776","\4520"],
[" ","\44032"],
[" \776","\44032"],
[" ","\44033"],
[" \776","\44033"],
[" ","\127462"],
[" \776","\127462"],
[" ","\888"],
[" \776","\888"],
[" ","\55296"],
[" \776","\55296"],
["\r"," "],
["\r","\776"," "],
["\r","\r"],
["\r","\776","\r"],
["\r\n"],
["\r","\776","\n"],
["\r","\SOH"],
["\r","\776","\SOH"],
["\r","\768"],
["\r","\776\768"],
["\r","\2307"],
["\r","\776\2307"],
["\r","\4352"],
["\r","\776","\4352"],
["\r","\4448"],
["\r","\776","\4448"],
["\r","\4520"],
["\r","\776","\4520"],
["\r","\44032"],
["\r","\776","\44032"],
["\r","\44033"],
["\r","\776","\44033"],
["\r","\127462"],
["\r","\776","\127462"],
["\r","\888"],
["\r","\776","\888"],
["\r","\55296"],
["\r","\776","\55296"],
["\n"," "],
["\n","\776"," "],
["\n","\r"],
["\n","\776","\r"],
["\n","\n"],
["\n","\776","\n"],
["\n","\SOH"],
["\n","\776","\SOH"],
["\n","\768"],
["\n","\776\768"],
["\n","\2307"],
["\n","\776\2307"],
["\n","\4352"],
["\n","\776","\4352"],
["\n","\4448"],
["\n","\776","\4448"],
["\n","\4520"],
["\n","\776","\4520"],
["\n","\44032"],
["\n","\776","\44032"],
["\n","\44033"],
["\n","\776","\44033"],
["\n","\127462"],
["\n","\776","\127462"],
["\n","\888"],
["\n","\776","\888"],
["\n","\55296"],
["\n","\776","\55296"],
["\SOH"," "],
["\SOH","\776"," "],
["\SOH","\r"],
["\SOH","\776","\r"],
["\SOH","\n"],
["\SOH","\776","\n"],
["\SOH","\SOH"],
["\SOH","\776","\SOH"],
["\SOH","\768"],
["\SOH","\776\768"],
["\SOH","\2307"],
["\SOH","\776\2307"],
["\SOH","\4352"],
["\SOH","\776","\4352"],
["\SOH","\4448"],
["\SOH","\776","\4448"],
["\SOH","\4520"],
["\SOH","\776","\4520"],
["\SOH","\44032"],
["\SOH","\776","\44032"],
["\SOH","\44033"],
["\SOH","\776","\44033"],
["\SOH","\127462"],
["\SOH","\776","\127462"],
["\SOH","\888"],
["\SOH","\776","\888"],
["\SOH","\55296"],
["\SOH","\776","\55296"],
["\768"," "],
["\768\776"," "],
["\768","\r"],
["\768\776","\r"],
["\768","\n"],
["\768\776","\n"],
["\768","\SOH"],
["\768\776","\SOH"],
["\768\768"],
["\768\776\768"],
["\768\2307"],
["\768\776\2307"],
["\768","\4352"],
["\768\776","\4352"],
["\768","\4448"],
["\768\776","\4448"],
["\768","\4520"],
["\768\776","\4520"],
["\768","\44032"],
["\768\776","\44032"],
["\768","\44033"],
["\768\776","\44033"],
["\768","\127462"],
["\768\776","\127462"],
["\768","\888"],
["\768\776","\888"],
["\768","\55296"],
["\768\776","\55296"],
["\2307"," "],
["\2307\776"," "],
["\2307","\r"],
["\2307\776","\r"],
["\2307","\n"],
["\2307\776","\n"],
["\2307","\SOH"],
["\2307\776","\SOH"],
["\2307\768"],
["\2307\776\768"],
["\2307\2307"],
["\2307\776\2307"],
["\2307","\4352"],
["\2307\776","\4352"],
["\2307","\4448"],
["\2307\776","\4448"],
["\2307","\4520"],
["\2307\776","\4520"],
["\2307","\44032"],
["\2307\776","\44032"],
["\2307","\44033"],
["\2307\776","\44033"],
["\2307","\127462"],
["\2307\776","\127462"],
["\2307","\888"],
["\2307\776","\888"],
["\2307","\55296"],
["\2307\776","\55296"],
["\4352"," "],
["\4352\776"," "],
["\4352","\r"],
["\4352\776","\r"],
["\4352","\n"],
["\4352\776","\n"],
["\4352","\SOH"],
["\4352\776","\SOH"],
["\4352\768"],
["\4352\776\768"],
["\4352\2307"],
["\4352\776\2307"],
["\4352\4352"],
["\4352\776","\4352"],
["\4352\4448"],
["\4352\776","\4448"],
["\4352","\4520"],
["\4352\776","\4520"],
["\4352\44032"],
["\4352\776","\44032"],
["\4352\44033"],
["\4352\776","\44033"],
["\4352","\127462"],
["\4352\776","\127462"],
["\4352","\888"],
["\4352\776","\888"],
["\4352","\55296"],
["\4352\776","\55296"],
["\4448"," "],
["\4448\776"," "],
["\4448","\r"],
["\4448\776","\r"],
["\4448","\n"],
["\4448\776","\n"],
["\4448","\SOH"],
["\4448\776","\SOH"],
["\4448\768"],
["\4448\776\768"],
["\4448\2307"],
["\4448\776\2307"],
["\4448","\4352"],
["\4448\776","\4352"],
["\4448\4448"],
["\4448\776","\4448"],
["\4448\4520"],
["\4448\776","\4520"],
["\4448","\44032"],
["\4448\776","\44032"],
["\4448","\44033"],
["\4448\776","\44033"],
["\4448","\127462"],
["\4448\776","\127462"],
["\4448","\888"],
["\4448\776","\888"],
["\4448","\55296"],
["\4448\776","\55296"],
["\4520"," "],
["\4520\776"," "],
["\4520","\r"],
["\4520\776","\r"],
["\4520","\n"],
["\4520\776","\n"],
["\4520","\SOH"],
["\4520\776","\SOH"],
["\4520\768"],
["\4520\776\768"],
["\4520\2307"],
["\4520\776\2307"],
["\4520","\4352"],
["\4520\776","\4352"],
["\4520","\4448"],
["\4520\776","\4448"],
["\4520\4520"],
["\4520\776","\4520"],
["\4520","\44032"],
["\4520\776","\44032"],
["\4520","\44033"],
["\4520\776","\44033"],
["\4520","\127462"],
["\4520\776","\127462"],
["\4520","\888"],
["\4520\776","\888"],
["\4520","\55296"],
["\4520\776","\55296"],
["\44032"," "],
["\44032\776"," "],
["\44032","\r"],
["\44032\776","\r"],
["\44032","\n"],
["\44032\776","\n"],
["\44032","\SOH"],
["\44032\776","\SOH"],
["\44032\768"],
["\44032\776\768"],
["\44032\2307"],
["\44032\776\2307"],
["\44032","\4352"],
["\44032\776","\4352"],
["\44032\4448"],
["\44032\776","\4448"],
["\44032\4520"],
["\44032\776","\4520"],
["\44032","\44032"],
["\44032\776","\44032"],
["\44032","\44033"],
["\44032\776","\44033"],
["\44032","\127462"],
["\44032\776","\127462"],
["\44032","\888"],
["\44032\776","\888"],
["\44032","\55296"],
["\44032\776","\55296"],
["\44033"," "],
["\44033\776"," "],
["\44033","\r"],
["\44033\776","\r"],
["\44033","\n"],
["\44033\776","\n"],
["\44033","\SOH"],
["\44033\776","\SOH"],
["\44033\768"],
["\44033\776\768"],
["\44033\2307"],
["\44033\776\2307"],
["\44033","\4352"],
["\44033\776","\4352"],
["\44033","\4448"],
["\44033\776","\4448"],
["\44033\4520"],
["\44033\776","\4520"],
["\44033","\44032"],
["\44033\776","\44032"],
["\44033","\44033"],
["\44033\776","\44033"],
["\44033","\127462"],
["\44033\776","\127462"],
["\44033","\888"],
["\44033\776","\888"],
["\44033","\55296"],
["\44033\776","\55296"],
["\127462"," "],
["\127462\776"," "],
["\127462","\r"],
["\127462\776","\r"],
["\127462","\n"],
["\127462\776","\n"],
["\127462","\SOH"],
["\127462\776","\SOH"],
["\127462\768"],
["\127462\776\768"],
["\127462\2307"],
["\127462\776\2307"],
["\127462","\4352"],
["\127462\776","\4352"],
["\127462","\4448"],
["\127462\776","\4448"],
["\127462","\4520"],
["\127462\776","\4520"],
["\127462","\44032"],
["\127462\776","\44032"],
["\127462","\44033"],
["\127462\776","\44033"],
["\127462\127462"],
["\127462\776","\127462"],
["\127462","\888"],
["\127462\776","\888"],
["\127462","\55296"],
["\127462\776","\55296"],
["\888"," "],
["\888\776"," "],
["\888","\r"],
["\888\776","\r"],
["\888","\n"],
["\888\776","\n"],
["\888","\SOH"],
["\888\776","\SOH"],
["\888\768"],
["\888\776\768"],
["\888\2307"],
["\888\776\2307"],
["\888","\4352"],
["\888\776","\4352"],
["\888","\4448"],
["\888\776","\4448"],
["\888","\4520"],
["\888\776","\4520"],
["\888","\44032"],
["\888\776","\44032"],
["\888","\44033"],
["\888\776","\44033"],
["\888","\127462"],
["\888\776","\127462"],
["\888","\888"],
["\888\776","\888"],
["\888","\55296"],
["\888\776","\55296"],
["\55296"," "],
["\55296","\776"," "],
["\55296","\r"],
["\55296","\776","\r"],
["\55296","\n"],
["\55296","\776","\n"],
["\55296","\SOH"],
["\55296","\776","\SOH"],
["\55296","\768"],
["\55296","\776\768"],
["\55296","\2307"],
["\55296","\776\2307"],
["\55296","\4352"],
["\55296","\776","\4352"],
["\55296","\4448"],
["\55296","\776","\4448"],
["\55296","\4520"],
["\55296","\776","\4520"],
["\55296","\44032"],
["\55296","\776","\44032"],
["\55296","\44033"],
["\55296","\776","\44033"],
["\55296","\127462"],
["\55296","\776","\127462"],
["\55296","\888"],
["\55296","\776","\888"],
["\55296","\55296"],
["\55296","\776","\55296"],
["a","\127462","b"],
["\127479\127482"],
["\127479\127482\127480"],
["\127479\127482\127480\127466"],
["\127479\127482","\8203","\127480\127466"],
["\127462\127463\127464"],
["\127462\8205","\127463\127464"],
["\127462\127463\8205","\127464"],
[" \8205","\1606"],
["\1606\8205"," "] ]
|
llelf/prose
|
Prose/Internal/GraphemeBreakTest.hs
|
bsd-3-clause
| 9,759 | 0 | 6 | 2,079 | 3,613 | 2,409 | 1,204 | 404 | 1 |
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 2011
--
-- Generate code to initialise cost centres
--
-- -----------------------------------------------------------------------------
module ProfInit (profilingInitCode) where
import GhcPrelude
import GHC.Cmm.CLabel
import CostCentre
import DynFlags
import Outputable
import Module
-- -----------------------------------------------------------------------------
-- Initialising cost centres
-- We must produce declarations for the cost-centres defined in this
-- module;
profilingInitCode :: Module -> CollectedCCs -> SDoc
profilingInitCode this_mod (local_CCs, singleton_CCSs)
= sdocWithDynFlags $ \dflags ->
if not (gopt Opt_SccProfilingOn dflags)
then empty
else vcat
$ map emit_cc_decl local_CCs
++ map emit_ccs_decl singleton_CCSs
++ [emit_cc_list local_CCs]
++ [emit_ccs_list singleton_CCSs]
++ [ text "static void prof_init_" <> ppr this_mod
<> text "(void) __attribute__((constructor));"
, text "static void prof_init_" <> ppr this_mod <> text "(void)"
, braces (vcat
[ text "registerCcList" <> parens local_cc_list_label <> semi
, text "registerCcsList" <> parens singleton_cc_list_label <> semi
])
]
where
emit_cc_decl cc =
text "extern CostCentre" <+> cc_lbl <> text "[];"
where cc_lbl = ppr (mkCCLabel cc)
local_cc_list_label = text "local_cc_" <> ppr this_mod
emit_cc_list ccs =
text "static CostCentre *" <> local_cc_list_label <> text "[] ="
<+> braces (vcat $ [ ppr (mkCCLabel cc) <> comma
| cc <- ccs
] ++ [text "NULL"])
<> semi
emit_ccs_decl ccs =
text "extern CostCentreStack" <+> ccs_lbl <> text "[];"
where ccs_lbl = ppr (mkCCSLabel ccs)
singleton_cc_list_label = text "singleton_cc_" <> ppr this_mod
emit_ccs_list ccs =
text "static CostCentreStack *" <> singleton_cc_list_label <> text "[] ="
<+> braces (vcat $ [ ppr (mkCCSLabel cc) <> comma
| cc <- ccs
] ++ [text "NULL"])
<> semi
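-- Sketch of the generated C for a module M (reconstructed from the SDoc
-- construction above; not verbatim compiler output):
--
--   extern CostCentre <cc>[];                        /* one per local CC   */
--   extern CostCentreStack <ccs>[];                  /* one per single CCS */
--   static CostCentre *local_cc_M[] = { <cc>, ..., NULL };
--   static CostCentreStack *singleton_cc_M[] = { <ccs>, ..., NULL };
--   static void prof_init_M(void) __attribute__((constructor));
--   static void prof_init_M(void)
--   { registerCcList(local_cc_M); registerCcsList(singleton_cc_M); }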
|
sdiehl/ghc
|
compiler/profiling/ProfInit.hs
|
bsd-3-clause
| 2,228 | 0 | 17 | 556 | 482 | 245 | 237 | 43 | 2 |
module Graphics.Volume.MarchingCubes where
import Graphics.Volume.MarchingCubesTables
import Numeric.ScalarField
import Control.Lens
import Data.Bits
import qualified Data.Vector as V
import Linear
-- | Calculates the isosurface of a scalar field in three-dimensional Euclidean space.
marchingCubes :: (Enum a, Ord a, Epsilon a, Floating a, ScalarField s (V3 a) (V3 a) a)
=> s -- ^ the isosurface
-> a -- ^ iso level
-> V3 a -- ^ region origin
-> V3 Int -- ^ number of cubes in each direction
-> a -- ^ cube size
-> [[(V3 a, V3 a)]] -- ^ a list of triangle vertices consisting of position and normal
marchingCubes field isoLevel (V3 x0 y0 z0) (V3 nx ny nz) cubeSize
= map handleCube positions
where
-- positions of all cubes in the specified region
positions =
[V3 (x0 + cubeSize * realToFrac x) (y0 + cubeSize * realToFrac y) (z0 + cubeSize * realToFrac z)
| x <- [0..nx-1]
, y <- [0..ny-1]
, z <- [0..nz-1]
]
-- returns gradient and density in the scalar field as 4 dimensional vector
valueAndGradientAt pos =
case gradientAt field pos of
V3 x y z -> V4 x y z (valueAt field pos)
-- calculate corners and values of cube
handleCube pos =
let corners = cubeCorners cubeSize pos
values = V.map valueAndGradientAt corners
in generateMesh isoLevel corners values
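-- Example call (added for illustration; 'field' stands for any value with a
-- suitable ScalarField instance): extract the isosurface at level 0 over a
-- 10x10x10 grid of unit cubes whose origin is at (-5,-5,-5):
--
-- > marchingCubes field 0 (V3 (-5) (-5) (-5)) (V3 10 10 10) 1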
-- | Calculates the corners of a cube
cubeCorners :: (Num a) => a -> V3 a -> V.Vector (V3 a)
cubeCorners width (V3 x y z) = V.fromList
[ V3 x y z
, V3 (x+width) y z
, V3 (x+width) y (z+width)
, V3 x y (z+width)
, V3 x (y+width) z
, V3 (x+width) (y+width) z
, V3 (x+width) (y+width) (z+width)
, V3 x (y+width) (z+width)
]
-- | Calculates the intersection of an edge with the iso-surface
-- and the corresponding normal vector.
interpolate :: (Floating a, Epsilon a)
=> a -- ^ isoLevel
-> V3 a -- ^ start point of edge
-> V3 a -- ^ end point of edge
-> V4 a -- ^ start value of edge
-> V4 a -- ^ end value of edge
-> (V3 a, V3 a) -- ^ point of intersection and normal vector
interpolate isoLevel v0 v1 val0 val1
  | nearZero $ val0 ^. _w - isoLevel = (v0, val0 ^. _xyz)
| nearZero $ val1 ^. _w - isoLevel = (v1, val1 ^. _xyz)
| nearZero $ val1 ^. _w - val0 ^. _w = (v0, val0 ^. _xyz)
| otherwise =
( v0 ^+^ mu *^ (v1 ^-^ v0)
, normalize $ (val0 ^+^ mu *^ (val1 ^-^ val0)) ^. _xyz
) where
mu = (isoLevel - val0 ^. _w) / (val1 ^. _w - val0 ^. _w)
-- | Generates the mesh for one cube.
generateMesh :: (Ord a, Floating a, Epsilon a)
=> a -- ^ iso level
-> V.Vector (V3 a) -- ^ cube corner positions
-> V.Vector (V4 a) -- ^ cube corner values (gradient + density)
-> [(V3 a, V3 a)] -- ^ list of triangle vertices with corresponding normal vectors
generateMesh isoLevel corners values = concatMap (vectorToList . fmap (intersections V.!)) triangles where
-- index of cube in the lookup tables
cubeIndex = V.ifoldl' (\idx i v -> if v ^. _w >= isoLevel then idx .|. (1 `shiftL` i) else idx) 0 values
triangles = mcTriangles V.! cubeIndex
-- indices of corners participating in the respective edges
edges = [(0,1), (1,2), (2,3), (3,0), (4,5), (5,6), (6,7), (7,4), (0,4), (1,5), (2,6), (3,7)]
-- lazy vector of interpolated intersections
intersections = V.fromList
[ interpolate isoLevel (corners V.! i) (corners V.! j) (values V.! i) (values V.! j)
| (i,j) <- edges ]
-- | Returns the elements of the vector as a list
vectorToList :: V3 a -> [a]
vectorToList (V3 x y z) = [x,y,z]
|
fatho/volume
|
src/Graphics/Volume/MarchingCubes.hs
|
bsd-3-clause
| 3,878 | 0 | 14 | 1,190 | 1,305 | 710 | 595 | -1 | -1 |
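-- A small usage sketch for the module above; it assumes the `linear` and
-- `vector` packages and that Graphics.Volume.MarchingCubes is importable.
-- The density f p = p `dot` p is sampled at two cube corners, and the crossing
-- with iso level 0.25 is interpolated linearly between the sampled values
-- 0 and 1, so the returned point sits at x = 0.25 with normal (1,0,0).
module MarchingCubesExample where

import Graphics.Volume.MarchingCubes (cubeCorners, interpolate)
import Linear (V3 (..), V4 (..), dot, (^*))

-- | Density and its analytic gradient, packed as (gradient, value) in a V4,
-- which is the layout 'interpolate' expects.
sample :: V3 Double -> V4 Double
sample p = V4 gx gy gz (dot p p)
  where
    V3 gx gy gz = p ^* 2

main :: IO ()
main = do
  let corners = cubeCorners 1 (V3 0 0 0)   -- the eight corners of a unit cube
      p0 = V3 0 0 0
      p1 = V3 1 0 0
  print corners
  print (interpolate 0.25 p0 p1 (sample p0) (sample p1))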
module IA.GA
(
GenoType
, GenoTypes
, PhenoType
, Population
, Select
, CrossOver
, Mutate
, Fitness
, FitnessType(..)
, mkFitness
, mkSelect
, mkCrossOver
, mkMutate
, binaryTournament
, runGA
, randomRSt
, randomSt
, GeneBits(..)
, mutateBits
, crossOverBits
, mutateSeq
, crossOverSeq
) where
import Control.Monad.State
import Data.Bits ((.&.), (.|.))
import Data.Ord (comparing)
import Data.Sequence ((|>),(><),ViewL(..))
import Data.Traversable as T
import System.Random
import qualified Control.Exception as E
import qualified Data.Bits as B
import qualified Data.Sequence as S
type GenoType a = a
type GenoTypes a = S.Seq (GenoType a)
type PhenoType a b = (GenoType a, b)
type Population a b = S.Seq (PhenoType a b)
data Select a b g = Select Int (Population a b -> State g (GenoTypes a))
newtype CrossOver a g =
CrossOver (GenoTypes a -> State g (GenoTypes a))
data Mutate a g = Mutate Double (GenoType a -> State g (GenoType a))
data FitnessType = FitnessMaximize | FitnessMinimize
data Fitness a b = Fitness FitnessType (GenoType a -> b)
nextGen :: (RandomGen g, Ord b)
=> Population a b
-> Fitness a b
-> Select a b g
-> CrossOver a g
-> Mutate a g
-> State g (Population a b)
nextGen pop
fitness@(Fitness fitnessType _)
(Select eliteCount selectFun)
(CrossOver crossOverFun)
(Mutate mutateRate mutateFun) =
(sortPop fitnessType . (elitePop ><)) `fmap` newPop
where elitePop = S.take eliteCount pop
popLength = S.length pop
takeBestPop = S.take $ E.assert (popLength > eliteCount)
(popLength - eliteCount)
newGenes = selectFun pop >>= crossOverFun >>= traverse mutate
newPop = (takeBestPop . toPop fitness) `fmap` newGenes
mutate g = do
i <- randomRSt (0, 1)
if i <= mutateRate then
mutateFun g
else
return g
sortPop :: Ord b => FitnessType -> Population a b -> Population a b
sortPop fitnessType = S.sortBy cmp
where cmp = case fitnessType of
FitnessMaximize -> flip $ comparing snd
FitnessMinimize -> comparing snd
toPop :: Ord b => Fitness a b -> GenoTypes a -> Population a b
toPop (Fitness fitnessType fitnessFun) =
sortPop fitnessType . fmap (\x -> (x, fitnessFun x))
runGA :: (RandomGen g, Ord b)
=> g
-> Int
-> GenoTypes a
-> Fitness a b
-> Select a b g
-> CrossOver a g
-> Mutate a g
-> GenoType a
runGA rgen iterNb initialGenes fitness select crossOver mutate =
let initialPop = toPop fitness initialGenes
runNextGen p = nextGen p fitness select crossOver mutate
finalGenoType = (fst . headSeq) `fmap` iterM iterNb runNextGen initialPop
in
evalState finalGenoType rgen
iterM :: (Monad m) => Int -> (a -> m a) -> a -> m a
iterM 0 _ a = return a
iterM n f a = f a >>= iterM (n - 1) f
headSeq :: S.Seq a -> a
headSeq s = h
where (h :< _) = S.viewl s
tailSeq :: S.Seq a -> S.Seq a
tailSeq s = t
where (_ :< t) = S.viewl s
randomSt :: (RandomGen g, Random a) => State g a
randomSt = state random
randomRSt :: (RandomGen g, Random a) => (a, a) -> State g a
randomRSt a = state $ randomR a
binaryTournament :: (RandomGen g, Ord b) => Population a b -> State g (GenoType a)
binaryTournament pop = do
let l = S.length pop
l' = E.assert (l > 0) (l - 1)
i <- randomRSt (0, l')
j <- randomRSt (0, l')
let (gi, si) = S.index pop i
(gj, sj) = S.index pop j
return $ if si > sj then gi else gj
mkSelect :: Int
-> (Population a b -> State g (GenoType a))
-> Select a b g
mkSelect eliteCount f =
Select eliteCount (\pop -> T.sequence . fmap (const $ f pop) $ pop)
mkCrossOver ::(GenoType a -> GenoType a -> State g (GenoType a))
-> CrossOver a g
mkCrossOver f = CrossOver go
where go genoTypes = T.sequence . fmap (uncurry f) $ couples
where g = genoTypes |> headSeq genoTypes
couples = S.zip g $ tailSeq g
mkMutate :: Double -> (GenoType a -> State g (GenoType a))
-> Mutate a g
mkMutate = Mutate
mkFitness :: FitnessType -> (a -> b) -> Fitness a b
mkFitness = Fitness
data GeneBits a = GeneBits !a !Int
deriving (Show, Eq)
mutateBits :: (RandomGen g, B.Bits a) => GeneBits a -> State g (GeneBits a)
mutateBits (GeneBits bits len) = do
i <- randomRSt (0, E.assert (len > 0) (len - 1))
return $ GeneBits (B.complementBit bits i) len
crossOverBits :: (RandomGen g, B.Bits a)
=> GeneBits a
-> GeneBits a
-> State g (GeneBits a)
crossOverBits g@(GeneBits _ len) g'@(GeneBits _ len') = do
i <- randomRSt (0, E.assert (len == len' && len > 0) (len - 1))
return $ mergeGeneBits g g' i
mergeGeneBits :: B.Bits a
=> GeneBits a
-> GeneBits a
-> Int
-> GeneBits a
mergeGeneBits (GeneBits bits len) (GeneBits bits' len') i =
GeneBits (mergeBits (bits .&. mask) (bits' .&. mask) i) len
where mask = B.complement $ B.shift oneBits $ E.assert (len == len') len
mergeBits :: B.Bits a => a -> a -> Int -> a
mergeBits b b' i = b .&. leftMask .|. b' .&. rightMask
where rightMask = B.shift oneBits i
leftMask = B.complement rightMask
oneBits :: B.Bits a => a
oneBits = B.complement B.zeroBits
mutateSeq :: (RandomGen g, Random a) => S.Seq a -> State g (S.Seq a)
mutateSeq xs = do
let l = S.length xs
i <- randomRSt (0, E.assert (l > 0) (l - 1))
newVal <- randomSt
return $ S.update i newVal xs
crossOverSeq :: (RandomGen g) => S.Seq a -> S.Seq a -> State g (S.Seq a)
crossOverSeq g g' = do
let l = E.assert (S.length g == S.length g') (S.length g)
i <- randomRSt (0, E.assert (l > 0) (l - 1))
return $ S.take i g >< S.drop i g'
|
dlgd/GA
|
src/IA/GA.hs
|
bsd-3-clause
| 5,977 | 0 | 14 | 1,741 | 2,478 | 1,274 | 1,204 | 169 | 2 |
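-- A toy driver for the IA.GA module above (assumes that module together with
-- the `random` and `containers` packages). It evolves a 16-bit genome towards
-- having all bits set, using the bit-level operators exported by IA.GA; the
-- population, rates, and seed below are arbitrary choices for illustration.
module GAExample where

import Data.Bits (popCount)
import qualified Data.Sequence as S
import System.Random (mkStdGen)

import IA.GA

main :: IO ()
main = do
  let initialGenes = S.fromList [GeneBits g 16 | g <- [1, 2, 4, 8 :: Int]]
      fitness      = mkFitness FitnessMaximize (\(GeneBits bits _) -> popCount bits)
      select       = mkSelect 2 binaryTournament  -- keep 2 elites, tournament selection
      crossOver    = mkCrossOver crossOverBits
      mutate       = mkMutate 0.2 mutateBits      -- 20% mutation rate
      best         = runGA (mkStdGen 42) 100 initialGenes fitness select crossOver mutate
  print best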
{-# LANGUAGE CPP #-}
module Distribution.Simple.UUAGC.UUAGC(uuagcUserHook,
uuagcUserHook',
uuagc,
uuagcLibUserHook,
uuagcFromString
) where
import Distribution.Simple.BuildPaths (autogenModulesDir)
import Debug.Trace
import Distribution.Simple
import Distribution.Simple.PreProcess
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Utils
import Distribution.Simple.Setup
import Distribution.PackageDescription hiding (Flag)
import Distribution.Simple.UUAGC.AbsSyn( AGFileOption(..)
, AGFileOptions
, AGOptionsClass(..)
, lookupFileOptions
, fileClasses
)
import Distribution.Simple.UUAGC.Parser
import Options hiding (verbose)
import Distribution.Verbosity
import System.Process( CreateProcess(..), createProcess, CmdSpec(..)
, StdStream(..), runProcess, waitForProcess
, shell)
import System.Directory(getModificationTime
,doesFileExist
,removeFile)
import System.FilePath(pathSeparators,
(</>),
takeFileName,
normalise,
joinPath,
dropFileName,
addExtension,
dropExtension,
replaceExtension,
splitDirectories)
import System.Exit (ExitCode(..))
import System.IO( openFile, IOMode(..),
hFileSize,
hSetFileSize,
hClose,
hGetContents,
hFlush,
Handle(..), stderr, hPutStr, hPutStrLn)
import System.Exit(exitFailure)
import Control.Exception (throwIO)
import Control.Monad (liftM, when, guard, forM_, forM)
import Control.Arrow ((&&&), second)
import Data.Maybe (maybeToList)
import Data.Either (partitionEithers)
import Data.List (nub,intersperse)
import Data.Map (Map)
import qualified Data.Map as Map
{-# DEPRECATED uuagcUserHook, uuagcUserHook', uuagc "Use uuagcLibUserHook instead" #-}
-- | 'uuagcn' is the name of the uuagc compiler
uuagcn :: String
uuagcn = "uuagc"
-- | 'defUUAGCOptions' is the default name of the uuagc options file
defUUAGCOptions :: String
defUUAGCOptions = "uuagc_options"
-- | File used to store the classes defined in the cabal file.
agClassesFile :: String
agClassesFile = "ag_file_options"
-- | The prefix used for the cabal file options
agModule :: String
agModule = "x-agmodule"
-- | The prefix used for the cabal file options used for defining classes
agClass :: String
agClass = "x-agclass"
-- | Deprecated userhook
uuagcUserHook :: UserHooks
uuagcUserHook = uuagcUserHook' uuagcn
-- | Deprecated userhook
uuagcUserHook' :: String -> UserHooks
uuagcUserHook' uuagcPath = uuagcLibUserHook (uuagcFromString uuagcPath)
-- | Create uuagc function using shell (old method)
uuagcFromString :: String -> [String] -> FilePath -> IO (ExitCode, [FilePath])
uuagcFromString uuagcPath args file = do
let argline = uuagcPath ++ concatMap (' ':) (args ++ [file])
(_, Just ppOutput, Just ppError, ph) <- createProcess (shell argline)
{ std_in = Inherit
, std_out = CreatePipe
, std_err = CreatePipe
}
ec <- waitForProcess ph
case ec of
ExitSuccess ->
do putErrorInfo ppError
fls <- processContent ppOutput
return (ExitSuccess, fls)
(ExitFailure exc) ->
do hPutStrLn stderr (uuagcPath ++ ": " ++ show exc)
putErrorInfo ppOutput
putErrorInfo ppError
return (ExitFailure exc, [])
-- | Main hook, argument should be uuagc function
uuagcLibUserHook :: ([String] -> FilePath -> IO (ExitCode, [FilePath])) -> UserHooks
uuagcLibUserHook uuagc = hooks where
hooks = simpleUserHooks { hookedPreProcessors = ("ag", ag):("lag",ag):knownSuffixHandlers
, buildHook = uuagcBuildHook uuagc
, sDistHook = uuagcSDistHook uuagc
}
ag = uuagc' uuagc
originalPreBuild = preBuild simpleUserHooks
originalBuildHook = buildHook simpleUserHooks
originalSDistHook = sDistHook simpleUserHooks
processContent :: Handle -> IO [String]
processContent = liftM words . hGetContents
putErrorInfo :: Handle -> IO ()
putErrorInfo h = hGetContents h >>= hPutStr stderr
-- | 'updateAGFile' searches the uuagc options file for a list of all
-- AG files and their file dependencies, in order to see whether the
-- latter are newer than the former, and if so, to update the AG file
updateAGFile :: ([String] -> FilePath -> IO (ExitCode, [FilePath]))
-> Map FilePath (Options, Maybe (FilePath, [String]))
-> (FilePath, (Options, Maybe (FilePath, [String])))
-> IO ()
updateAGFile _ _ (_,(_,Nothing)) = return ()
updateAGFile uuagc newOptions (file,(opts,Just (gen,sp))) = do
hasGen <- doesFileExist gen
when hasGen $ do
(ec, files) <- uuagc (optionsToString $ opts { genFileDeps = True, searchPath = sp }) file
case ec of
ExitSuccess -> do
let newOpts :: Options
newOpts = maybe noOptions fst $ Map.lookup file newOptions
optRebuild = optionsToString newOpts /= optionsToString opts
modRebuild <-
if null files
then return False
else do
flsmt <- mapM getModificationTime files
let maxModified = maximum flsmt
fmt <- getModificationTime gen
return $ maxModified > fmt
-- When some dependency is newer or options have changed, we should regenerate
when (optRebuild || modRebuild) $ removeFile gen
ex@(ExitFailure _) -> throwIO ex
getAGFileOptions :: [(String, String)] -> IO AGFileOptions
getAGFileOptions extra = do
cabalOpts <- mapM (parseOptionAG . snd) $ filter ((== agModule) . fst) extra
usesOptionsFile <- doesFileExist defUUAGCOptions
if usesOptionsFile
then do r <- parserAG' defUUAGCOptions
case r of
Left e -> die (show e)
Right a -> return $ cabalOpts ++ a
else return cabalOpts
getAGClasses :: [(String, String)] -> IO [AGOptionsClass]
getAGClasses = mapM (parseClassAG . snd) . filter ((== agClass) . fst)
writeFileOptions :: FilePath -> Map FilePath (Options, Maybe (FilePath,[String])) -> IO ()
writeFileOptions classesPath opts = do
hClasses <- openFile classesPath WriteMode
hPutStr hClasses $ show $ Map.map (\(opt,gen) -> (optionsToString opt, gen)) opts
hFlush hClasses
hClose hClasses
readFileOptions :: FilePath -> IO (Map FilePath (Options, Maybe (FilePath,[String])))
readFileOptions classesPath = do
isFile <- doesFileExist classesPath
if isFile
then do hClasses <- openFile classesPath ReadMode
sClasses <- hGetContents hClasses
classes <- readIO sClasses :: IO (Map FilePath ([String], Maybe (FilePath,[String])))
hClose hClasses
return $ Map.map (\(opt,gen) -> let (opt',_,_) = getOptions opt in (opt', gen)) classes
else return Map.empty
getOptionsFromClass :: [(String, Options)] -> AGFileOption -> ([String], Options)
getOptionsFromClass classes fOpt =
second (foldl combineOptions (opts fOpt))
. partitionEithers $ do
fClass <- fileClasses fOpt
case fClass `lookup` classes of
Just x -> return $ Right x
Nothing -> return $ Left $ "Warning: The class "
++ show fClass
++ " is not defined."
uuagcSDistHook :: ([String] -> FilePath -> IO (ExitCode, [FilePath]))
-> PackageDescription
-> Maybe LocalBuildInfo
-> UserHooks
-> SDistFlags
-> IO ()
uuagcSDistHook uuagc pd mbLbi uh df = do
{-
case mbLbi of
Nothing -> warn normal "sdist: the local buildinfo was not present. Skipping AG initialization. Dist may fail."
Just lbi -> let classesPath = buildDir lbi </> agClassesFile
in commonHook uuagc classesPath pd lbi (sDistVerbosity df)
originalSDistHook pd mbLbi uh df
-}
originalSDistHook pd mbLbi (uh { hookedPreProcessors = ("ag", nouuagc):("lag",nouuagc):knownSuffixHandlers }) df -- bypass preprocessors
uuagcBuildHook
:: ([String] -> FilePath -> IO (ExitCode, [FilePath]))
-> PackageDescription
-> LocalBuildInfo
-> UserHooks
-> BuildFlags
-> IO ()
uuagcBuildHook uuagc pd lbi uh bf = do
let classesPath = buildDir lbi </> agClassesFile
commonHook uuagc classesPath pd lbi (buildVerbosity bf)
originalBuildHook pd lbi uh bf
commonHook :: ([String] -> FilePath -> IO (ExitCode, [FilePath]))
-> FilePath
-> PackageDescription
-> LocalBuildInfo
-> Flag Verbosity
-> IO ()
commonHook uuagc classesPath pd lbi fl = do
let verbosity = fromFlagOrDefault normal fl
info verbosity $ "commonHook: Assuming AG classesPath: " ++ classesPath
createDirectoryIfMissingVerbose verbosity True (buildDir lbi)
-- Read already existing options
-- Map FilePath (Options, Maybe (FilePath,[String]))
oldOptions <- readFileOptions classesPath
-- Read options from cabal and settings file
let lib = library pd
exes = executables pd
bis = map libBuildInfo (maybeToList lib) ++ map buildInfo exes
classes <- map (className &&& opts') `fmap` (getAGClasses . customFieldsPD $ pd)
configOptions <- getAGFileOptions (bis >>= customFieldsBI)
-- Construct new options map
newOptionsL <- forM configOptions (\ opt ->
let (notFound, opts) = getOptionsFromClass classes $ opt
file = normalise $ filename opt
gen = maybe Nothing snd $ Map.lookup file oldOptions
in do info verbosity $ "options for " ++ file ++ ": " ++ unwords (optionsToString opts)
forM_ notFound (hPutStrLn stderr)
return (file, (opts, gen)))
let newOptions = Map.fromList newOptionsL
writeFileOptions classesPath newOptions
-- Check if files should be regenerated
mapM_ (updateAGFile uuagc newOptions) $ Map.toList oldOptions
getAGFileList :: AGFileOptions -> [FilePath]
getAGFileList = map (normalise . filename)
uuagc :: BuildInfo -> LocalBuildInfo -> PreProcessor
uuagc = uuagc' (uuagcFromString uuagcn)
uuagc' :: ([String] -> FilePath -> IO (ExitCode, [FilePath]))
-> BuildInfo
-> LocalBuildInfo
-> PreProcessor
uuagc' uuagc build lbi =
PreProcessor {
platformIndependent = True,
runPreProcessor = mkSimplePreProcessor $ \ inFile outFile verbosity ->
do notice verbosity $ "[UUAGC] processing: " ++ inFile ++ " generating: " ++ outFile
let classesPath = buildDir lbi </> agClassesFile
info verbosity $ "uuagc-preprocessor: Assuming AG classesPath: " ++ classesPath
fileOpts <- readFileOptions classesPath
opts <- case Map.lookup inFile fileOpts of
Nothing -> do warn verbosity $ "No options found for " ++ inFile
return noOptions
Just (opt,gen) -> return opt
let search = dropFileName inFile
options = opts { searchPath = search : hsSourceDirs build ++ searchPath opts
, outputFiles = outFile : (outputFiles opts) }
(eCode,_) <- uuagc (optionsToString options) inFile
case eCode of
ExitSuccess -> writeFileOptions classesPath (Map.insert inFile (opts, Just (outFile, searchPath options)) fileOpts)
ex@(ExitFailure _) -> throwIO ex
}
nouuagc :: BuildInfo -> LocalBuildInfo -> PreProcessor
nouuagc build lbi =
PreProcessor {
platformIndependent = True,
runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do
info verbosity ("skipping: " ++ outFile)
}
|
norm2782/uuagc
|
cabal-plugin/src/Distribution/Simple/UUAGC/UUAGC.hs
|
bsd-3-clause
| 12,604 | 0 | 22 | 3,846 | 3,085 | 1,630 | 1,455 | 240 | 3 |
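-- A minimal Setup.hs sketch for a package that uses the hooks defined above
-- (assumes Cabal and this uuagc-cabal module are installed). uuagcLibUserHook
-- is the non-deprecated entry point; here it is given the shell-based runner.
module Main (main) where

import Distribution.Simple (defaultMainWithHooks)
import Distribution.Simple.UUAGC.UUAGC (uuagcFromString, uuagcLibUserHook)

main :: IO ()
main = defaultMainWithHooks (uuagcLibUserHook (uuagcFromString "uuagc"))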
module TestFactorial where
import Factorial
import TestUtil
ff :: Int
--ff = 2 ^ 27
ff = 6
run :: IO ()
run = do
putStrLn ("factorial1 " ++ show ff ++ " = " ++ show (factorial1 ff))
putStrLn ("factorial2 " ++ show ff ++ " = " ++ show (factorial2 ff))
|
pmilne/algebra
|
test/TestFactorial.hs
|
bsd-3-clause
| 275 | 0 | 12 | 77 | 102 | 52 | 50 | 9 | 1 |
{-
Copyright James d'Arcy 2010
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of James d'Arcy nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Data.Dicom.Tag where
import Data.Word (Word32)
type DicomTag = Word32
-- | Group 0x0002 - Metadata
tRANSFER_SYNTAX_UID :: DicomTag
tRANSFER_SYNTAX_UID = 0x00020010
-- | Group 0x0008
sOP_CLASS_UID :: DicomTag
sOP_CLASS_UID = 0x00080016
sOP_INSTANCE_UID :: DicomTag
sOP_INSTANCE_UID = 0x00080018
sTUDY_DATE :: DicomTag
sTUDY_DATE = 0x00080020
sERIES_DATE :: DicomTag
sERIES_DATE = 0x00080021
mODALITY :: DicomTag
mODALITY = 0x00080060
sTUDY_DESCRIPTION :: DicomTag
sTUDY_DESCRIPTION = 0x00081030
sERIES_DESCRIPTION :: DicomTag
sERIES_DESCRIPTION = 0x0008103e
-- | Group 0x0010
pATIENT_NAME :: DicomTag
pATIENT_NAME = 0x00100010
-- | Group 0x0020
sTUDY_INSTANCE_UID :: DicomTag
sTUDY_INSTANCE_UID = 0x0020000d
sERIES_INSTANCE_UID :: DicomTag
sERIES_INSTANCE_UID = 0x0020000e
sTUDY_ID :: DicomTag
sTUDY_ID = 0x00200010
sERIES_NUMBER :: DicomTag
sERIES_NUMBER = 0x00200011
-- | Group 0x0028
nUMBER_OF_FRAMES :: DicomTag
nUMBER_OF_FRAMES = 0x00280008
rOWS :: DicomTag
rOWS = 0x00280010
cOLUMNS :: DicomTag
cOLUMNS = 0x00280011
-- | Group 0x7fe0
pIXEL_DATA :: DicomTag
pIXEL_DATA = 0x7fe00010
|
jamesdarcy/DicomH
|
src/Data/Dicom/Tag.hs
|
bsd-3-clause
| 2,638 | 0 | 5 | 438 | 200 | 124 | 76 | 37 | 1 |
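-- A small helper sketch built on the constants above (assumes Data.Dicom.Tag
-- is importable). A DicomTag packs the 16-bit group number in the high half of
-- the Word32 and the 16-bit element number in the low half, so both parts can
-- be recovered with a shift and a mask.
module DicomTagExample where

import Data.Bits (shiftR, (.&.))
import Data.Word (Word16)
import Text.Printf (printf)

import Data.Dicom.Tag

groupOf, elementOf :: DicomTag -> Word16
groupOf tag   = fromIntegral (tag `shiftR` 16)
elementOf tag = fromIntegral (tag .&. 0xffff)

main :: IO ()
main = printf "(%04x,%04x)\n" (groupOf tRANSFER_SYNTAX_UID) (elementOf tRANSFER_SYNTAX_UID)
-- prints (0002,0010)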
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Error-checking and other utilities for @deriving@ clauses or declarations.
-}
{-# LANGUAGE TypeFamilies #-}
module TcDerivUtils (
DerivM, DerivEnv(..),
DerivSpec(..), pprDerivSpec,
DerivSpecMechanism(..), isDerivSpecStock,
isDerivSpecNewtype, isDerivSpecAnyClass,
DerivContext, DerivStatus(..),
PredOrigin(..), ThetaOrigin(..), mkPredOrigin,
mkThetaOrigin, mkThetaOriginFromPreds, substPredOrigin,
checkSideConditions, hasStockDeriving,
canDeriveAnyClass,
std_class_via_coercible, non_coercible_class,
newDerivClsInst, extendLocalInstEnv
) where
import GhcPrelude
import Bag
import BasicTypes
import Class
import DataCon
import DynFlags
import ErrUtils
import HscTypes (lookupFixity, mi_fix)
import HsSyn
import Inst
import InstEnv
import LoadIface (loadInterfaceForName)
import Module (getModule)
import Name
import Outputable
import PrelNames
import SrcLoc
import TcGenDeriv
import TcGenFunctor
import TcGenGenerics
import TcRnMonad
import TcType
import THNames (liftClassKey)
import TyCon
import Type
import Util
import VarSet
import Control.Monad.Trans.Reader
import qualified GHC.LanguageExtensions as LangExt
import ListSetOps (assocMaybe)
-- | To avoid having to manually plumb everything in 'DerivEnv' throughout
-- various functions in @TcDeriv@ and @TcDerivInfer@, we use 'DerivM', which
-- is a simple reader around 'TcRn'.
type DerivM = ReaderT DerivEnv TcRn
-- | Contains all of the information known about a derived instance when
-- determining what its @EarlyDerivSpec@ should be.
data DerivEnv = DerivEnv
{ denv_overlap_mode :: Maybe OverlapMode
-- ^ Is this an overlapping instance?
, denv_tvs :: [TyVar]
-- ^ Universally quantified type variables in the instance
, denv_cls :: Class
-- ^ Class for which we need to derive an instance
, denv_cls_tys :: [Type]
-- ^ Other arguments to the class except the last
, denv_tc :: TyCon
-- ^ Type constructor for which the instance is requested
-- (last arguments to the type class)
, denv_tc_args :: [Type]
-- ^ Arguments to the type constructor
, denv_rep_tc :: TyCon
-- ^ The representation tycon for 'denv_tc'
-- (for data family instances)
, denv_rep_tc_args :: [Type]
-- ^ The representation types for 'denv_tc_args'
-- (for data family instances)
, denv_mtheta :: DerivContext
-- ^ 'Just' the context of the instance, for standalone deriving.
-- 'Nothing' for @deriving@ clauses.
, denv_strat :: Maybe DerivStrategy
-- ^ 'Just' if user requests a particular deriving strategy.
-- Otherwise, 'Nothing'.
}
instance Outputable DerivEnv where
ppr (DerivEnv { denv_overlap_mode = overlap_mode
, denv_tvs = tvs
, denv_cls = cls
, denv_cls_tys = cls_tys
, denv_tc = tc
, denv_tc_args = tc_args
, denv_rep_tc = rep_tc
, denv_rep_tc_args = rep_tc_args
, denv_mtheta = mtheta
, denv_strat = mb_strat })
= hang (text "DerivEnv")
2 (vcat [ text "denv_overlap_mode" <+> ppr overlap_mode
, text "denv_tvs" <+> ppr tvs
, text "denv_cls" <+> ppr cls
, text "denv_cls_tys" <+> ppr cls_tys
, text "denv_tc" <+> ppr tc
, text "denv_tc_args" <+> ppr tc_args
, text "denv_rep_tc" <+> ppr rep_tc
, text "denv_rep_tc_args" <+> ppr rep_tc_args
, text "denv_mtheta" <+> ppr mtheta
, text "denv_strat" <+> ppr mb_strat ])
data DerivSpec theta = DS { ds_loc :: SrcSpan
, ds_name :: Name -- DFun name
, ds_tvs :: [TyVar]
, ds_theta :: theta
, ds_cls :: Class
, ds_tys :: [Type]
, ds_tc :: TyCon
, ds_overlap :: Maybe OverlapMode
, ds_mechanism :: DerivSpecMechanism }
-- This spec implies a dfun declaration of the form
-- df :: forall tvs. theta => C tys
-- The Name is the name for the DFun we'll build
-- The tyvars bind all the variables in the theta
      -- For type families, the tycon
-- in ds_tys is the *family* tycon
-- in ds_tc is the *representation* type
-- For non-family tycons, both are the same
-- the theta is either the given and final theta, in standalone deriving,
-- or the not-yet-simplified list of constraints together with their origin
-- ds_mechanism specifies the means by which GHC derives the instance.
-- See Note [Deriving strategies] in TcDeriv
{-
Example:
newtype instance T [a] = MkT (Tree a) deriving( C s )
==>
axiom T [a] = :RTList a
axiom :RTList a = Tree a
DS { ds_tvs = [a,s], ds_cls = C, ds_tys = [s, T [a]]
, ds_tc = :RTList, ds_mechanism = DerivSpecNewtype (Tree a) }
-}
pprDerivSpec :: Outputable theta => DerivSpec theta -> SDoc
pprDerivSpec (DS { ds_loc = l, ds_name = n, ds_tvs = tvs, ds_cls = c,
ds_tys = tys, ds_theta = rhs, ds_mechanism = mech })
= hang (text "DerivSpec")
2 (vcat [ text "ds_loc =" <+> ppr l
, text "ds_name =" <+> ppr n
, text "ds_tvs =" <+> ppr tvs
, text "ds_cls =" <+> ppr c
, text "ds_tys =" <+> ppr tys
, text "ds_theta =" <+> ppr rhs
, text "ds_mechanism =" <+> ppr mech ])
instance Outputable theta => Outputable (DerivSpec theta) where
ppr = pprDerivSpec
-- What action to take in order to derive a class instance.
-- See Note [Deriving strategies] in TcDeriv
data DerivSpecMechanism
= DerivSpecStock -- "Standard" classes
(SrcSpan -> TyCon
-> [Type]
-> TcM (LHsBinds GhcPs, BagDerivStuff, [Name]))
-- This function returns three things:
--
-- 1. @LHsBinds GhcPs@: The derived instance's function bindings
-- (e.g., @compare (T x) (T y) = compare x y@)
-- 2. @BagDerivStuff@: Auxiliary bindings needed to support the derived
-- instance. As examples, derived 'Generic' instances require
-- associated type family instances, and derived 'Eq' and 'Ord'
-- instances require top-level @con2tag@ functions.
-- See Note [Auxiliary binders] in TcGenDeriv.
-- 3. @[Name]@: A list of Names for which @-Wunused-binds@ should be
-- suppressed. This is used to suppress unused warnings for record
-- selectors when deriving 'Read', 'Show', or 'Generic'.
-- See Note [Deriving and unused record selectors].
| DerivSpecNewtype -- -XGeneralizedNewtypeDeriving
Type -- The newtype rep type
| DerivSpecAnyClass -- -XDeriveAnyClass
isDerivSpecStock, isDerivSpecNewtype, isDerivSpecAnyClass
:: DerivSpecMechanism -> Bool
isDerivSpecStock (DerivSpecStock{}) = True
isDerivSpecStock _ = False
isDerivSpecNewtype (DerivSpecNewtype{}) = True
isDerivSpecNewtype _ = False
isDerivSpecAnyClass (DerivSpecAnyClass{}) = True
isDerivSpecAnyClass _ = False
-- A DerivSpecMechanism can be losslessly converted to a DerivStrategy.
mechanismToStrategy :: DerivSpecMechanism -> DerivStrategy
mechanismToStrategy (DerivSpecStock{}) = StockStrategy
mechanismToStrategy (DerivSpecNewtype{}) = NewtypeStrategy
mechanismToStrategy (DerivSpecAnyClass{}) = AnyclassStrategy
instance Outputable DerivSpecMechanism where
ppr = ppr . mechanismToStrategy
type DerivContext = Maybe ThetaType
-- Nothing <=> Vanilla deriving; infer the context of the instance decl
-- Just theta <=> Standalone deriving: context supplied by programmer
data DerivStatus = CanDerive -- Stock class, can derive
| DerivableClassError SDoc -- Stock class, but can't do it
| DerivableViaInstance -- See Note [Deriving any class]
| NonDerivableClass SDoc -- Non-stock class
-- A stock class is one either defined in the Haskell report or for which GHC
-- otherwise knows how to generate code for (possibly requiring the use of a
-- language extension), such as Eq, Ord, Ix, Data, Generic, etc.
-- | A 'PredType' annotated with the origin of the constraint 'CtOrigin',
-- and whether the constraint deals in types or kinds.
data PredOrigin = PredOrigin PredType CtOrigin TypeOrKind
-- | A list of wanted 'PredOrigin' constraints ('to_wanted_origins') alongside
-- any corresponding given constraints ('to_givens') and locally quantified
-- type variables ('to_tvs').
--
-- In most cases, 'to_givens' will be empty, as most deriving mechanisms (e.g.,
-- stock and newtype deriving) do not require given constraints. The exception
-- is @DeriveAnyClass@, which can involve given constraints. For example,
-- if you tried to derive an instance for the following class using
-- @DeriveAnyClass@:
--
-- @
-- class Foo a where
-- bar :: a -> b -> String
-- default bar :: (Show a, Ix b) => a -> b -> String
-- bar = show
--
-- baz :: Eq a => a -> a -> Bool
-- default baz :: Ord a => a -> a -> Bool
-- baz x y = compare x y == EQ
-- @
--
-- Then it would generate two 'ThetaOrigin's, one for each method:
--
-- @
-- [ ThetaOrigin { to_tvs = [b]
-- , to_givens = []
-- , to_wanted_origins = [Show a, Ix b] }
-- , ThetaOrigin { to_tvs = []
-- , to_givens = [Eq a]
-- , to_wanted_origins = [Ord a] }
-- ]
-- @
data ThetaOrigin
= ThetaOrigin { to_tvs :: [TyVar]
, to_givens :: ThetaType
, to_wanted_origins :: [PredOrigin] }
instance Outputable PredOrigin where
ppr (PredOrigin ty _ _) = ppr ty -- The origin is not so interesting when debugging
instance Outputable ThetaOrigin where
ppr (ThetaOrigin { to_tvs = tvs
, to_givens = givens
, to_wanted_origins = wanted_origins })
= hang (text "ThetaOrigin")
2 (vcat [ text "to_tvs =" <+> ppr tvs
, text "to_givens =" <+> ppr givens
, text "to_wanted_origins =" <+> ppr wanted_origins ])
mkPredOrigin :: CtOrigin -> TypeOrKind -> PredType -> PredOrigin
mkPredOrigin origin t_or_k pred = PredOrigin pred origin t_or_k
mkThetaOrigin :: CtOrigin -> TypeOrKind -> [TyVar] -> ThetaType -> ThetaType
-> ThetaOrigin
mkThetaOrigin origin t_or_k tvs givens
= ThetaOrigin tvs givens . map (mkPredOrigin origin t_or_k)
-- A common case where the ThetaOrigin only contains wanted constraints, with
-- no givens or locally scoped type variables.
mkThetaOriginFromPreds :: [PredOrigin] -> ThetaOrigin
mkThetaOriginFromPreds = ThetaOrigin [] []
substPredOrigin :: HasCallStack => TCvSubst -> PredOrigin -> PredOrigin
substPredOrigin subst (PredOrigin pred origin t_or_k)
= PredOrigin (substTy subst pred) origin t_or_k
{-
************************************************************************
* *
Class deriving diagnostics
* *
************************************************************************
Only certain blessed classes can be used in a deriving clause (without the
assistance of GeneralizedNewtypeDeriving or DeriveAnyClass). These classes
are listed below in the definition of hasStockDeriving. The sideConditions
function determines the criteria that needs to be met in order for a particular
class to be able to be derived successfully.
A class might be able to be used in a deriving clause if -XDeriveAnyClass
is willing to support it. The canDeriveAnyClass function checks if this is the
case.
-}
hasStockDeriving
:: Class -> Maybe (SrcSpan
-> TyCon
-> [Type]
-> TcM (LHsBinds GhcPs, BagDerivStuff, [Name]))
hasStockDeriving clas
= assocMaybe gen_list (getUnique clas)
where
gen_list
:: [(Unique, SrcSpan
-> TyCon
-> [Type]
-> TcM (LHsBinds GhcPs, BagDerivStuff, [Name]))]
gen_list = [ (eqClassKey, simpleM gen_Eq_binds)
, (ordClassKey, simpleM gen_Ord_binds)
, (enumClassKey, simpleM gen_Enum_binds)
, (boundedClassKey, simple gen_Bounded_binds)
, (ixClassKey, simpleM gen_Ix_binds)
, (showClassKey, read_or_show gen_Show_binds)
, (readClassKey, read_or_show gen_Read_binds)
, (dataClassKey, simpleM gen_Data_binds)
, (functorClassKey, simple gen_Functor_binds)
, (foldableClassKey, simple gen_Foldable_binds)
, (traversableClassKey, simple gen_Traversable_binds)
, (liftClassKey, simple gen_Lift_binds)
, (genClassKey, generic (gen_Generic_binds Gen0))
, (gen1ClassKey, generic (gen_Generic_binds Gen1)) ]
simple gen_fn loc tc _
= let (binds, deriv_stuff) = gen_fn loc tc
in return (binds, deriv_stuff, [])
simpleM gen_fn loc tc _
= do { (binds, deriv_stuff) <- gen_fn loc tc
; return (binds, deriv_stuff, []) }
read_or_show gen_fn loc tc _
= do { fix_env <- getDataConFixityFun tc
; let (binds, deriv_stuff) = gen_fn fix_env loc tc
field_names = all_field_names tc
; return (binds, deriv_stuff, field_names) }
generic gen_fn _ tc inst_tys
= do { (binds, faminst) <- gen_fn tc inst_tys
; let field_names = all_field_names tc
; return (binds, unitBag (DerivFamInst faminst), field_names) }
-- See Note [Deriving and unused record selectors]
all_field_names = map flSelector . concatMap dataConFieldLabels
. tyConDataCons
{-
Note [Deriving and unused record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (see Trac #13919):
module Main (main) where
data Foo = MkFoo {bar :: String} deriving Show
main :: IO ()
main = print (Foo "hello")
Strictly speaking, the record selector `bar` is unused in this module, since
neither `main` nor the derived `Show` instance for `Foo` mention `bar`.
However, the behavior of `main` is affected by the presence of `bar`, since
it will print different output depending on whether `MkFoo` is defined using
record selectors or not. Therefore, we do not issue a
"Defined but not used: ‘bar’" warning for this module, since removing `bar`
changes the program's behavior. This is the reason behind the [Name] part of
the return type of `hasStockDeriving`—it tracks all of the record selector
`Name`s for which -Wunused-binds should be suppressed.
Currently, the only three stock derived classes that require this are Read,
Show, and Generic, as their derived code all depend on the record selectors
of the derived data type's constructors.
See also Note [Newtype deriving and unused constructors] in TcDeriv for
another example of a similar trick.
-}
getDataConFixityFun :: TyCon -> TcM (Name -> Fixity)
-- If the TyCon is locally defined, we want the local fixity env;
-- but if it is imported (which happens for standalone deriving)
-- we need to get the fixity env from the interface file
-- c.f. RnEnv.lookupFixity, and Trac #9830
getDataConFixityFun tc
= do { this_mod <- getModule
; if nameIsLocalOrFrom this_mod name
then do { fix_env <- getFixityEnv
; return (lookupFixity fix_env) }
else do { iface <- loadInterfaceForName doc name
-- Should already be loaded!
; return (mi_fix iface . nameOccName) } }
where
name = tyConName tc
doc = text "Data con fixities for" <+> ppr name
------------------------------------------------------------------
-- Check side conditions that dis-allow derivability for particular classes
-- This is *apart* from the newtype-deriving mechanism
--
-- Here we get the representation tycon in case of family instances as it has
-- the data constructors - but we need to be careful to fall back to the
-- family tycon (with indexes) in error messages.
checkSideConditions :: DynFlags -> DerivContext -> Class -> [TcType]
-> TyCon -> TyCon
-> DerivStatus
checkSideConditions dflags mtheta cls cls_tys tc rep_tc
| Just cond <- sideConditions mtheta cls
= case (cond dflags tc rep_tc) of
NotValid err -> DerivableClassError err -- Class-specific error
IsValid | null (filterOutInvisibleTypes (classTyCon cls) cls_tys)
-> CanDerive
-- All stock derivable classes are unary in the sense that
                       -- there should be no types in cls_tys (i.e., no type args
                       -- other than the last). Note that cls_tys can contain
-- invisible types as well (e.g., for Generic1, which is
-- poly-kinded), so make sure those are not counted.
| otherwise -> DerivableClassError (classArgsErr cls cls_tys)
-- e.g. deriving( Eq s )
| NotValid err <- canDeriveAnyClass dflags
= NonDerivableClass err -- DeriveAnyClass does not work
| otherwise
= DerivableViaInstance -- DeriveAnyClass should work
classArgsErr :: Class -> [Type] -> SDoc
classArgsErr cls cls_tys = quotes (ppr (mkClassPred cls cls_tys)) <+> text "is not a class"
-- Side conditions (whether the datatype must have at least one constructor,
-- required language extensions, etc.) for using GHC's stock deriving
-- mechanism on certain classes (as opposed to classes that require
-- GeneralizedNewtypeDeriving or DeriveAnyClass). Returns Nothing for a
-- class for which stock deriving isn't possible.
sideConditions :: DerivContext -> Class -> Maybe Condition
sideConditions mtheta cls
| cls_key == eqClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == ordClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == showClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == readClassKey = Just (cond_std `andCond` cond_args cls)
| cls_key == enumClassKey = Just (cond_std `andCond` cond_isEnumeration)
| cls_key == ixClassKey = Just (cond_std `andCond` cond_enumOrProduct cls)
| cls_key == boundedClassKey = Just (cond_std `andCond` cond_enumOrProduct cls)
| cls_key == dataClassKey = Just (checkFlag LangExt.DeriveDataTypeable `andCond`
cond_vanilla `andCond`
cond_args cls)
| cls_key == functorClassKey = Just (checkFlag LangExt.DeriveFunctor `andCond`
cond_vanilla `andCond`
cond_functorOK True False)
| cls_key == foldableClassKey = Just (checkFlag LangExt.DeriveFoldable `andCond`
cond_vanilla `andCond`
cond_functorOK False True)
-- Functor/Fold/Trav works ok
-- for rank-n types
| cls_key == traversableClassKey = Just (checkFlag LangExt.DeriveTraversable `andCond`
cond_vanilla `andCond`
cond_functorOK False False)
| cls_key == genClassKey = Just (checkFlag LangExt.DeriveGeneric `andCond`
cond_vanilla `andCond`
cond_RepresentableOk)
| cls_key == gen1ClassKey = Just (checkFlag LangExt.DeriveGeneric `andCond`
cond_vanilla `andCond`
cond_Representable1Ok)
| cls_key == liftClassKey = Just (checkFlag LangExt.DeriveLift `andCond`
cond_vanilla `andCond`
cond_args cls)
| otherwise = Nothing
where
cls_key = getUnique cls
cond_std = cond_stdOK mtheta False -- Vanilla data constructors, at least one,
-- and monotype arguments
cond_vanilla = cond_stdOK mtheta True -- Vanilla data constructors but
-- allow no data cons or polytype arguments
canDeriveAnyClass :: DynFlags -> Validity
-- IsValid: we can (try to) derive it via an empty instance declaration
-- NotValid s: we can't, reason s
canDeriveAnyClass dflags
| not (xopt LangExt.DeriveAnyClass dflags)
= NotValid (text "Try enabling DeriveAnyClass")
| otherwise
= IsValid -- OK!
type Condition
= DynFlags
-> TyCon -- ^ The data type's 'TyCon'. For data families, this is the
-- family 'TyCon'.
-> TyCon -- ^ For data families, this is the representation 'TyCon'.
-- Otherwise, this is the same as the other 'TyCon' argument.
-> Validity -- ^ 'IsValid' if deriving an instance for this 'TyCon' is
-- possible. Otherwise, it's @'NotValid' err@, where @err@
-- explains what went wrong.
orCond :: Condition -> Condition -> Condition
orCond c1 c2 dflags tc rep_tc
= case (c1 dflags tc rep_tc, c2 dflags tc rep_tc) of
(IsValid, _) -> IsValid -- c1 succeeds
      (_, IsValid) -> IsValid -- c2 succeeds
(NotValid x, NotValid y) -> NotValid (x $$ text " or" $$ y)
-- Both fail
andCond :: Condition -> Condition -> Condition
andCond c1 c2 dflags tc rep_tc
= c1 dflags tc rep_tc `andValid` c2 dflags tc rep_tc
-- | Some common validity checks shared among stock derivable classes. One
-- check that absolutely must hold is that if an instance @C (T a)@ is being
-- derived, then @T@ must be a tycon for a data type or a newtype. The
-- remaining checks are only performed if using a @deriving@ clause (i.e.,
-- they're ignored if using @StandaloneDeriving@):
--
-- 1. The data type must have at least one constructor (this check is ignored
-- if using @EmptyDataDeriving@).
--
-- 2. The data type cannot have any GADT constructors.
--
-- 3. The data type cannot have any constructors with existentially quantified
-- type variables.
--
-- 4. The data type cannot have a context (e.g., @data Foo a = Eq a => MkFoo@).
--
-- 5. The data type cannot have fields with higher-rank types.
cond_stdOK
:: DerivContext -- ^ 'Just' if this is standalone deriving, 'Nothing' if not.
-- If it is standalone, we relax some of the validity checks
-- we would otherwise perform (i.e., "just go for it").
-> Bool -- ^ 'True' <=> allow higher rank arguments and empty data
-- types (with no data constructors) even in the absence of
-- the -XEmptyDataDeriving extension.
-> Condition
cond_stdOK mtheta permissive dflags tc rep_tc
= valid_ADT `andValid` valid_misc
where
valid_ADT, valid_misc :: Validity
valid_ADT
| isAlgTyCon tc || isDataFamilyTyCon tc
= IsValid
| otherwise
-- Complain about functions, primitive types, and other tycons that
-- stock deriving can't handle.
= NotValid $ text "The last argument of the instance must be a"
<+> text "data or newtype application"
valid_misc
= case mtheta of
Just _ -> IsValid
-- Don't check these conservative conditions for
-- standalone deriving; just generate the code
-- and let the typechecker handle the result
Nothing
| null data_cons -- 1.
, not permissive
-> checkFlag LangExt.EmptyDataDeriving dflags tc rep_tc `orValid`
NotValid (no_cons_why rep_tc $$ empty_data_suggestion)
| not (null con_whys)
-> NotValid (vcat con_whys $$ standalone_suggestion)
| otherwise
-> IsValid
empty_data_suggestion =
text "Use EmptyDataDeriving to enable deriving for empty data types"
standalone_suggestion =
text "Possible fix: use a standalone deriving declaration instead"
data_cons = tyConDataCons rep_tc
con_whys = getInvalids (map check_con data_cons)
check_con :: DataCon -> Validity
check_con con
| not (null eq_spec) -- 2.
= bad "is a GADT"
| not (null ex_tvs) -- 3.
= bad "has existential type variables in its type"
| not (null theta) -- 4.
= bad "has constraints in its type"
| not (permissive || all isTauTy (dataConOrigArgTys con)) -- 5.
= bad "has a higher-rank type"
| otherwise
= IsValid
where
(_, ex_tvs, eq_spec, theta, _, _) = dataConFullSig con
bad msg = NotValid (badCon con (text msg))
no_cons_why :: TyCon -> SDoc
no_cons_why rep_tc = quotes (pprSourceTyCon rep_tc) <+>
text "must have at least one data constructor"
cond_RepresentableOk :: Condition
cond_RepresentableOk _ _ rep_tc = canDoGenerics rep_tc
cond_Representable1Ok :: Condition
cond_Representable1Ok _ _ rep_tc = canDoGenerics1 rep_tc
cond_enumOrProduct :: Class -> Condition
cond_enumOrProduct cls = cond_isEnumeration `orCond`
(cond_isProduct `andCond` cond_args cls)
cond_args :: Class -> Condition
-- For some classes (eg Eq, Ord) we allow unlifted arg types
-- by generating specialised code. For others (eg Data) we don't.
cond_args cls _ _ rep_tc
= case bad_args of
[] -> IsValid
(ty:_) -> NotValid (hang (text "Don't know how to derive" <+> quotes (ppr cls))
2 (text "for type" <+> quotes (ppr ty)))
where
bad_args = [ arg_ty | con <- tyConDataCons rep_tc
, arg_ty <- dataConOrigArgTys con
, isUnliftedType arg_ty
, not (ok_ty arg_ty) ]
cls_key = classKey cls
ok_ty arg_ty
| cls_key == eqClassKey = check_in arg_ty ordOpTbl
| cls_key == ordClassKey = check_in arg_ty ordOpTbl
| cls_key == showClassKey = check_in arg_ty boxConTbl
| cls_key == liftClassKey = check_in arg_ty litConTbl
| otherwise = False -- Read, Ix etc
check_in :: Type -> [(Type,a)] -> Bool
check_in arg_ty tbl = any (eqType arg_ty . fst) tbl
cond_isEnumeration :: Condition
cond_isEnumeration _ _ rep_tc
| isEnumerationTyCon rep_tc = IsValid
| otherwise = NotValid why
where
why = sep [ quotes (pprSourceTyCon rep_tc) <+>
text "must be an enumeration type"
, text "(an enumeration consists of one or more nullary, non-GADT constructors)" ]
-- See Note [Enumeration types] in TyCon
cond_isProduct :: Condition
cond_isProduct _ _ rep_tc
| isProductTyCon rep_tc = IsValid
| otherwise = NotValid why
where
why = quotes (pprSourceTyCon rep_tc) <+>
text "must have precisely one constructor"
cond_functorOK :: Bool -> Bool -> Condition
-- OK for Functor/Foldable/Traversable class
-- Currently: (a) at least one argument
-- (b) don't use argument contravariantly
-- (c) don't use argument in the wrong place, e.g. data T a = T (X a a)
-- (d) optionally: don't use function types
-- (e) no "stupid context" on data type
cond_functorOK allowFunctions allowExQuantifiedLastTyVar _ _ rep_tc
| null tc_tvs
= NotValid (text "Data type" <+> quotes (ppr rep_tc)
<+> text "must have some type parameters")
| not (null bad_stupid_theta)
= NotValid (text "Data type" <+> quotes (ppr rep_tc)
<+> text "must not have a class context:" <+> pprTheta bad_stupid_theta)
| otherwise
= allValid (map check_con data_cons)
where
tc_tvs = tyConTyVars rep_tc
Just (_, last_tv) = snocView tc_tvs
bad_stupid_theta = filter is_bad (tyConStupidTheta rep_tc)
is_bad pred = last_tv `elemVarSet` exactTyCoVarsOfType pred
-- See Note [Check that the type variable is truly universal]
data_cons = tyConDataCons rep_tc
check_con con = allValid (check_universal con : foldDataConArgs (ft_check con) con)
check_universal :: DataCon -> Validity
check_universal con
| allowExQuantifiedLastTyVar
= IsValid -- See Note [DeriveFoldable with ExistentialQuantification]
-- in TcGenFunctor
| Just tv <- getTyVar_maybe (last (tyConAppArgs (dataConOrigResTy con)))
, tv `elem` dataConUnivTyVars con
, not (tv `elemVarSet` exactTyCoVarsOfTypes (dataConTheta con))
= IsValid -- See Note [Check that the type variable is truly universal]
| otherwise
= NotValid (badCon con existential)
ft_check :: DataCon -> FFoldType Validity
ft_check con = FT { ft_triv = IsValid, ft_var = IsValid
, ft_co_var = NotValid (badCon con covariant)
, ft_fun = \x y -> if allowFunctions then x `andValid` y
else NotValid (badCon con functions)
, ft_tup = \_ xs -> allValid xs
, ft_ty_app = \_ x -> x
, ft_bad_app = NotValid (badCon con wrong_arg)
, ft_forall = \_ x -> x }
existential = text "must be truly polymorphic in the last argument of the data type"
covariant = text "must not use the type variable in a function argument"
functions = text "must not contain function types"
wrong_arg = text "must use the type variable only as the last argument of a data type"
checkFlag :: LangExt.Extension -> Condition
checkFlag flag dflags _ _
| xopt flag dflags = IsValid
| otherwise = NotValid why
where
why = text "You need " <> text flag_str
<+> text "to derive an instance for this class"
flag_str = case [ flagSpecName f | f <- xFlags , flagSpecFlag f == flag ] of
[s] -> s
other -> pprPanic "checkFlag" (ppr other)
std_class_via_coercible :: Class -> Bool
-- These standard classes can be derived for a newtype
-- using the coercible trick *even if no -XGeneralizedNewtypeDeriving
-- because doing so gives the same results as generating the boilerplate
std_class_via_coercible clas
= classKey clas `elem` [eqClassKey, ordClassKey, ixClassKey, boundedClassKey]
-- Not Read/Show because they respect the type
-- Not Enum, because newtypes are never in Enum
non_coercible_class :: Class -> Bool
-- *Never* derive Read, Show, Typeable, Data, Generic, Generic1, Lift
-- by Coercible, even with -XGeneralizedNewtypeDeriving
-- Also, avoid Traversable, as the Coercible-derived instance and the "normal"-derived
-- instance behave differently if there's a non-lawful Applicative out there.
-- Besides, with roles, Coercible-deriving Traversable is ill-roled.
non_coercible_class cls
= classKey cls `elem` ([ readClassKey, showClassKey, dataClassKey
, genClassKey, gen1ClassKey, typeableClassKey
, traversableClassKey, liftClassKey ])
badCon :: DataCon -> SDoc -> SDoc
badCon con msg = text "Constructor" <+> quotes (ppr con) <+> msg
------------------------------------------------------------------
newDerivClsInst :: ThetaType -> DerivSpec theta -> TcM ClsInst
newDerivClsInst theta (DS { ds_name = dfun_name, ds_overlap = overlap_mode
, ds_tvs = tvs, ds_cls = clas, ds_tys = tys })
= newClsInst overlap_mode dfun_name tvs theta clas tys
extendLocalInstEnv :: [ClsInst] -> TcM a -> TcM a
-- Add new locally-defined instances; don't bother to check
-- for functional dependency errors -- that'll happen in TcInstDcls
extendLocalInstEnv dfuns thing_inside
= do { env <- getGblEnv
; let inst_env' = extendInstEnvList (tcg_inst_env env) dfuns
env' = env { tcg_inst_env = inst_env' }
; setGblEnv env' thing_inside }
{-
Note [Deriving any class]
~~~~~~~~~~~~~~~~~~~~~~~~~
Classic uses of a deriving clause, or a standalone-deriving declaration, are
for:
* a stock class like Eq or Show, for which GHC knows how to generate
the instance code
* a newtype, via the mechanism enabled by GeneralizedNewtypeDeriving
The DeriveAnyClass extension adds a third way to derive instances, based on
empty instance declarations.
The canonical use case is in combination with GHC.Generics and default method
signatures. These allow us to have instance declarations being empty, but still
useful, e.g.
data T a = ...blah..blah... deriving( Generic )
instance C a => C (T a) -- No 'where' clause
where C is some "random" user-defined class.
This boilerplate code can be replaced by the more compact
data T a = ...blah..blah... deriving( Generic, C )
if DeriveAnyClass is enabled.
This is not restricted to Generics; any class can be derived, simply giving
rise to an empty instance.
Unfortunately, it is not clear how to determine the context (when using a
deriving clause; in standalone deriving, the user provides the context).
GHC uses the same heuristic for figuring out the class context that it uses for
Eq in the case of *-kinded classes, and for Functor in the case of
* -> *-kinded classes. That may not be optimal or even wrong. But in such
cases, standalone deriving can still be used.
Note [Check that the type variable is truly universal]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For Functor and Traversable instances, we must check that the *last argument*
of the type constructor is used truly universally quantified. Example
data T a b where
T1 :: a -> b -> T a b -- Fine! Vanilla H-98
T2 :: b -> c -> T a b -- Fine! Existential c, but we can still map over 'b'
T3 :: b -> T Int b -- Fine! Constraint 'a', but 'b' is still polymorphic
T4 :: Ord b => b -> T a b -- No! 'b' is constrained
T5 :: b -> T b b -- No! 'b' is constrained
T6 :: T a (b,b) -- No! 'b' is constrained
Notice that only the first of these constructors is vanilla H-98. We only
need to take care about the last argument (b in this case). See Trac #8678.
Eg. for T1-T3 we can write
fmap f (T1 a b) = T1 a (f b)
fmap f (T2 b c) = T2 (f b) c
fmap f (T3 x) = T3 (f x)
We need not perform these checks for Foldable instances, however, since
functions in Foldable can only consume existentially quantified type variables,
rather than produce them (as is the case in Functor and Traversable functions.)
As a result, T can have a derived Foldable instance:
foldr f z (T1 a b) = f b z
foldr f z (T2 b c) = f b z
foldr f z (T3 x) = f x z
foldr f z (T4 x) = f x z
foldr f z (T5 x) = f x z
foldr _ z T6 = z
See Note [DeriveFoldable with ExistentialQuantification] in TcGenFunctor.
For Functor and Traversable, we must take care not to let type synonyms
unfairly reject a type for not being truly universally quantified. An
example of this is:
type C (a :: Constraint) b = a
data T a b = C (Show a) b => MkT b
Here, the existential context (C (Show a) b) does technically mention the last
type variable b. But this is OK, because expanding the type synonym C would
give us the context (Show a), which doesn't mention b. Therefore, we must make
sure to expand type synonyms before performing this check. Not doing so led to
Trac #13813.
-}
|
shlevy/ghc
|
compiler/typecheck/TcDerivUtils.hs
|
bsd-3-clause
| 36,389 | 0 | 17 | 10,504 | 5,233 | 2,826 | 2,407 | -1 | -1 |
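-- An illustration, separate from the compiler sources above, of the three
-- deriving mechanisms that DerivSpecMechanism distinguishes, written as the
-- user-level strategies they map to via mechanismToStrategy.
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveAnyClass #-}
module DerivingStrategiesExample where

class Describe a where
  describe :: a -> String
  describe _ = "something"        -- default method, which anyclass deriving relies on

data Colour = Red | Green | Blue
  deriving stock (Eq, Ord, Show)  -- DerivSpecStock: GHC generates the method code

newtype Age = Age Int
  deriving newtype (Eq, Ord, Num) -- DerivSpecNewtype: coerce through the Int instances

data Widget = Widget
  deriving stock Show
  deriving anyclass Describe      -- DerivSpecAnyClass: an empty instance declaration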
{-# LANGUAGE PatternSynonyms #-}
module QueryArrow.FileSystem.Builtin where
import QueryArrow.Syntax.Term
import QueryArrow.Syntax.Type
import QueryArrow.Syntax.Utils
pattern FilePathPredName ns = QPredName ns [] "FILE_PATH"
pattern DirPathPredName ns = QPredName ns [] "DIR_PATH"
pattern FileIdPredName ns = QPredName ns [] "FILE_ID"
pattern DirIdPredName ns = QPredName ns [] "DIR_ID"
pattern FileModePredName ns = QPredName ns [] "FILE_MODE"
pattern DirModePredName ns = QPredName ns [] "DIR_MODE"
pattern FileNamePredName ns = QPredName ns [] "FILE_NAME"
pattern DirNamePredName ns = QPredName ns [] "DIR_NAME"
pattern FileHostPredName ns = QPredName ns [] "FILE_HOST"
pattern DirHostPredName ns = QPredName ns [] "DIR_HOST"
pattern FileSizePredName ns = QPredName ns [] "FILE_SIZE"
-- pattern FileCreateTimePredName ns = QPredName ns [] "FILE_CREATE_TIME"
-- pattern DirCreateTimePredName ns = QPredName ns [] "DIR_CREATE_TIME"
pattern FileModifyTimePredName ns = QPredName ns [] "FILE_MODIFY_TIME"
pattern DirModifyTimePredName ns = QPredName ns [] "DIR_MODIFY_TIME"
pattern FileObjectPredName ns = QPredName ns [] "FILE_OBJ"
pattern DirObjectPredName ns = QPredName ns [] "DIR_OBJ"
pattern FileContentPredName ns = QPredName ns [] "FILE_CONTENT"
pattern DirContentPredName ns = QPredName ns [] "DIR_CONTENT"
pattern FileDirPredName ns = QPredName ns [] "FILE_DIR"
pattern DirDirPredName ns = QPredName ns [] "DIR_DIR"
pattern NewFileObjectPredName ns = QPredName ns [] "NEW_FILE_OBJ"
pattern NewDirObjectPredName ns = QPredName ns [] "NEW_DIR_OBJ"
pattern FileContentRangePredName ns = QPredName ns [] "FILE_CONTENT_RANGE"
pattern FilePathPred ns = Pred (FilePathPredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileObject"), PTPropIO TextType])
pattern DirPathPred ns = Pred (DirPathPredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "DirObject"), PTPropIO TextType])
pattern FileIdPred ns = Pred (FileIdPredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileObject"), PTPropIO Int64Type])
pattern DirIdPred ns = Pred (DirIdPredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "DirObject"), PTPropIO Int64Type])
pattern FileModePred ns = Pred (FileModePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileObject"), PTPropIO Int64Type])
pattern DirModePred ns = Pred (DirModePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "DirObject"), PTPropIO Int64Type])
pattern FileNamePred ns = Pred (FileNamePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileObject"), PTPropIO TextType])
pattern DirNamePred ns = Pred (DirNamePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "DirObject"), PTPropIO TextType])
pattern FileHostPred ns = Pred (FileNamePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileObject"), PTPropIO TextType])
pattern DirHostPred ns = Pred (DirNamePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "DirObject"), PTPropIO TextType])
pattern FileSizePred ns = Pred (FileSizePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileObject"), PTPropIO Int64Type])
-- pattern FileCreateTimePred ns = Pred (FileCreateTimePredName ns) (PredType PropertyPred [PTKeyI (TypeCons "FileObject"), PTPropIO Int64Type])
-- pattern DirCreateTimePred ns = Pred (DirCreateTimePredName ns) (PredType PropertyPred [PTKeyI (TypeCons "DirObject"), PTPropIO Int64Type])
pattern FileModifyTimePred ns = Pred (FileModifyTimePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileObject"), PTPropIO Int64Type])
pattern DirModifyTimePred ns = Pred (DirModifyTimePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "DirObject"), PTPropIO Int64Type])
pattern FileObjectPred ns = Pred (FileObjectPredName ns) (PredType ObjectPred [PTKeyIO (TypeCons "FileObject")])
pattern DirObjectPred ns = Pred (DirObjectPredName ns) (PredType ObjectPred [PTKeyIO (TypeCons "DirObject")])
pattern NewFileObjectPred ns = Pred (NewFileObjectPredName ns) (PredType PropertyPred [PTKeyI TextType, PTPropO (TypeCons "FileObject")])
pattern NewDirObjectPred ns = Pred (NewDirObjectPredName ns) (PredType PropertyPred [PTKeyI TextType, PTPropO (TypeCons "DirObject")])
pattern FileContentPred ns = Pred (FileContentPredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileObject"), PTPropIO (TypeCons "FileContent")])
pattern DirContentPred ns = Pred (DirContentPredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "DirObject"), PTKeyIO (TypeCons "DirContent")])
pattern DirDirPred ns = Pred (DirDirPredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "DirObject"), PTPropIO (TypeCons "DirObject")])
pattern FileDirPred ns = Pred (FileDirPredName ns) (PredType PropertyPred [PTPropIO (TypeCons "FileObject"), PTPropIO (TypeCons "DirObject")])
pattern FileContentRangePred ns = Pred (FileContentRangePredName ns) (PredType PropertyPred [PTKeyIO (TypeCons "FileContent"), PTPropIO Int64Type, PTPropIO Int64Type, PTPropIO ByteStringType])
|
xu-hao/QueryArrow
|
QueryArrow-db-filesystem/src/QueryArrow/FileSystem/Builtin.hs
|
bsd-3-clause
| 5,033 | 0 | 12 | 700 | 1,550 | 738 | 812 | 49 | 0 |
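-- A small usage sketch for the pattern synonyms above (assumes the QueryArrow
-- modules they import are available; the exact name of the predicate-name type
-- defined in QueryArrow.Syntax.Term is not shown above, so no type signature
-- is written here). Matching on a synonym identifies the predicate and binds
-- its namespace in one pattern.
module BuiltinExample where

import QueryArrow.FileSystem.Builtin

-- | Is this predicate name one of the file-system path predicates?
isPathPredName (FilePathPredName _) = True
isPathPredName (DirPathPredName _)  = True
isPathPredName _                    = False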
module ML
( module ML.Num
, module ML.V2
, module ML.V3
, module ML.V4
, module ML.M2
, module ML.M3
, module ML.M4
, module ML.Q
) where
import ML.Num
import ML.V2
import ML.V3
import ML.V4
import ML.M2
import ML.M3
import ML.M4
import ML.Q
|
jxv/ml-hs
|
src/ML.hs
|
bsd-3-clause
| 280 | 0 | 5 | 82 | 94 | 60 | 34 | 17 | 0 |
module LeapYearKata.Day2 (isLeapYear) where
isLeapYear :: Int -> Bool
isLeapYear year = (isDivisibleByFour $ div year 100) && isDivisibleByFour year
where
isDivisibleByFour :: Int -> Bool
isDivisibleByFour num = num `mod` 4 == 0
|
Alex-Diez/haskell-tdd-kata
|
old-katas/src/LeapYearKata/Day2.hs
|
bsd-3-clause
| 270 | 0 | 8 | 75 | 76 | 41 | 35 | 5 | 1 |
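-- The Day2 kata above tests divisibility by four only after dividing by 100,
-- which covers century years but rejects ordinary leap years such as 1996.
-- For comparison, a sketch of the full Gregorian rule:
module LeapYearFull where

isLeapYear :: Int -> Bool
isLeapYear year
  | year `mod` 400 == 0 = True   -- years divisible by 400 are leap years
  | year `mod` 100 == 0 = False  -- other century years are not
  | otherwise           = year `mod` 4 == 0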
{-# LANGUAGE TupleSections #-}
module Language.Haskell.Liquid.Bare.RTEnv (
makeRTEnv
) where
import Prelude hiding (error)
import Data.Graph hiding (Graph)
import Data.Maybe
import qualified Control.Exception as Ex
import qualified Data.HashMap.Strict as M
import qualified Data.List as L
import Language.Fixpoint.Misc (fst3)
import Language.Fixpoint.Types (Expr(..), Symbol)
import Language.Haskell.Liquid.GHC.Misc (sourcePosSrcSpan)
import Language.Haskell.Liquid.Types.RefType (symbolRTyVar)
import Language.Haskell.Liquid.Types
import qualified Language.Haskell.Liquid.Measure as Ms
import Language.Haskell.Liquid.Bare.Env
import Language.Haskell.Liquid.Bare.Expand
import Language.Haskell.Liquid.Bare.OfType
import Language.Haskell.Liquid.Bare.Resolve
--------------------------------------------------------------------------------
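-- | Build the refinement-type alias environment from the given module specs:
-- expression aliases are expanded first, then type aliases. Each group is
-- expanded in dependency order via 'graphExpand', which also rejects cyclic
-- alias definitions.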
makeRTEnv specs
= do makeREAliases ets
makeRTAliases rts
where
rts = (concat [(m,) <$> Ms.aliases s | (m, s) <- specs])
ets = (concat [(m,) <$> Ms.ealiases s | (m, s) <- specs])
makeRTAliases
= graphExpand buildTypeEdges expBody
where
expBody (mod, xt)
= inModule mod $
do let l = rtPos xt
let l' = rtPosE xt
body <- withVArgs l l' (rtVArgs xt) $ ofBareType l $ rtBody xt
setRTAlias (rtName xt) $ mapRTAVars symbolRTyVar $ xt { rtBody = body}
makeREAliases
= graphExpand buildExprEdges expBody
where
expBody (mod, xt)
= inModule mod $
do let l = rtPos xt
let l' = rtPosE xt
body <- withVArgs l l' (rtVArgs xt) $ resolve l =<< (expandExpr $ rtBody xt)
setREAlias (rtName xt) $ xt { rtBody = body }
graphExpand buildEdges expBody xts
= do let table = buildAliasTable xts
graph = buildAliasGraph (buildEdges table) (map snd xts)
checkCyclicAliases table graph
mapM_ expBody $ genExpandOrder table graph
--------------------------------------------------------------------------------
type AliasTable t = M.HashMap Symbol (ModName, RTAlias Symbol t)
buildAliasTable :: [(ModName, RTAlias Symbol t)] -> AliasTable t
buildAliasTable
= M.fromList . map (\(mod, rta) -> (rtName rta, (mod, rta)))
fromAliasSymbol :: AliasTable t -> Symbol -> (ModName, RTAlias Symbol t)
fromAliasSymbol table sym
= fromMaybe err $ M.lookup sym table
where
err = panic Nothing $ "fromAliasSymbol: Dangling alias symbol: " ++ show sym
type Graph t = [Node t]
type Node t = (t, t, [t])
buildAliasGraph :: (t -> [Symbol]) -> [RTAlias Symbol t] -> Graph Symbol
buildAliasGraph buildEdges
= map (buildAliasNode buildEdges)
buildAliasNode :: (t -> [Symbol]) -> RTAlias Symbol t -> Node Symbol
buildAliasNode buildEdges alias
= (rtName alias, rtName alias, buildEdges $ rtBody alias)
checkCyclicAliases :: AliasTable t -> Graph Symbol -> BareM ()
checkCyclicAliases table graph
= case mapMaybe go $ stronglyConnComp graph of
[] ->
return ()
sccs ->
Ex.throw $ map err sccs
where
go (AcyclicSCC _)
= Nothing
go (CyclicSCC vs)
= Just vs
err :: [Symbol] -> Error
err scc@(rta:_)
= ErrAliasCycle { pos = fst $ locate rta
, acycle = map locate scc
}
err []
= panic Nothing "Bare.RTEnv.checkCyclicAliases: No type aliases in reported cycle"
locate sym
= ( sourcePosSrcSpan $ rtPos $ snd $ fromAliasSymbol table sym
, pprint sym
)
genExpandOrder :: AliasTable t -> Graph Symbol -> [(ModName, RTAlias Symbol t)]
genExpandOrder table graph
= map (fromAliasSymbol table) symOrder
where
(digraph, lookupVertex, _)
= graphFromEdges graph
symOrder
= map (fst3 . lookupVertex) $ reverse $ topSort digraph
--------------------------------------------------------------------------------
ordNub :: Ord a => [a] -> [a]
ordNub = map head . L.group . L.sort
buildTypeEdges :: AliasTable BareType -> BareType -> [Symbol]
buildTypeEdges table = ordNub . go
where
go :: BareType -> [Symbol]
go (RApp c ts rs _) = go_alias (val c) ++ concatMap go ts ++ concatMap go (mapMaybe go_ref rs)
go (RFun _ t1 t2 _) = go t1 ++ go t2
go (RAppTy t1 t2 _) = go t1 ++ go t2
go (RAllE _ t1 t2) = go t1 ++ go t2
go (REx _ t1 t2) = go t1 ++ go t2
go (RAllT _ t) = go t
go (RAllP _ t) = go t
go (RAllS _ t) = go t
go (RVar _ _) = []
go (RExprArg _) = []
go (RHole _) = []
go (RRTy env _ _ t) = concatMap (go . snd) env ++ go t
go_alias c = [c | M.member c table]
-- case M.lookup c table of
-- Just _ -> [c]
-- Nothing -> [ ]
go_ref (RProp _ (RHole _)) = Nothing
go_ref (RProp _ t) = Just t
buildExprEdges table = ordNub . go
where
go :: Expr -> [Symbol]
go (EApp e1 e2) = go e1 ++ go e2
go (ENeg e) = go e
go (EBin _ e1 e2) = go e1 ++ go e2
go (EIte _ e1 e2) = go e1 ++ go e2
go (ECst e _) = go e
go (ESym _) = []
go (ECon _) = []
go (EVar v) = go_alias v
go (PAnd ps) = concatMap go ps
go (POr ps) = concatMap go ps
go (PNot p) = go p
go (PImp p q) = go p ++ go q
go (PIff p q) = go p ++ go q
go (PAll _ p) = go p
go (ELam _ e) = go e
go (PAtom _ e1 e2) = go e1 ++ go e2
go (ETApp e _) = go e
go (ETAbs e _) = go e
go (PKVar _ _) = []
go (PExist _ e) = go e
go PGrad = []
go_alias f = [f | M.member f table ]
|
ssaavedra/liquidhaskell
|
src/Language/Haskell/Liquid/Bare/RTEnv.hs
|
bsd-3-clause
| 5,742 | 0 | 15 | 1,716 | 2,160 | 1,106 | 1,054 | 132 | 21 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module React.Flux.Mui.DropDownMenu where
import Protolude
import Data.Aeson
import Data.Aeson.Casing
import Data.String (String)
import React.Flux
import React.Flux.Mui.Util
data DropDownMenu = DropDownMenu
{ dropDownMenuAnimated :: !(Maybe Bool)
, dropDownMenuAutoWidth :: !(Maybe Bool)
, dropDownMenuClassName :: !(Maybe Text)
, dropDownMenuDisabled :: !(Maybe Bool)
, dropDownMenuMaxHeight :: !(Maybe Integer)
, dropDownMenuOpenImmediately :: !(Maybe Bool)
} deriving (Generic, Show)
instance ToJSON DropDownMenu where
toJSON =
genericToJSON $ aesonDrop (length ("DropDownMenu" :: String)) camelCase
defDropDownMenu :: DropDownMenu
defDropDownMenu =
DropDownMenu
{ dropDownMenuAnimated = Just True
, dropDownMenuAutoWidth = Just True
, dropDownMenuClassName = Nothing
, dropDownMenuDisabled = Just False
, dropDownMenuMaxHeight = Just 500
, dropDownMenuOpenImmediately = Just False
}
dropDownMenu_ ::
DropDownMenu
-> [PropertyOrHandler handler]
-> ReactElementM handler ()
-> ReactElementM handler ()
dropDownMenu_ args props =
foreign_ "DropDownMenu" (fromMaybe [] (toProps args) ++ props)
|
pbogdan/react-flux-mui
|
react-flux-mui/src/React/Flux/Mui/DropDownMenu.hs
|
bsd-3-clause
| 1,216 | 0 | 11 | 194 | 313 | 173 | 140 | 48 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleContexts #-}
module Data.PList.Binary ( PList(..)
, _PBool
, _PInt
, _PReal
, _PDate
, _PData
, _PASCII
, _PUTF16
, _PUID
, _PArray
, _PDict
, decodePList
, encodePList
) where
import Prelude as P hiding (mapM)
import qualified Data.HashMap.Strict as H
import qualified Data.Vector as V hiding (replicateM)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.Binary.IEEE754
import Data.Bits
import Data.Int
import GHC.Float
import Data.Time.Clock.POSIX
import qualified Data.Text as T
import Data.Text.Encoding as TE
import Control.Monad as M hiding (mapM)
import Control.Monad.Except hiding (mapM)
import Control.Monad.Identity hiding (mapM)
import qualified Control.Monad.State as S
import Data.Bifunctor
import Control.Lens
import Data.Traversable
-- see http://opensource.apple.com/source/CF/CF-744.18/CFBinaryPList.c for reference
-- | A property list. Used by OS X and iOS
data PList = PBool Bool -- ^ boolean
| PInt Int64 -- ^ signed integer
| PReal Double -- ^ floating point
| PDate POSIXTime -- ^ date
| PData B.ByteString -- ^ binary data
| PASCII B.ByteString -- ^ ascii string
| PUTF16 T.Text -- ^ utf-16 string
| PUID Word64 -- ^ unsigned integer
| PArray (V.Vector PList) -- ^ array
| PDict (H.HashMap B.ByteString PList) -- ^ dictionary
deriving (Show, Eq)
makePrisms ''PList
-- Intermediate plist data structure
-- TODO: don't use an intermediate data structure
data ImPList = IBool Bool
| IInt Int64
| IReal Double
| IDate POSIXTime
| IData B.ByteString
| IASCII B.ByteString
| IUTF16 T.Text
| IUID Word64
| IArray [Int] -- an array of references to other objects
             | IDict [(Int, Int)] -- a dictionary of references
deriving (Show)
fromIntermediate :: [ImPList] -> [PList]
fromIntermediate xs = converted
where
converted = P.map convert xs
convert (IBool x) = PBool x
convert (IInt x) = PInt x
convert (IReal x) = PReal x
convert (IDate x) = PDate x
convert (IData x) = PData x
convert (IASCII x) = PASCII x
convert (IUTF16 x) = PUTF16 x
convert (IUID x) = PUID x
convert (IArray x) = PArray $ V.fromList (P.map (converted !!) x)
convert (IDict x) = PDict $ H.fromList (P.map (\(a,b) -> (unwrap (xs !! a), converted !! b)) x)
unwrap (IASCII x) = x
data Trailer = Trailer { unused1 :: Word8
, unused2 :: Word32
, shortVersion :: Word8
, offsetIntSize :: Word8
, objectRefSize :: Word8
, numObjects :: Word64
, topObject :: Word64
, offsetTableOffset :: Word64
} deriving (Show)
instance Binary Trailer where
get = Trailer <$> getWord8 <*> getWord32be <*> getWord8 <*> getWord8 <*> getWord8 <*> getWord64be <*> getWord64be <*> getWord64be
put t = do
putWord8 0
putWord32be 0
putWord8 $ shortVersion t
putWord8 $ offsetIntSize t
putWord8 $ objectRefSize t
putWord64be $ numObjects t
putWord64be $ topObject t
putWord64be $ offsetTableOffset t
trailerSize :: Int64
trailerSize = 32
decodePList :: BL.ByteString -> Either String PList
decodePList s = runExcept $ do
unless (BL.take 8 s == "bplist00") $ throwError "invalid file format, must be bplist00"
-- decode the trailer to figure out where offsets are
trailer <- decodeBinary (BL.drop (BL.length s - trailerSize) s) get
-- decode all the offsets
offsets <- decodeBinary (BL.drop (fromIntegral $ offsetTableOffset trailer) s)
(replicateM (fromIntegral $ numObjects trailer) $ getWordbe $ fromIntegral $ offsetIntSize trailer)
-- transform each offset into an object
objects <- mapM (\off -> decodeBinary (BL.drop (fromIntegral off) s) (getObject (fromIntegral $ objectRefSize trailer))) offsets
return $ fromIntermediate objects !! fromIntegral (topObject trailer)
where
thrd (_, _, c) = c
decodeBinary :: BL.ByteString -> Get a -> Except String a
decodeBinary str g = ExceptT $ Identity $ bimap thrd thrd $ runGetOrFail g str
getWordbe :: Int -> Get Int
getWordbe size = do
bytes <- replicateM size getWord8
return $ P.foldl (\b a -> shiftL b 8 .|. fromIntegral a) 0 bytes
getObject refSize = do
let getRef = getWordbe refSize
-- items are prefixed by type and size
w <- getWord8
      let l = w .&. 0xf -- low nibble is the length
          i = shiftR w 4 -- high nibble is the type
len <- fromIntegral <$>
case l of
15 | i /= 0 && i /= 1 && i /= 2 && i /= 3 -> do
          -- if the size is 0xf, then the actual size is encoded as a plist integer following this byte
plint <- getObject refSize :: Get ImPList
case plint of
IInt int -> return int
_ -> fail "Expected integer for extended length"
_ -> return $ fromIntegral l
case i of
0x0 -> case l of -- bool is encoded in length as 0b1001 for true and 0b1000 for false
8 -> return $ IBool False
9 -> return $ IBool True
_ -> fail $ "unexpected bool constant " ++ show l
0x1 -> IInt <$> case l of -- 2^i = byte length of integer TODO: support arbitrary lengths
0 -> fromIntegral <$> (get :: Get Int8)
1 -> fromIntegral <$> (get :: Get Int16)
2 -> fromIntegral <$> (get :: Get Int32)
3 -> fromIntegral <$> (get :: Get Int64)
x -> fail $ "invalid integer length: " P.++ show x
0x2 -> IReal <$> case l of -- similar encoding to integer
2 -> float2Double <$> getFloat32be
3 -> getFloat64be
_ -> fail "invalid float size"
0x3 -> IDate <$> realToFrac <$> getFloat64be
0x4 -> IData <$> getByteString len
0x5 -> IASCII <$> getByteString len
0x6 -> IUTF16 <$> (decodeUtf16BE <$> getByteString (len * 2))
0x8 -> IUID <$> case l of
0 -> fromIntegral <$> (get :: Get Word8)
1 -> fromIntegral <$> (get :: Get Word16)
2 -> fromIntegral <$> (get :: Get Word32)
3 -> fromIntegral <$> (get :: Get Word64)
_ -> fail "invalid UID length"
-- arrays and dictionaries contain references to other elements
0xa -> IArray <$> replicateM len getRef -- temp store as ints
0xd -> do
keys <- replicateM len getRef
vals <- replicateM len getRef
return $ IDict $ zip keys vals
x -> fail $ "Unexpected type: " P.++ show x
-- types as defined by apple
typeId :: PList -> Word8
typeId (PBool _) = 0x0
typeId (PInt _) = 0x1
typeId (PReal _) = 0x2
typeId (PDate _) = 0x3
typeId (PData _) = 0x4
typeId (PASCII _) = 0x5
typeId (PUTF16 _) = 0x6
typeId (PUID _) = 0x8
typeId (PArray _) = 0xa
typeId (PDict _) = 0xd
-- TODO: pack ints and words based on size
-- | How many bytes an encoded object occupies
elemLen :: PList -> Int
elemLen (PBool _) = 0
elemLen (PInt _) = 3
elemLen (PReal _) = 3
elemLen (PDate _) = 3
elemLen (PData x) = B.length x
elemLen (PASCII x) = B.length x
elemLen (PUTF16 x) = T.length x
elemLen (PUID _) = 3
elemLen (PArray x) = V.length x
elemLen (PDict x) = H.size x
data PState = PState { _refNum :: Int
, _offset :: Int
, _objOffsets :: [Int]
} deriving (Show)
makeLenses ''PState
type PutState = S.StateT PState PutM ()
-- | binary encode a PList
encodePList :: PList -> BL.ByteString
encodePList plist = runPut $ do
(r,s) <- S.runStateT (do
putByteString' "bplist00"
putObjectOffset plist
) PState { _refNum = 1, _offset = 0, _objOffsets = [] }
let trailer = Trailer { unused1 = 0
, unused2 = 0
, shortVersion = 0
, offsetIntSize = fromIntegral $ bytesToEncode $ fromIntegral $ length $ s ^. objOffsets
, objectRefSize = fromIntegral $ bytesToEncode $ fromIntegral numRefs
, numObjects = fromIntegral numRefs
, topObject = 0
, offsetTableOffset = fromIntegral $ s ^. offset
}
S.evalStateT (mapM_ (putWordbe (offsetIntSize trailer)) (s ^. objOffsets)) PState { _refNum = 1, _offset = 0, _objOffsets = [] }
put trailer
return r
where
numRefs :: Int
numRefs = numRefs' plist + 1
numRefs' (PArray x) = V.length x + V.sum (V.map numRefs' x)
numRefs' (PDict x) = H.size x * 2 + sum (map numRefs' (H.elems x))
numRefs' _ = 0
bytesToEncode :: Int -> Int
bytesToEncode numThings = let go 0 i = i
go x i = go (shiftR x 8) (i+1)
in go numThings 0
putRef :: Int -> PutState
putRef x = do
refNum += 1
putWordbe (bytesToEncode numRefs) x
putWordbe maxSize x | maxSize <= 1 = putWord8' $ fromIntegral x
putWordbe maxSize x | maxSize <= 2 = putWord16be' $ fromIntegral x
putWordbe maxSize x | maxSize <= 4 = putWord32be' $ fromIntegral x
putWordbe maxSize x | maxSize <= 8 = putWord64be' $ fromIntegral x
putObjectLen :: PList -> PutState
putObjectLen (PBool True) = putWord8' 9
putObjectLen (PBool False) = putWord8' 8
putObjectLen x | elemLen x < 15 = putWord8' $ shiftL (typeId x) 4 .|. fromIntegral (elemLen x)
putObjectLen x = putWord8' (shiftL (typeId x) 4 .|. 0xf) >> putObject (PInt $ fromIntegral $ elemLen x)
putObjectOffset :: PList -> PutState
putObjectOffset x = do
curOffset <- use offset
objOffsets <>= [curOffset]
putObject x
putObject :: PList -> PutState
putObject x = putObjectLen x >> putObject' x
putObject' :: PList -> PutState
putObject' (PBool _) = return () -- bool is encoded with size of type
putObject' (PInt x) = putInt64' x
putObject' (PReal x) = putDouble' x
putObject' (PDate x) = putDouble' $ fromRational $ toRational x
putObject' (PData x) = putByteString' x
putObject' (PASCII x) = putByteString' x
putObject' (PUTF16 x) = putByteString' $ TE.encodeUtf16BE x
putObject' (PUID x) = putWord64' x
putObject' (PArray x) = do
ind <- use refNum
mapM_ putRef $ getRefs ind $ V.toList x
V.mapM_ putObjectOffset x
putObject' (PDict x) = do
ind <- use refNum
mapM_ putRef [ind.. ind + H.size x - 1]
mapM_ putRef $ getRefs (ind + H.size x - 1) $ H.elems x
mapM_ (putObjectOffset . PASCII) $ H.keys x
mapM_ putObjectOffset $ H.elems x
getRefs _ [] = []
getRefs ind (x:xs) = ind : getRefs (ind + numRefs' x + 1) xs
putInt64' x = do
lift $ put x
offset += 8
putWord64' x = do
lift $ put x
offset += 8
putDouble' x = do
lift $ putFloat64be x
offset += 8
putByteString' x = do
lift $ putByteString x
offset += B.length x
putWord8' x = do
lift $ putWord8 x
offset += 1
putWord16be' x = do
lift $ putWord16be x
offset += 2
putWord32be' x = do
lift $ putWord32be x
offset += 4
putWord64be' x = do
lift $ putWord64be x
offset += 8
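-- A minimal usage sketch ('_examplePList' is illustrative and not part of the
-- original module): encode a small property list with 'encodePList' and decode
-- it back with 'decodePList'.
_examplePList :: Either String PList
_examplePList = decodePList (encodePList sample)
  where
    sample = PArray (V.fromList [PInt 42, PBool True, PReal 3.5])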
|
tkonolige/haskell-bplist
|
src/Data/PList/Binary.hs
|
bsd-3-clause
| 12,833 | 0 | 23 | 4,684 | 3,699 | 1,877 | 1,822 | 278 | 25 |
module System.Mesos.Raw.MasterInfo where
import System.Mesos.Internal
type MasterInfoPtr = Ptr MasterInfo
foreign import ccall unsafe "ext/types.h toMasterInfo" c_toMasterInfo
:: Ptr CChar -- infoID
-> CInt -- infoIDLen
-> CUInt -- infoIP
-> Ptr CUInt -- infoPort
-> Ptr CChar -- pid
-> CInt -- pidLen
-> Ptr CChar -- hostname
-> CInt -- hostnameLen
-> Ptr CChar -- version
-> CInt -- versionLen
-> IO MasterInfoPtr
foreign import ccall unsafe "ext/types.h fromMasterInfo" c_fromMasterInfo
:: MasterInfoPtr -- info
-> Ptr (Ptr CChar) -- infoId
-> Ptr CInt -- infoIdlen
-> Ptr CUInt -- infoIP
-> Ptr CUInt -- infoPort
-> Ptr (Ptr CChar) -- pid
-> Ptr CInt -- pidLen
-> Ptr (Ptr CChar) -- hostname
-> Ptr CInt -- hostnameLen
-> Ptr (Ptr CChar) -- version
-> Ptr CInt -- versionLen
-> IO ()
foreign import ccall unsafe "ext/types.h destroyMasterInfo" c_destroyMasterInfo
:: MasterInfoPtr
-> IO ()
instance CPPValue MasterInfo where
marshal i = do
(idp, idl) <- cstring $ masterInfoId' i
(pidp, pidl) <- maybeCString $ masterInfoPid i
(hnp, hnl) <- maybeCString $ masterInfoHostname i
(verp, verl) <- maybeCString $ masterInfoVersion i
prt <- allocMaybe $ fmap CUInt $ masterInfoPort i
liftIO $ c_toMasterInfo idp (fromIntegral idl) (CUInt $ masterInfoIp i) prt pidp (fromIntegral pidl) hnp (fromIntegral hnl) verp (fromIntegral verl)
unmarshal i = do
(idpP, idlP) <- arrayPair
ipP <- alloc
portP <- alloc
(pidpP, pidlP) <- arrayPair
(hnpP, hnlP) <- arrayPair
(verpP, verlP) <- arrayPair
poke pidpP nullPtr
poke hnpP nullPtr
poke verpP nullPtr
liftIO $ c_fromMasterInfo i idpP idlP ipP portP pidpP pidlP hnpP hnlP verpP verlP
mID <- peekCString (idpP, idlP)
(CUInt ip) <- peek ipP
(CUInt port) <- peek portP
pid <- peekMaybeBS pidpP pidlP
hn <- peekMaybeBS hnpP hnlP
version <- peekMaybeBS verpP verlP
return $ MasterInfo mID ip (Just port) pid hn version
destroy = c_destroyMasterInfo
equalExceptDefaults (MasterInfo mID ip p pid hn ver) (MasterInfo mID' ip' p' pid' hn' ver') = mID == mID' && ip == ip' && defEq 5050 p p' && pid == pid' && hn == hn' && ver == ver'
|
Atidot/hs-mesos
|
src/System/Mesos/Raw/MasterInfo.hs
|
mit
| 2,231 | 0 | 18 | 510 | 767 | 380 | 387 | 59 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Rackspace.MailGun
( Message (..)
, sendMessage
, sendWith
) where
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Trans.Control
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Text
import Data.Text.Encoding
import Network.HTTP.Client.MultipartFormData
import Network.HTTP.Conduit
baseUrl :: String
baseUrl = "https://api.mailgun.net/v2"
data Message = TextMessage
{ from :: Text
, to :: Text
, cc :: Maybe Text
, bcc :: Maybe Text
, subject :: Maybe Text
, text :: Text }
| HtmlMessage
{ from :: Text
, to :: Text
, cc :: Maybe Text
, bcc :: Maybe Text
, subject :: Maybe Text
, html :: Text }
deriving (Eq, Show)
partText :: Text -> Text -> [Part]
partText name value = [ partBS name (encodeUtf8 value) ]
partMaybeText :: Text -> Maybe Text -> [Part]
partMaybeText name value = case value of
Just val -> [ partBS name (encodeUtf8 val) ]
Nothing -> []
buildTail :: Message -> [Part]
buildTail TextMessage{..} = partText "text" text
buildTail HtmlMessage{..} = partText "html" html
buildBase :: Message -> [Part]
buildBase msg = partText "from" (from msg)
++ partText "to" (to msg)
++ partMaybeText "cc" (cc msg)
++ partMaybeText "bcc" (bcc msg)
++ partMaybeText "subject" (subject msg)
++ buildTail msg
sendMessage :: (MonadIO m, MonadBaseControl IO m, MonadThrow m) =>
String -> String -> Message -> m (Response LBS.ByteString)
sendMessage domain apiKey message = do
withManager $ \manager -> do
sendWith manager domain apiKey message
sendWith :: (MonadIO m, MonadBaseControl IO m, MonadThrow m) =>
Manager -> String -> String -> Message -> m (Response LBS.ByteString)
sendWith manager domain apiKey message = do
initReq <- parseUrl $ baseUrl ++ "/" ++ domain ++ "/messages"
let authReq = applyBasicAuth "api" (BS.pack apiKey) initReq
postReq = authReq { method = "POST" }
res <- flip httpLbs manager =<<
(formDataBody (buildBase message) postReq)
return res
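-- A minimal usage sketch ('_exampleSend' is illustrative and not part of the
-- original module; the domain, API key and addresses below are placeholders):
_exampleSend :: IO (Response LBS.ByteString)
_exampleSend =
    sendMessage "example.com" "key-PLACEHOLDER" TextMessage
        { from = "[email protected]"
        , to = "[email protected]"
        , cc = Nothing
        , bcc = Nothing
        , subject = Just "Hello"
        , text = "Sent via Mailgun" }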
|
AndrewRademacher/mailgun
|
src/Rackspace/MailGun.hs
|
mit
| 2,765 | 0 | 13 | 958 | 718 | 387 | 331 | 65 | 2 |
{-# LANGUAGE BangPatterns #-}
{-| Implementation of the Ganeti Query2 server.
-}
{-
Copyright (C) 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Query.Server
( ConfigReader
, prepQueryD
, runQueryD
) where
import Control.Applicative
import Control.Concurrent
import Control.Exception
import Data.Bits (bitSize)
import Data.Maybe
import qualified Network.Socket as S
import qualified Text.JSON as J
import Text.JSON (showJSON, JSValue(..))
import System.Info (arch)
import qualified Ganeti.Constants as C
import Ganeti.Errors
import qualified Ganeti.Path as Path
import Ganeti.Daemon
import Ganeti.Objects
import qualified Ganeti.Config as Config
import Ganeti.BasicTypes
import Ganeti.Logging
import Ganeti.Luxi
import Ganeti.OpCodes (TagObject(..))
import qualified Ganeti.Query.Language as Qlang
import Ganeti.Query.Query
import Ganeti.Query.Filter (FilterConstructor, makeSimpleFilter
, makeHostnameFilter)
-- | A type for functions that can return the configuration when
-- executed.
type ConfigReader = IO (Result ConfigData)
-- | Helper for classic queries.
handleClassicQuery :: ConfigData -- ^ Cluster config
-> Qlang.ItemType -- ^ Query type
-> [Either String Integer] -- ^ Requested names
-- (empty means all)
-> [String] -- ^ Requested fields
-> Maybe FilterConstructor -- ^ the filter algorithm
-- to be used, defaults to
-- makeSimpleFilter
-> Bool -- ^ Whether to do sync queries or not
-> IO (GenericResult GanetiException JSValue)
handleClassicQuery _ _ _ _ _ True =
return . Bad $ OpPrereqError "Sync queries are not allowed" ECodeInval
handleClassicQuery cfg qkind names fields filterconstr _ = do
let fltcon = fromMaybe makeSimpleFilter filterconstr
flt = fltcon (nameField qkind) names
qr <- query cfg True (Qlang.Query qkind fields flt)
return $ showJSON <$> (qr >>= queryCompat)
-- | Minimal wrapper to handle the missing config case.
handleCallWrapper :: Result ConfigData -> LuxiOp -> IO (ErrorResult JSValue)
handleCallWrapper (Bad msg) _ =
return . Bad . ConfigurationError $
"I do not have access to a valid configuration, cannot\
\ process queries: " ++ msg
handleCallWrapper (Ok config) op = handleCall config op
-- | Actual luxi operation handler.
handleCall :: ConfigData -> LuxiOp -> IO (ErrorResult JSValue)
handleCall cdata QueryClusterInfo =
let cluster = configCluster cdata
hypervisors = clusterEnabledHypervisors cluster
def_hv = case hypervisors of
x:_ -> showJSON x
[] -> JSNull
bits = show (bitSize (0::Int)) ++ "bits"
arch_tuple = [bits, arch]
obj = [ ("software_version", showJSON C.releaseVersion)
, ("protocol_version", showJSON C.protocolVersion)
, ("config_version", showJSON C.configVersion)
, ("os_api_version", showJSON $ maximum C.osApiVersions)
, ("export_version", showJSON C.exportVersion)
, ("architecture", showJSON arch_tuple)
, ("name", showJSON $ clusterClusterName cluster)
, ("master", showJSON $ clusterMasterNode cluster)
, ("default_hypervisor", def_hv)
, ("enabled_hypervisors", showJSON hypervisors)
, ("hvparams", showJSON $ clusterHvparams cluster)
, ("os_hvp", showJSON $ clusterOsHvp cluster)
, ("beparams", showJSON $ clusterBeparams cluster)
, ("osparams", showJSON $ clusterOsparams cluster)
, ("ipolicy", showJSON $ clusterIpolicy cluster)
, ("nicparams", showJSON $ clusterNicparams cluster)
, ("ndparams", showJSON $ clusterNdparams cluster)
, ("diskparams", showJSON $ clusterDiskparams cluster)
, ("candidate_pool_size",
showJSON $ clusterCandidatePoolSize cluster)
, ("master_netdev", showJSON $ clusterMasterNetdev cluster)
, ("master_netmask", showJSON $ clusterMasterNetmask cluster)
, ("use_external_mip_script",
showJSON $ clusterUseExternalMipScript cluster)
, ("volume_group_name",
maybe JSNull showJSON (clusterVolumeGroupName cluster))
, ("drbd_usermode_helper",
maybe JSNull showJSON (clusterDrbdUsermodeHelper cluster))
, ("file_storage_dir", showJSON $ clusterFileStorageDir cluster)
, ("shared_file_storage_dir",
showJSON $ clusterSharedFileStorageDir cluster)
, ("maintain_node_health",
showJSON $ clusterMaintainNodeHealth cluster)
, ("ctime", showJSON $ clusterCtime cluster)
, ("mtime", showJSON $ clusterMtime cluster)
, ("uuid", showJSON $ clusterUuid cluster)
, ("tags", showJSON $ clusterTags cluster)
, ("uid_pool", showJSON $ clusterUidPool cluster)
, ("default_iallocator",
showJSON $ clusterDefaultIallocator cluster)
, ("reserved_lvs", showJSON $ clusterReservedLvs cluster)
, ("primary_ip_version",
showJSON . ipFamilyToVersion $ clusterPrimaryIpFamily cluster)
, ("prealloc_wipe_disks",
showJSON $ clusterPreallocWipeDisks cluster)
, ("hidden_os", showJSON $ clusterHiddenOs cluster)
, ("blacklisted_os", showJSON $ clusterBlacklistedOs cluster)
]
in return . Ok . J.makeObj $ obj
handleCall cfg (QueryTags kind) =
let tags = case kind of
TagCluster -> Ok . clusterTags $ configCluster cfg
TagGroup name -> groupTags <$> Config.getGroup cfg name
TagNode name -> nodeTags <$> Config.getNode cfg name
TagInstance name -> instTags <$> Config.getInstance cfg name
in return (J.showJSON <$> tags)
handleCall cfg (Query qkind qfields qfilter) = do
result <- query cfg True (Qlang.Query qkind qfields qfilter)
return $ J.showJSON <$> result
handleCall _ (QueryFields qkind qfields) = do
let result = queryFields (Qlang.QueryFields qkind qfields)
return $ J.showJSON <$> result
handleCall cfg (QueryNodes names fields lock) =
handleClassicQuery cfg (Qlang.ItemTypeOpCode Qlang.QRNode)
(map Left names) fields (Just makeHostnameFilter) lock
handleCall cfg (QueryGroups names fields lock) =
handleClassicQuery cfg (Qlang.ItemTypeOpCode Qlang.QRGroup)
(map Left names) fields Nothing lock
handleCall cfg (QueryJobs names fields) =
handleClassicQuery cfg (Qlang.ItemTypeLuxi Qlang.QRJob)
(map (Right . fromIntegral . fromJobId) names) fields Nothing False
handleCall _ op =
return . Bad $
GenericError ("Luxi call '" ++ strOfOp op ++ "' not implemented")
-- | Given a decoded luxi request, executes it and sends the luxi
-- response back to the client.
handleClientMsg :: Client -> ConfigReader -> LuxiOp -> IO Bool
handleClientMsg client creader args = do
cfg <- creader
logDebug $ "Request: " ++ show args
call_result <- handleCallWrapper cfg args
(!status, !rval) <-
case call_result of
Bad err -> do
logWarning $ "Failed to execute request " ++ show args ++ ": "
++ show err
return (False, showJSON err)
Ok result -> do
-- only log the first 2,000 chars of the result
logDebug $ "Result (truncated): " ++ take 2000 (J.encode result)
logInfo $ "Successfully handled " ++ strOfOp args
return (True, result)
sendMsg client $ buildResponse status rval
return True
-- | Handles one iteration of the client protocol: receives message,
-- checks it for validity and decodes it, returns response.
handleClient :: Client -> ConfigReader -> IO Bool
handleClient client creader = do
!msg <- recvMsgExt client
case msg of
RecvConnClosed -> logDebug "Connection closed" >> return False
RecvError err -> logWarning ("Error during message receiving: " ++ err) >>
return False
RecvOk payload ->
case validateCall payload >>= decodeCall of
Bad err -> do
let errmsg = "Failed to parse request: " ++ err
logWarning errmsg
sendMsg client $ buildResponse False (showJSON errmsg)
return False
Ok args -> handleClientMsg client creader args
-- | Main client loop: runs one loop of 'handleClient', and if that
-- doesn't report a finished (closed) connection, restarts itself.
clientLoop :: Client -> ConfigReader -> IO ()
clientLoop client creader = do
result <- handleClient client creader
if result
then clientLoop client creader
else closeClient client
-- | Main loop: accepts clients, forks an I/O thread to handle that
-- client, and then restarts.
mainLoop :: ConfigReader -> S.Socket -> IO ()
mainLoop creader socket = do
client <- acceptClient socket
_ <- forkIO $ clientLoop client creader
mainLoop creader socket
-- | Function that prepares the server socket.
prepQueryD :: Maybe FilePath -> IO (FilePath, S.Socket)
prepQueryD fpath = do
def_socket <- Path.defaultQuerySocket
let socket_path = fromMaybe def_socket fpath
cleanupSocket socket_path
s <- describeError "binding to the Luxi socket"
Nothing (Just socket_path) $ getServer socket_path
return (socket_path, s)
-- | Main function that runs the query endpoint.
runQueryD :: (FilePath, S.Socket) -> ConfigReader -> IO ()
runQueryD (socket_path, server) creader =
finally
(mainLoop creader server)
(closeServer socket_path server)
|
dblia/nosql-ganeti
|
src/Ganeti/Query/Server.hs
|
gpl-2.0
| 10,460 | 0 | 18 | 2,672 | 2,342 | 1,217 | 1,125 | 187 | 5 |
module Main where
import qualified Api.Server as S
main :: IO ()
main = S.main
|
Geeroar/ut-haskell
|
src/Main.hs
|
apache-2.0
| 81 | 0 | 6 | 17 | 29 | 18 | 11 | 4 | 1 |
{-|
Module : Models.TimedAutomaton
Description : A (simplified) type for Timed Automata (TA) extended as in the UPPAAL tool (and XTA format).
Copyright : (c) 2017 Pascal Poizat
License : Apache-2.0 (see the file LICENSE)
Maintainer : [email protected]
Stability : experimental
Portability : unknown
-}
{-# LANGUAGE FlexibleInstances #-}
module Models.TimedAutomaton (
-- * constructors
Clock(..)
, VariableType(..)
, VariableName(..)
, VariableTyping(..)
, Bounds(..)
, Expression(..)
, VariableAssignment(..)
, Location(..)
, ClockOperator(..)
, ClockConstraint(..)
, ClockReset(..)
, Edge(..)
, TimedAutomaton(..)
, TimedAutomataNetwork(..)
, ToXta
-- * validity checking
, isValidTA
-- * get/set
, isCommitted
, isUrgent
, setCommitted
, setUrgent
-- * modifications
, name
, rename
, prefixBy
, suffixBy
, relabel
, rename'
, addObservers
-- * model to text transformations
, asXta)
where
import Data.List (delete)
import Data.Map as M (Map (..), fromList, keys,
member, (!))
import Data.Monoid (Any (..), getAny, (<>))
import Data.Set as S (fromList)
import Helpers (allIn, removeDuplicates)
import Models.TCommunication (TCommunication (..))
import Models.Events (CTIOEvent (..), TIOEvent (..))
import Models.Internal (Internal (..))
import Models.Name (Name (..), isValidName)
import Models.Named (Named (..))
import Numeric.Natural
import Transformations.ModelToText (foldMapToString,
foldMapToString')
import Transformations.Substitution (Substitution, apply)
{-|
A clock.
-}
newtype Clock =
Clock String
deriving (Eq, Ord, Show)
{-|
A location.
-}
newtype Location b =
Location b
deriving (Eq, Ord, Show)
{-|
A variable type, used for variables.
-}
data VariableType = IntType Bounds
| BoolType
deriving (Eq, Show)
{-|
Boundaries for the integer type.
This is a simplified version of the UPPAAL model (no constants).
-}
data Bounds = NoBounds
| Bounds
{ lowerBound :: Int
, higherBound :: Int}
deriving (Eq, Show)
{-|
Variable names are names over String.
-}
type VariableName = Name String
{-|
Variables are given as a name, a type and possibly an initialization.
-}
data VariableTyping = VariableTyping
{ varname :: VariableName
, vartype :: VariableType
, varinit :: Maybe Expression}
deriving (Eq, Show)
{-|
An expression used in assignments.
The expression is abstracted as a String.
We suppose this String is correct (type and use of variables).
-}
newtype Expression = Expression String
deriving (Eq, Ord, Show)
{-|
An assignment for a variable.
-}
data VariableAssignment = VariableAssignment
{ variable :: VariableName
, value :: Expression}
deriving (Eq, Ord, Show)
{-|
A clock comparison operator.
-}
data ClockOperator
= LT
| GT
| LE
| GE
| EQ
deriving (Eq, Ord, Show)
{-|
A clock constraint.
-}
data ClockConstraint = ClockConstraint
{ ccclock :: Clock -- ^ clock
, ccoperator :: ClockOperator -- ^ comparison operator
, ccvalue :: Natural -- ^ value to compare to
} deriving (Eq, Ord, Show)
{-|
A clock reset (resets a clock to 0).
-}
newtype ClockReset = ClockReset
{ rclock :: Clock
} deriving (Eq, Ord, Show)
{-|
An edge with actions of type a between locations of type b.
-}
data Edge a b = Edge
{ source :: Location b -- ^ source location
, action :: a -- ^ action
, guard :: [ClockConstraint] -- ^ guard
, resets :: [ClockReset] -- ^ set of clocks to reset
  , assignments :: [VariableAssignment] -- ^ sequence of assignments
, target :: Location b -- ^ target location
} deriving (Ord, Show)
{-|
Instance of Eq for edges.
Two edges are == up to reordering of guards and resets.
-}
instance (Eq a, Eq b) => Eq (Edge a b) where
(Edge s a gs rs as t) == (Edge s' a' gs' rs' as' t') =
(s == s') &&
(a == a') &&
(S.fromList gs == S.fromList gs') &&
(S.fromList rs == S.fromList rs') &&
(S.fromList as == S.fromList as') &&
(t == t')
{-|
A timed automaton (TA).
A TA is generic on a, the type of actions on edges,
and on b, the type of locations.
-}
data TimedAutomaton a b = TimedAutomaton
{ mid :: Name String -- ^ id of the model
, locations :: [Location b] -- ^ locations
, initialLocation :: Location b -- ^ initial location
, committedLocations :: [Location b] -- ^ committed locations
, urgentLocations :: [Location b] -- ^ urgent locations
, clocks :: [Clock] -- ^ clocks
, variables :: Map VariableName VariableTyping -- ^ variables
, actions :: [a] -- ^ actions
, edges :: [Edge a b] -- ^ edges
, invariants :: [(Location b, [ClockConstraint])] -- ^ invariants
}
{-|
Network of TAs.
-}
newtype TimedAutomataNetwork a b =
TimedAutomataNetwork [TimedAutomaton a b]
deriving (Show)
{-|
Instance of Show for TAs.
-}
instance (Ord a, Ord b, ToXta a, ToXta b, TCommunication a) =>
Show (TimedAutomaton a b) where
show = asXta
{-|
Instance of Eq for TAs.
TODO: add treatment for invariants
-}
instance (Ord a, Ord b) => Eq (TimedAutomaton a b) where
(TimedAutomaton i ls l0 cls uls cs vs as es _) == (TimedAutomaton i' ls' l0' cls' uls' cs' vs' as' es' _) =
and
[ i == i'
, ls == ls'
, l0 == l0'
, cls == cls'
, uls == uls'
, cs == cs'
, vs == vs'
, as == as'
, es == es'
]
{-|
Instance of Named for TAs.
-}
instance Named (TimedAutomaton a b) where
name = mid
rename n (TimedAutomaton _ ls l0 cls uls cs vs as es is) =
TimedAutomaton n ls l0 cls uls cs vs as es is
{-|
Check the validity of a TA.
A TA is valid iff:
- the model id is not empty
- the set of actions is not empty
- the set of locations is not empty
- the sets of urgent and committed locations are disjoint
- the union of the urgent and committed locations is included in the locations
- the initial location is in the set of locations
- the source location of each edge is in the set of locations
- the label of each transition is in the alphabet
- the target location of each edge is in the set of locations
- the resets of each edge are in the set of clocks
- the assignments of each edge are over the variables
- TODO: the keyset of the invariants is equal to the set of locations
-}
isValidTA :: (Eq a, Eq b) => TimedAutomaton a b -> Bool
isValidTA (TimedAutomaton i ls l0 cls uls cs vs as es _) = and
[ isValidName i
, not . null $ as
, not . null $ ls
, l0 `elem` ls
, not . getAny $ foldMap (Any . elem' uls) cls
, cls `allIn` ls
, uls `allIn` ls
, (source <$> es) `allIn` ls
, (action <$> es) `allIn` as
, (target <$> es) `allIn` ls
, (rclock <$> foldMap resets es) `allIn` cs
, (variable <$> foldMap assignments es) `allIn` keys vs
]
where xs `elem'` x = x `elem` xs
{-|
Relabel actions in a TA.
-}
relabel :: (Ord a) => Substitution a -> TimedAutomaton a b -> TimedAutomaton a b
relabel sigma (TimedAutomaton i ls l0 cls uls cs vs as es is) = TimedAutomaton
i
ls
l0
cls
uls
cs
vs
(apply sigma <$> as)
(relabelE sigma <$> es)
is
where relabelE sig (Edge s a gs rs as' s') = Edge s (apply sig a) gs rs as' s'
{-|
Check if a location is committed.
-}
isCommitted :: Eq b => TimedAutomaton a b -> Location b -> Bool
isCommitted (TimedAutomaton i ls l0 cls uls cs vs as es is) l =
l `elem` ls && l `elem` cls
{-|
Check if a location is urgent.
-}
isUrgent :: Eq b => TimedAutomaton a b -> Location b -> Bool
isUrgent (TimedAutomaton i ls l0 cls uls cs vs as es is) l =
l `elem` ls && l `elem` uls
{-|
Set a location to be committed.
Supposes the timed automaton is valid.
-}
setCommitted :: Eq b => TimedAutomaton a b -> Location b -> TimedAutomaton a b
setCommitted t@(TimedAutomaton i ls l0 cls uls cs vs as es is) l
| l `notElem` ls = t
| isCommitted t l = t
| not (isUrgent t l) = TimedAutomaton i ls l0 cls' uls cs vs as es is
| otherwise = TimedAutomaton i ls l0 cls' uls' cs vs as es is
where
cls' = l : cls
uls' = delete l uls
{-|
Set a location to be urgent.
Supposes the timed automaton is valid.
-}
setUrgent :: Eq b => TimedAutomaton a b -> Location b -> TimedAutomaton a b
setUrgent t@(TimedAutomaton i ls l0 cls uls cs vs as es is) l
| l `notElem` ls = t
| isUrgent t l = t
| not (isCommitted t l) = TimedAutomaton i ls l0 cls uls' cs vs as es is
| otherwise = TimedAutomaton i ls l0 cls' uls' cs vs as es is
where
cls' = delete l cls
uls' = l : uls
{-|
Rename the TA (using a substitution).
-}
rename' :: Substitution (Name String)
-> TimedAutomaton a b
-> TimedAutomaton a b
rename' sigma t = rename (apply sigma $ name t) t
{-|
Add observers for actions.
This means:
- adding a local integer variable "done", initialized at 0
- defining a mapping m between the actions in as and identifiers (integers [1..|as|])
- setting done=m(a) for each edge with action a on it
-}
addObservers :: Ord a => TimedAutomaton a b -> TimedAutomaton a b
addObservers (TimedAutomaton i ls l0 cls uls cs vs as es is) = TimedAutomaton
i
ls
l0
cls
uls
cs
vs'
as
es'
is
where
vs' = M.fromList [(varname, var)]
es' = addObserver <$> es
--
varname = Name ["done"]
var = VariableTyping varname (IntType bounds) (Just $ Expression "0")
bounds = Bounds 0 (length as)
m = M.fromList $ zip as (show <$> [1 .. (length as)])
--
addObserver (Edge s a gs rs as t) = Edge s a gs rs as' t
where
as' = if member a m
then VariableAssignment varname (Expression (m ! a)):as
else as
{-|
Get the invariant for a location.
-}
getInvariantForLocation :: Ord b
=> [(Location b, [ClockConstraint])]
-> Location b
-> [ClockConstraint]
getInvariantForLocation is l = foldMap snd . filter ((== l) . fst) $ is
{-|
Class for what can be exported in the XTA format.
-}
class Show t =>
ToXta t
where
asXta :: t -> String
{-# MINIMAL asXta #-}
{-|
Symbol in the XTA format for reception.
-}
xtaREC :: String
xtaREC = "?"
{-|
Symbol in the XTA format for emission.
-}
xtaSEND :: String
xtaSEND = "!"
{-|
ToXta instance for names.
-}
instance ToXta (Name String) where
asXta (Name []) = "_"
asXta (Name ns) = foldMapToString' "_" id ns
{-|
ToXta instance for Natural.
-}
instance ToXta Natural where
asXta = show
{-|
ToXta instance for Int.
-}
instance ToXta Int where
asXta = show
{-|
ToXta instance for [Char].
-}
instance ToXta [Char] where
asXta = id
{-|
ToXta instance for expressions.
-}
instance ToXta Expression where
asXta (Expression e) = asXta e
{-|
ToXta instance for bounds.
-}
instance ToXta Bounds where
asXta NoBounds = ""
asXta (Bounds b1 b2) = "[" ++ asXta b1 ++ "," ++ asXta b2 ++ "]"
{-|
ToXta instance for variable types.
-}
instance ToXta VariableType where
asXta (IntType b) = "int" ++ asXta b
asXta BoolType = "bool"
{-|
ToXta instance for variable typings.
-}
instance ToXta VariableTyping where
asXta (VariableTyping v t (Just e)) = asXta t ++ " " ++ asXta v ++ " = " ++ asXta e ++ ";"
asXta (VariableTyping v t Nothing) = asXta t ++ " " ++ asXta v ++ ";"
{-|
ToXta instance for variable assignments.
-}
instance ToXta VariableAssignment where
asXta (VariableAssignment v e) = asXta v ++ " = " ++ asXta e
{-|
ToXta instance for clocks.
-}
instance ToXta Clock where
asXta (Clock c) = "c_" ++ asXta c
{-|
ToXta instance for clock resets.
-}
instance ToXta ClockReset where
asXta (ClockReset c) = asXta c ++ " = 0"
{-|
ToXta instance for clock constraints.
-}
instance ToXta ClockConstraint where
asXta (ClockConstraint c op v) = asXta c <> " " <> asXta op <> " " <> asXta v
{-|
ToXta instance for clock operators.
-}
instance ToXta ClockOperator where
asXta Models.TimedAutomaton.LT = "<"
asXta Models.TimedAutomaton.LE = "<="
asXta Models.TimedAutomaton.EQ = "=="
asXta Models.TimedAutomaton.GE = ">="
asXta Models.TimedAutomaton.GT = ">"
{-|
ToXta instance for locations.
-}
instance (ToXta a) => ToXta (Location a) where
asXta (Location l) = "l_" ++ asXta l
{-|
ToXta instance for IO events.
-}
instance (ToXta a) => ToXta (TIOEvent a) where
asXta TTau = ""
asXta (TReceive a) = asXta a
asXta (TSend a) = asXta a
{-|
ToXta instance for CIO events.
-}
instance (ToXta a) => ToXta (CTIOEvent a) where
asXta CTTau = ""
asXta (CTReceive a) = asXta a ++ reqSuffix
asXta (CTInvoke a) = asXta a ++ reqSuffix
asXta (CTReply a) = asXta a ++ resSuffix
asXta (CTResult a) = asXta a ++ resSuffix
reqSuffix :: String
reqSuffix = "_req"
resSuffix :: String
resSuffix = "_res"
{-|
ToXta instance for edges.
-}
instance (ToXta a, ToXta b, TCommunication a) => ToXta (Edge a b) where
asXta (Edge s a gs rs as s') =
concat
[ replicate 4 ' '
, asXta s
, " -> "
, asXta s'
, " { "
, foldMapToString "guard " " && " "; " asXta gs
, asXta' a
, foldMapToString "assign " ", " "; " id ((asXta <$> as) <> (asXta <$> rs))
, "}"
]
where
asXta' e =
case () of
_
| isOutput e -> "sync " ++ asXta e ++ xtaSEND ++ "; "
| isInput e -> "sync " ++ asXta e ++ xtaREC ++ "; "
| otherwise -> ""
{-|
ToXta instance for a TA network.
-}
instance (Ord a, Ord b, ToXta a, ToXta b, TCommunication a) =>
ToXta (TimedAutomataNetwork a b) where
asXta (TimedAutomataNetwork tas) =
unlines $ [schannels] <> stas <> sinstances <> [sprocess]
where
-- define the channels
schannels = foldMapToString "chan " ", " ";" id iochannels
iochannels =
removeDuplicates $ asXta <$> foldMap (removeInternals . actions) tas
removeInternals = filter (not . isInternal)
-- define the TAs
stas = asXta <$> tas
-- get all TA ids
pids = mid <$> tas
-- create an instance for each TA
sinstances = finstancedecl <$> pids
finstancedecl pid = finstancename pid <> " = " <> asXta pid <> "();"
finstancename pid = "Process_" <> asXta pid
-- put all the instances in the system
sprocess = foldMapToString "system " ", " ";" finstancename pids
{-|
ToXta instance for TAs.
Can be used to transform a TA into the XTA format.
Given a TA t, the channels and instance parts of the XTA files
are obtained by using @ToXta (TimedAutomataNetwork [t])@ instead of @ToXta t@.
-}
instance (Ord a, Ord b, ToXta a, ToXta b, TCommunication a) =>
ToXta (TimedAutomaton a b) where
asXta (TimedAutomaton i ls l0 cls uls cs vs as es is) =
unlines $
filter
(not . null)
[ sheader
, sclocks
, svariables
, sstates
, scstates
, sustates
, sinitialization
, sedges
, sfooter
]
where
sheader = "process " <> asXta i <> "(){"
sclocks = foldMapToString "clock " ", " ";" asXta cs
svariables = foldMapToString' "\n" asXta vs
sstates = foldMapToString "state " ", " ";" (asXtaWithInvariants is) ls
scstates = foldMapToString "commit " ", " ";" asXta cls
sustates = foldMapToString "urgent " ", " ";" asXta uls
sinitialization = "init " <> asXta l0 <> ";"
sedges = foldMapToString "trans\n" ",\n" ";" asXta es
sfooter = "}"
asXtaWithInvariants is' l =
asXta l <>
foldMapToString " { " " && " " }" asXta (getInvariantForLocation is' l)
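{-|
A small sketch ('_xtaExamples' is illustrative and not part of the original
module): rendering a clock constraint and a clock reset with the 'ToXta'
instances above yields "c_x >= 5" and "c_x = 0".
-}
_xtaExamples :: [String]
_xtaExamples =
  [ asXta (ClockConstraint (Clock "x") GE 5)
  , asXta (ClockReset (Clock "x"))
  ]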
|
pascalpoizat/veca-haskell
|
src/Models/TimedAutomaton.hs
|
apache-2.0
| 15,955 | 0 | 15 | 4,389 | 4,347 | 2,344 | 2,003 | 333 | 2 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>Ang Port Scan | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Mga Nilalaman</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Paghahanap</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Mga paborito</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/scripts/src/main/javahelp/org/zaproxy/zap/extension/scripts/resources/help_fil_PH/helpset_fil_PH.hs
|
apache-2.0
| 989 | 85 | 52 | 163 | 406 | 213 | 193 | -1 | -1 |
{-# LANGUAGE PatternGuards, ViewPatterns, TypeSynonymInstances, FlexibleInstances, Rank2Types #-}
module Core.Syntax where
import Core.Data (DataCon)
import Name
import Utilities
import StaticFlags
type Var = Name
data PrimOp = Add | Subtract | Multiply | Divide | Modulo | Equal | LessThan | LessThanEqual
deriving (Eq, Ord, Show)
data AltCon = DataAlt DataCon [Var] | LiteralAlt Literal | DefaultAlt (Maybe Var)
deriving (Eq, Show)
-- Note [Case wildcards]
-- ~~~~~~~~~~~~~~~~~~~~~
--
-- Simon thought that I should use the variable in the DefaultAlt to agressively rewrite occurences of a scrutinised variable.
-- The motivation is that this lets us do more inlining above the case. For example, take this code fragment from foldl':
--
-- let n' = c n y
-- in case n' of wild -> foldl' c n' ys
--
-- If we rewrite, n' becomes linear:
--
-- let n' = c n y
-- in case n' of wild -> foldl c wild ys
--
-- This lets us potentially inline n' directly into the scrutinee position (operationally, this prevents the creation of a thunk for n').
-- However, I don't think that this particular form of improving linearity helps the supercompiler. We only want to inline n'
-- somewhere if it meets some interesting context, with which it can cancel. But if we are creating an update frame for n' at all,
-- it is *probably* because we had no information about what it evaluated to.
--
-- An interesting exception is when n' binds a case expression:
--
-- let n' = case unk of T -> F; F -> T
-- in case (case n' of T -> F; F -> T) of
-- wild -> e[n']
--
-- You might think that we want n' to be linear so we can inline it into the case on it. However, the splitter will save us and produce:
--
-- case unk of
-- T -> let n' = F
-- in case (case n' of T -> F; F -> T) of wild -> e[n']
-- F -> let n' = T
-- in case (case n' of T -> F; F -> T) of wild -> e[n']
--
-- Since we now know the form of n', everything works out nicely.
--
-- Conclusion: I don't think rewriting to use the case wildcard buys us anything at all.
data Literal = Int Integer | Char Char
deriving (Eq, Show)
type Term = Identity (TermF Identity)
type TaggedTerm = Tagged (TermF Tagged)
type CountedTerm = Counted (TermF Counted)
data TermF ann = Var Var | Value (ValueF ann) | App (ann (TermF ann)) Var | PrimOp PrimOp [ann (TermF ann)] | Case (ann (TermF ann)) [AltF ann] | LetRec [(Var, ann (TermF ann))] (ann (TermF ann))
deriving (Eq, Show)
type Alt = AltF Identity
type TaggedAlt = AltF Tagged
type CountedAlt = AltF Counted
type AltF ann = (AltCon, ann (TermF ann))
type Value = ValueF Identity
type TaggedValue = ValueF Tagged
type CountedValue = ValueF Counted
data ValueF ann = Indirect Var | Lambda Var (ann (TermF ann)) | Data DataCon [Var] | Literal Literal -- TODO: add PAPs as well? Would avoid duplicating function bodies too eagerly.
deriving (Eq, Show)
instance NFData PrimOp
instance NFData AltCon where
rnf (DataAlt a b) = rnf a `seq` rnf b
rnf (LiteralAlt a) = rnf a
rnf (DefaultAlt a) = rnf a
instance NFData Literal where
rnf (Int a) = rnf a
rnf (Char a) = rnf a
instance NFData1 ann => NFData (TermF ann) where
rnf (Var a) = rnf a
rnf (Value a) = rnf a
rnf (App a b) = rnf a `seq` rnf b
rnf (PrimOp a b) = rnf a `seq` rnf b
rnf (Case a b) = rnf a `seq` rnf b
rnf (LetRec a b) = rnf a `seq` rnf b
instance NFData1 ann => NFData (ValueF ann) where
rnf (Indirect a) = rnf a
rnf (Lambda a b) = rnf a `seq` rnf b
rnf (Data a b) = rnf a `seq` rnf b
rnf (Literal a) = rnf a
instance Pretty PrimOp where
pPrint Add = text "(+)"
pPrint Subtract = text "(-)"
pPrint Multiply = text "(*)"
pPrint Divide = text "div"
pPrint Modulo = text "mod"
pPrint Equal = text "(==)"
pPrint LessThan = text "(<)"
pPrint LessThanEqual = text "(<=)"
instance Pretty AltCon where
pPrintPrec level prec altcon = case altcon of
DataAlt dc xs -> prettyParen (prec >= appPrec) $ text dc <+> hsep (map (pPrintPrec level appPrec) xs)
LiteralAlt l -> pPrint l
DefaultAlt mb_x -> maybe (text "_") (pPrintPrec level prec) mb_x
instance Pretty Literal where
pPrintPrec level prec (Int i) | level == haskellLevel = prettyParen (prec >= appPrec) $ pPrintPrec level appPrec i <+> text ":: Int"
| otherwise = pPrintPrec level prec i
pPrintPrec _ _ (Char c) = text $ show c
instance Pretty1 ann => Pretty (TermF ann) where
pPrintPrec level prec e = case e of
LetRec xes e -> pPrintPrecLetRec level prec xes e
Var x -> pPrintPrec level prec x
Value v -> pPrintPrec level prec v
App e1 x2 -> pPrintPrecApp level prec e1 x2
PrimOp pop xs -> pPrintPrecPrimOp level prec pop xs
Case e alts | level == haskellLevel, null alts -> pPrintPrecSeq level prec e (text "undefined")
| level == haskellLevel, [(DefaultAlt Nothing, e_alt)] <- alts -> pPrintPrecSeq level prec e e_alt
| level == haskellLevel, [(DefaultAlt (Just x), e_alt)] <- alts -> pPrintPrecLetRec level prec [(x, e)] (pPrintPrecSeq level prec x e_alt)
| otherwise -> pPrintPrecCase level prec e alts
pPrintPrecSeq :: (Pretty a, Pretty b) => PrettyLevel -> Rational -> a -> b -> Doc
pPrintPrecSeq level prec e1 e2 = pPrintPrecApp level prec (PrettyFunction $ \level prec -> pPrintPrecApp level prec (name "seq") e1) e2
pPrintPrecApp :: (Pretty a, Pretty b) => PrettyLevel -> Rational -> a -> b -> Doc
pPrintPrecApp level prec e1 e2 = prettyParen (prec >= appPrec) $ pPrintPrec level opPrec e1 <+> pPrintPrec level appPrec e2
pPrintPrecPrimOp :: (Pretty a, Pretty b) => PrettyLevel -> Rational -> a -> [b] -> Doc
pPrintPrecPrimOp level prec pop xs = pPrintPrecApps level prec pop xs
pPrintPrecCase :: (Pretty a, Pretty b, Pretty c) => PrettyLevel -> Rational -> a -> [(b, c)] -> Doc
pPrintPrecCase level prec e alts = prettyParen (prec > noPrec) $ hang (text "case" <+> pPrintPrec level noPrec e <+> text "of") 2 $ vcat (map (pPrintPrecAlt level noPrec) alts)
pPrintPrecAlt :: (Pretty a, Pretty b) => PrettyLevel -> Rational -> (a, b) -> Doc
pPrintPrecAlt level _ (alt_con, alt_e) = hang (pPrintPrec level noPrec alt_con <+> text "->") 2 (pPrintPrec level noPrec alt_e)
pPrintPrecLetRec :: (Pretty a, Pretty b, Pretty c) => PrettyLevel -> Rational -> [(a, b)] -> c -> Doc
pPrintPrecLetRec level prec xes e_body
| [] <- xes = pPrintPrec level prec e_body
| otherwise = prettyParen (prec > noPrec) $ hang (if level == haskellLevel then text "let" else text "letrec") 2 (vcat [pPrintPrec level noPrec x <+> text "=" <+> pPrintPrec level noPrec e | (x, e) <- xes]) $$ text "in" <+> pPrintPrec level noPrec e_body
instance Pretty1 ann => Pretty (ValueF ann) where
pPrintPrec level prec v = case v of
Indirect x -> pPrintPrec level prec x
-- Unfortunately, this nicer pretty-printing doesn't work for general (TermF ann):
--Lambda x e -> pPrintPrecLam level prec (x:xs) e'
-- where (xs, e') = collectLambdas e
Lambda x e -> pPrintPrecLam level prec [x] e
Data dc xs -> pPrintPrecApps level prec (PrettyFunction $ \_ _ -> text dc) xs
Literal l -> pPrintPrec level prec l
pPrintPrecLam :: Pretty a => PrettyLevel -> Rational -> [Var] -> a -> Doc
pPrintPrecLam level prec xs e = prettyParen (prec > noPrec) $ text "\\" <> hsep [pPrintPrec level appPrec y | y <- xs] <+> text "->" <+> pPrintPrec level noPrec e
pPrintPrecApps :: (Pretty a, Pretty b) => PrettyLevel -> Rational -> a -> [b] -> Doc
pPrintPrecApps level prec e1 es2 = prettyParen (not (null es2) && prec >= appPrec) $ pPrintPrec level opPrec e1 <+> hsep (map (pPrintPrec level appPrec) es2)
altConBinders :: AltCon -> [Var]
altConBinders (DataAlt _ xs) = xs
altConBinders (LiteralAlt _) = []
altConBinders (DefaultAlt mb_x) = maybeToList mb_x
termToValue :: Copointed ann => ann (TermF ann) -> Maybe (ann (ValueF ann))
termToValue e = case extract e of Value v -> Just (fmap (const v) e); _ -> Nothing
termIsValue :: Copointed ann => ann (TermF ann) -> Bool
termIsValue = isValue . extract
isValue :: TermF ann -> Bool
isValue (Value _) = True
isValue _ = False
termIsCheap :: Copointed ann => ann (TermF ann) -> Bool
termIsCheap = isCheap . extract
isCheap :: Copointed ann => TermF ann -> Bool
isCheap (Var _) = True
isCheap (Value _) = True
isCheap (Case e []) = isCheap (extract e) -- NB: important for pushing down let-bound applications of ``error''
isCheap _ = False
termToVar :: Copointed ann => ann (TermF ann) -> Maybe Var
termToVar e = case extract e of
Value (Indirect x) -> Just x
Var x -> Just x
_ -> Nothing
class Symantics ann where
var :: Var -> ann (TermF ann)
value :: ValueF ann -> ann (TermF ann)
app :: ann (TermF ann) -> Var -> ann (TermF ann)
primOp :: PrimOp -> [ann (TermF ann)] -> ann (TermF ann)
case_ :: ann (TermF ann) -> [AltF ann] -> ann (TermF ann)
letRec :: [(Var, ann (TermF ann))] -> ann (TermF ann) -> ann (TermF ann)
instance Symantics Identity where
var = I . Var
value = I . Value
app e = I . App e
primOp pop es = I (PrimOp pop es)
case_ e = I . Case e
letRec xes e = I $ LetRec xes e
reify :: (forall ann. Symantics ann => ann (TermF ann)) -> Term
reify x = x
reflect :: Term -> (forall ann. Symantics ann => ann (TermF ann))
reflect (I e) = case e of
Var x -> var x
Value (Indirect x) -> value (Indirect x)
Value (Lambda x e) -> value (Lambda x (reflect e))
Value (Data dc xs) -> value (Data dc xs)
Value (Literal l) -> value (Literal l)
App e1 x2 -> app (reflect e1) x2
PrimOp pop es -> primOp pop (map reflect es)
Case e alts -> case_ (reflect e) (map (second reflect) alts)
LetRec xes e -> letRec (map (second reflect) xes) (reflect e)
literal :: Symantics ann => Literal -> ann (TermF ann)
literal = value . Literal
lambda :: Symantics ann => Var -> ann (TermF ann) -> ann (TermF ann)
lambda x = value . Lambda x
lambdas :: Symantics ann => [Var] -> ann (TermF ann) -> ann (TermF ann)
lambdas = flip $ foldr lambda
data_ :: Symantics ann => DataCon -> [Var] -> ann (TermF ann)
data_ dc = value . Data dc
apps :: Symantics ann => ann (TermF ann) -> [Var] -> ann (TermF ann)
apps = foldl app
varApps :: Symantics ann => Var -> [Var] -> ann (TermF ann)
varApps h xs = var h `apps` xs
letRecSmart :: Symantics ann => [(Var, ann (TermF ann))] -> ann (TermF ann) -> ann (TermF ann)
letRecSmart [] = id
letRecSmart xes = letRec xes
strictLet :: Symantics ann => Var -> ann (TermF ann) -> ann (TermF ann) -> ann (TermF ann)
strictLet x e1 e2 = case_ e1 [(DefaultAlt (Just x), e2)]
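-- A small sketch ('_identityTerm' is illustrative and not part of the original
-- module): the Symantics smart constructors build terms generically, and
-- 'reify' fixes the annotation type to 'Identity', giving an ordinary 'Term'.
_identityTerm :: Var -> Term
_identityTerm x = reify (lambda x (var x))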
collectLambdas :: Term -> ([Var], Term)
collectLambdas (I (Value (Lambda x e))) = first (x:) $ collectLambdas e
collectLambdas e = ([], e)
freshFloatVar :: IdSupply -> String -> Term -> (IdSupply, Maybe (Name, Term), Name)
freshFloatVar ids _ (I (Var x)) = (ids, Nothing, x)
freshFloatVar ids s e = (ids', Just (y, e), y)
where (ids', y) = freshName ids s
freshFloatVars :: IdSupply -> String -> [Term] -> (IdSupply, [(Name, Term)], [Name])
freshFloatVars ids s es = reassociate $ mapAccumL (\ids -> associate . freshFloatVar ids s) ids es
where reassociate (ids, unzip -> (mb_floats, xs)) = (ids, catMaybes mb_floats, xs)
associate (ids, mb_float, x) = (ids, (mb_float, x))
|
batterseapower/mini-ghc
|
Core/Syntax.hs
|
bsd-3-clause
| 11,795 | 0 | 17 | 2,991 | 4,334 | 2,191 | 2,143 | 178 | 9 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
import SDL.Pal
import Graphics.GL.Pal
import Halive.Utils
import Control.Monad.Reader
import Data.Time
import Data.Foldable
-- import System.Random
data Uniforms = Uniforms
{ uMVP :: UniformLocation (M44 GLfloat)
} deriving Data
randomPositions :: GLsizei -> GLfloat -> IO [V3 GLfloat]
randomPositions instanceCount t = forM [0..instanceCount-1] $ \i -> do
let x = fromIntegral $ (i `div` 100) - 50
y = fromIntegral $ (i `mod` 100) - 50
return (V3 x (y + sin (t + fromIntegral i)) 0)
main :: IO ()
main = do
win <- reacquire 0 $ createGLWindow "Geometry Test"
shader <- createShaderProgram "test/geoInstanced.vert" "test/geo.frag"
cubeGeo <- cubeGeometry 0.5 1
cubeShape <- makeShape cubeGeo shader
let numInstances = 1000
initialOffsets <- randomPositions numInstances 0
positionsBuffer <- bufferData GL_DYNAMIC_DRAW (concatMap toList initialOffsets)
--iBuffer <- bufferData GL_DYNAMIC_DRAW (replicate (fromIntegral numInstances) 5 :: [GLint])
withShape cubeShape $ do
withArrayBuffer positionsBuffer $
assignFloatAttributeInstanced shader "aInstancePosition" GL_FLOAT 3
--withArrayBuffer iBuffer $
-- assignIntegerAttributeInstanced shader "aInstancePosI" GL_INT 1
glEnable GL_DEPTH_TEST
glClearColor 0.0 0.0 0.1 1
whileWindow win $ \events -> do
projection <- getWindowProjection win 45 0.1 1000
(x,y,w,h) <- getWindowViewport win
glViewport x y w h
glClear (GL_COLOR_BUFFER_BIT .|. GL_DEPTH_BUFFER_BIT)
let view = viewMatrix (V3 0 0 100) (axisAngle (V3 0 1 0) 0)
t <- (*10) . realToFrac . utctDayTime <$> getCurrentTime
newOffsets <- randomPositions numInstances t
bufferSubData positionsBuffer (concatMap toList newOffsets)
--bufferSubData iBuffer (replicate (fromIntegral numInstances) 0 :: [GLint])
let model = mkTransformation (axisAngle (V3 1 1 0) 1) (V3 0 1 0)
withShape cubeShape $ do
Uniforms{..} <- asks sUniforms
uniformM44 uMVP (projection !*! view !*! model)
drawShapeInstanced numInstances
glSwapWindow win
|
lukexi/gl-pal
|
test/TestInstanced.hs
|
bsd-3-clause
| 2,380 | 3 | 18 | 625 | 636 | 306 | 330 | 46 | 1 |
{-# LANGUAGE CPP, FlexibleInstances, IncoherentInstances, NamedFieldPuns,
NoImplicitPrelude, OverlappingInstances, TemplateHaskell,
UndecidableInstances #-}
{-|
Module: Data.Aeson.TH
Copyright: (c) 2011-2015 Bryan O'Sullivan
(c) 2011 MailRank, Inc.
License: Apache
Stability: experimental
Portability: portable
Functions to mechanically derive 'ToJSON' and 'FromJSON' instances. Note that
you need to enable the @TemplateHaskell@ language extension in order to use this
module.
An example shows how instances are generated for arbitrary data types. First we
define a data type:
@
data D a = Nullary
| Unary Int
| Product String Char a
| Record { testOne :: Double
, testTwo :: Bool
, testThree :: D a
} deriving Eq
@
Next we derive the necessary instances. Note that we make use of the
'fieldLabelModifier' feature to change record field names: in this case we drop
the first 4 characters of every field name. We also modify constructor names by
lower-casing them with 'constructorTagModifier':
@
$('deriveJSON' 'defaultOptions'{'fieldLabelModifier' = 'drop' 4, 'constructorTagModifier' = map toLower} ''D)
@
Now we can use the newly created instances.
@
d :: D 'Int'
d = Record { testOne = 3.14159
, testTwo = 'True'
, testThree = Product \"test\" \'A\' 123
}
@
>>> fromJSON (toJSON d) == Success d
> True
Please note that you can derive instances for tuples using the following syntax:
@
-- FromJSON and ToJSON instances for 4-tuples.
$('deriveJSON' 'defaultOptions' ''(,,,))
@
-}
module Data.Aeson.TH
( -- * Encoding configuration
Options(..), SumEncoding(..), defaultOptions, defaultTaggedObject
-- * FromJSON and ToJSON derivation
, deriveJSON
, deriveToJSON
, deriveFromJSON
, mkToJSON
, mkToEncoding
, mkParseJSON
) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
-- from aeson:
import Data.Aeson ( toJSON, Object, (.=), (.:), (.:?)
, ToJSON, toEncoding, toJSON
, FromJSON, parseJSON
)
import Data.Aeson.Types ( Value(..), Parser
, Options(..)
, SumEncoding(..)
, defaultOptions
, defaultTaggedObject
)
import Data.Aeson.Types.Internal (Encoding(..))
import qualified Data.Aeson.Encode.Builder as E
import qualified Data.Aeson.Encode.Functions as E
import qualified Data.Aeson as A
-- from base:
import Control.Applicative ( pure, (<$>), (<*>) )
import Control.Monad ( return, mapM, liftM2, fail )
import Data.Bool ( Bool(False, True), otherwise, (&&) , not)
import Data.Eq ( (==) )
import Data.Function ( ($), (.) )
import Data.Functor ( fmap )
import Data.Int ( Int )
import Data.Either ( Either(Left, Right) )
import Data.List ( (++), foldl, foldl', intercalate, intersperse
, length, map, zip, genericLength, all, partition
)
import Data.Maybe ( Maybe(Nothing, Just), catMaybes )
import Data.Monoid ( (<>), mconcat )
import Prelude ( String, (-), Integer, error, foldr1, fromIntegral )
import Text.Printf ( printf )
import Text.Show ( show )
-- from unordered-containers:
import qualified Data.HashMap.Strict as H ( lookup, toList )
-- from template-haskell:
import Language.Haskell.TH
import Language.Haskell.TH.Syntax ( VarStrictType )
-- from text:
import qualified Data.Text as T ( Text, pack, unpack )
-- from vector:
import qualified Data.Vector as V ( unsafeIndex, null, length, create, fromList )
import qualified Data.Vector.Mutable as VM ( unsafeNew, unsafeWrite )
--------------------------------------------------------------------------------
-- Convenience
--------------------------------------------------------------------------------
-- | Generates both 'ToJSON' and 'FromJSON' instance declarations for the given
-- data type.
--
-- This is a convenience function which is equivalent to calling both
-- 'deriveToJSON' and 'deriveFromJSON'.
deriveJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate 'ToJSON' and 'FromJSON'
-- instances.
-> Q [Dec]
deriveJSON opts name =
liftM2 (++)
(deriveToJSON opts name)
(deriveFromJSON opts name)
--------------------------------------------------------------------------------
-- ToJSON
--------------------------------------------------------------------------------
{-
TODO: Don't constrain phantom type variables.
data Foo a = Foo Int
instance (ToJSON a) ⇒ ToJSON (Foo a) where ...
The above (ToJSON a) constraint is not necessary and perhaps undesirable.
-}
-- | Generates a 'ToJSON' instance declaration for the given data type.
deriveToJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'ToJSON' instance
-- declaration.
-> Q [Dec]
deriveToJSON opts name =
withType name $ \tvbs cons -> fmap (:[]) $ fromCons tvbs cons
where
fromCons :: [TyVarBndr] -> [Con] -> Q Dec
fromCons tvbs cons =
instanceD (applyCon ''ToJSON typeNames)
(classType `appT` instanceType)
[ funD 'toJSON
[ clause []
(normalB $ consToValue opts cons)
[]
]
, funD 'toEncoding
[ clause []
(normalB $ consToEncoding opts cons)
[]
]
]
where
classType = conT ''ToJSON
typeNames = map tvbName tvbs
instanceType = foldl' appT (conT name) $ map varT typeNames
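-- A usage sketch (illustrative only, reusing the example type @D@ from the
-- module documentation above):
--
-- > $(deriveToJSON defaultOptions ''D)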
-- | Generates a lambda expression which encodes the given data type as a
-- 'Value'.
mkToJSON :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkToJSON opts name = withType name (\_ cons -> consToValue opts cons)
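-- A usage sketch (illustrative only; the record type @Person@ and the helper
-- name @personToValue@ are made up for this example):
--
-- > data Person = Person { personName :: String, personAge :: Int }
-- >
-- > personToValue :: Person -> Value
-- > personToValue = $(mkToJSON defaultOptions ''Person)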
-- | Generates a lambda expression which encodes the given data type
-- as a JSON string.
mkToEncoding :: Options -- ^ Encoding options.
-> Name -- ^ Name of the type to encode.
-> Q Exp
mkToEncoding opts name = withType name (\_ cons -> consToEncoding opts cons)
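-- A usage sketch (illustrative only, reusing the hypothetical @Person@ type
-- from the 'mkToJSON' sketch above; the type signature is omitted because
-- 'Encoding' lives in an internal module):
--
-- > personToEncoding = $(mkToEncoding defaultOptions ''Person)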
-- | Helper function used by both 'deriveToJSON' and 'mkToJSON'. Generates
-- code to generate a 'Value' of a number of constructors. All constructors
-- must be from the same type.
consToValue :: Options
-- ^ Encoding options.
-> [Con]
-- ^ Constructors for which to generate JSON generating code.
-> Q Exp
consToValue _ [] = error $ "Data.Aeson.TH.consToValue: "
++ "Not a single constructor given!"
-- A single constructor is directly encoded. The constructor itself may be
-- forgotten.
consToValue opts [con] = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) [argsToValue opts False con]
consToValue opts cons = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) matches
where
matches
| allNullaryToStringTag opts && all isNullary cons =
[ match (conP conName []) (normalB $ conStr opts conName) []
| con <- cons
, let conName = getConName con
]
| otherwise = [argsToValue opts True con | con <- cons]
conStr :: Options -> Name -> Q Exp
conStr opts = appE [|String|] . conTxt opts
conTxt :: Options -> Name -> Q Exp
conTxt opts = appE [|T.pack|] . conStringE opts
conStringE :: Options -> Name -> Q Exp
conStringE opts = stringE . constructorTagModifier opts . nameBase
-- | Helper function used by both 'deriveToJSON' and 'mkToEncoding'. Generates
-- code to write out a value for a number of constructors. All constructors
-- must be from the same type.
consToEncoding :: Options
-- ^ Encoding options.
-> [Con]
-- ^ Constructors for which to generate JSON generating code.
-> Q Exp
consToEncoding _ [] = error $ "Data.Aeson.TH.consToEncoding: "
++ "Not a single constructor given!"
-- A single constructor is directly encoded. The constructor itself may be
-- forgotten.
consToEncoding opts [con] = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) [argsToEncoding opts False con]
-- Encode just the name of the constructor of a sum type iff all the
-- constructors are nullary.
consToEncoding opts cons = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) matches
where
matches
| allNullaryToStringTag opts && all isNullary cons =
[ match (conP conName [])
(normalB $ [|Encoding|] `appE` encStr opts conName) []
| con <- cons
, let conName = getConName con
]
| otherwise = [argsToEncoding opts True con | con <- cons]
encStr :: Options -> Name -> Q Exp
encStr opts = appE [|E.text|] . conTxt opts
-- | Checks whether a constructor is nullary (takes no arguments).
isNullary :: Con -> Bool
isNullary (NormalC _ []) = True
isNullary _ = False
sumToValue :: Options -> Bool -> Name -> Q Exp -> Q Exp
sumToValue opts multiCons conName exp
| multiCons =
case sumEncoding opts of
TwoElemArray ->
[|Array|] `appE` ([|V.fromList|] `appE` listE [conStr opts conName, exp])
TaggedObject{tagFieldName, contentsFieldName} ->
[|A.object|] `appE` listE
[ infixApp [|T.pack tagFieldName|] [|(.=)|] (conStr opts conName)
, infixApp [|T.pack contentsFieldName|] [|(.=)|] exp
]
ObjectWithSingleField ->
[|A.object|] `appE` listE
[ infixApp (conTxt opts conName) [|(.=)|] exp
]
| otherwise = exp
-- | Generates code to generate the JSON encoding of a single constructor.
argsToValue :: Options -> Bool -> Con -> Q Match
-- Nullary constructors. Generates code that explicitly matches against the
-- constructor even though it doesn't contain data. This is useful to prevent
-- type errors.
argsToValue opts multiCons (NormalC conName []) =
match (conP conName [])
(normalB (sumToValue opts multiCons conName [e|toJSON ([] :: [()])|]))
[]
-- Polyadic constructors with special case for unary constructors.
argsToValue opts multiCons (NormalC conName ts) = do
let len = length ts
args <- mapM newName ["arg" ++ show n | n <- [1..len]]
js <- case [[|toJSON|] `appE` varE arg | arg <- args] of
-- Single argument is directly converted.
[e] -> return e
-- Multiple arguments are converted to a JSON array.
es -> do
mv <- newName "mv"
let newMV = bindS (varP mv)
([|VM.unsafeNew|] `appE`
litE (integerL $ fromIntegral len))
stmts = [ noBindS $
[|VM.unsafeWrite|] `appE`
(varE mv) `appE`
litE (integerL ix) `appE`
e
| (ix, e) <- zip [(0::Integer)..] es
]
ret = noBindS $ [|return|] `appE` varE mv
return $ [|Array|] `appE`
(varE 'V.create `appE`
doE (newMV:stmts++[ret]))
match (conP conName $ map varP args)
(normalB $ sumToValue opts multiCons conName js)
[]
-- Records.
argsToValue opts multiCons (RecC conName ts) = case (unwrapUnaryRecords opts, not multiCons, ts) of
(True,True,[(_,st,ty)]) -> argsToValue opts multiCons (NormalC conName [(st,ty)])
_ -> do
args <- mapM newName ["arg" ++ show n | (_, n) <- zip ts [1 :: Integer ..]]
let exp = [|A.object|] `appE` pairs
pairs | omitNothingFields opts = infixApp maybeFields
[|(++)|]
restFields
| otherwise = listE $ map toPair argCons
argCons = zip args ts
maybeFields = [|catMaybes|] `appE` listE (map maybeToPair maybes)
restFields = listE $ map toPair rest
(maybes, rest) = partition isMaybe argCons
maybeToPair (arg, (field, _, _)) =
infixApp (infixE (Just $ toFieldName field)
[|(.=)|]
Nothing)
[|(<$>)|]
(varE arg)
toPair (arg, (field, _, _)) =
infixApp (toFieldName field)
[|(.=)|]
(varE arg)
toFieldName field = [|T.pack|] `appE` fieldLabelExp opts field
match (conP conName $ map varP args)
( normalB
$ if multiCons
then case sumEncoding opts of
TwoElemArray -> [|toJSON|] `appE` tupE [conStr opts conName, exp]
TaggedObject{tagFieldName} ->
[|A.object|] `appE`
-- TODO: Maybe throw an error in case
-- tagFieldName overwrites a field in pairs.
infixApp (infixApp [|T.pack tagFieldName|]
[|(.=)|]
(conStr opts conName))
[|(:)|]
pairs
ObjectWithSingleField ->
[|A.object|] `appE` listE
[ infixApp (conTxt opts conName) [|(.=)|] exp ]
else exp
) []
-- Infix constructors.
argsToValue opts multiCons (InfixC _ conName _) = do
al <- newName "argL"
ar <- newName "argR"
match (infixP (varP al) conName (varP ar))
( normalB
$ sumToValue opts multiCons conName
$ [|toJSON|] `appE` listE [ [|toJSON|] `appE` varE a
| a <- [al,ar]
]
)
[]
-- Existentially quantified constructors.
argsToValue opts multiCons (ForallC _ _ con) =
argsToValue opts multiCons con
isMaybe :: (a, (b, c, Type)) -> Bool
isMaybe (_, (_, _, AppT (ConT t) _)) = t == ''Maybe
isMaybe _ = False
(<^>) :: ExpQ -> ExpQ -> ExpQ
(<^>) a b = infixApp a [|(<>)|] b
infixr 6 <^>
(<:>) :: ExpQ -> ExpQ -> ExpQ
(<:>) a b = a <^> [|E.char7 ':'|] <^> b
infixr 5 <:>
(<%>) :: ExpQ -> ExpQ -> ExpQ
(<%>) a b = a <^> [|E.char7 ','|] <^> b
infixr 4 <%>
array :: ExpQ -> ExpQ
array exp = [|Encoding|] `appE` ([|E.char7 '['|] <^> exp <^> [|E.char7 ']'|])
object :: ExpQ -> ExpQ
object exp = [|Encoding|] `appE` ([|E.char7 '{'|] <^> exp <^> [|E.char7 '}'|])
sumToEncoding :: Options -> Bool -> Name -> Q Exp -> Q Exp
sumToEncoding opts multiCons conName exp
| multiCons =
let fexp = [|fromEncoding|] `appE` exp in
case sumEncoding opts of
TwoElemArray ->
array (encStr opts conName <%> fexp)
TaggedObject{tagFieldName, contentsFieldName} ->
object $
([|E.text (T.pack tagFieldName)|] <:> encStr opts conName) <%>
([|E.text (T.pack contentsFieldName)|] <:> fexp)
ObjectWithSingleField ->
object (encStr opts conName <:> fexp)
| otherwise = exp
-- | Generates code to generate the JSON encoding of a single constructor.
argsToEncoding :: Options -> Bool -> Con -> Q Match
-- Nullary constructors. Generates code that explicitly matches against the
-- constructor even though it doesn't contain data. This is useful to prevent
-- type errors.
argsToEncoding opts multiCons (NormalC conName []) =
match (conP conName [])
(normalB (sumToEncoding opts multiCons conName [e|toEncoding ([] :: [()])|]))
[]
-- Polyadic constructors with special case for unary constructors.
argsToEncoding opts multiCons (NormalC conName ts) = do
let len = length ts
args <- mapM newName ["arg" ++ show n | n <- [1..len]]
js <- case args of
-- Single argument is directly converted.
[e] -> return ([|toEncoding|] `appE` varE e)
-- Multiple arguments are converted to a JSON array.
es ->
return (array (foldr1 (<%>) [[|E.builder|] `appE` varE x | x <- es]))
match (conP conName $ map varP args)
(normalB $ sumToEncoding opts multiCons conName js)
[]
-- Records.
argsToEncoding opts multiCons (RecC conName ts) = case (unwrapUnaryRecords opts, not multiCons, ts) of
(True,True,[(_,st,ty)]) -> argsToEncoding opts multiCons (NormalC conName [(st,ty)])
_ -> do
args <- mapM newName ["arg" ++ show n | (_, n) <- zip ts [1 :: Integer ..]]
let exp = object objBody
objBody = [|mconcat|] `appE`
([|intersperse (E.char7 ',')|] `appE` pairs)
pairs | omitNothingFields opts = infixApp maybeFields
[|(<>)|]
restFields
| otherwise = listE (map toPair argCons)
argCons = zip args ts
maybeFields = [|catMaybes|] `appE` listE (map maybeToPair maybes)
restFields = listE (map toPair rest)
(maybes, rest) = partition isMaybe argCons
maybeToPair (arg, (field, _, _)) =
infixApp (infixE (Just $ toFieldName field)
[|(.=)|]
Nothing)
[|(<$>)|]
(varE arg)
toPair (arg, (field, _, _)) =
toFieldName field <:> [|E.builder|] `appE` varE arg
toFieldName field = [|E.text|] `appE`
([|T.pack|] `appE` fieldLabelExp opts field)
match (conP conName $ map varP args)
( normalB
$ if multiCons
then case sumEncoding opts of
TwoElemArray -> array $
encStr opts conName <%> [|fromEncoding|] `appE` exp
TaggedObject{tagFieldName} -> object $
([|E.text (T.pack tagFieldName)|] <:>
encStr opts conName) <%>
objBody
ObjectWithSingleField -> object $
encStr opts conName <:> [|fromEncoding|] `appE` exp
else exp
) []
-- Infix constructors.
argsToEncoding opts multiCons (InfixC _ conName _) = do
al <- newName "argL"
ar <- newName "argR"
match (infixP (varP al) conName (varP ar))
( normalB
$ sumToEncoding opts multiCons conName
$ [|toEncoding|] `appE` listE [ [|toJSON|] `appE` varE a
| a <- [al,ar]
]
)
[]
-- Existentially quantified constructors.
argsToEncoding opts multiCons (ForallC _ _ con) =
argsToEncoding opts multiCons con
--------------------------------------------------------------------------------
-- FromJSON
--------------------------------------------------------------------------------
-- | Generates a 'FromJSON' instance declaration for the given data type.
deriveFromJSON :: Options
-- ^ Encoding options.
-> Name
-- ^ Name of the type for which to generate a 'FromJSON' instance
-- declaration.
-> Q [Dec]
deriveFromJSON opts name =
withType name $ \tvbs cons -> fmap (:[]) $ fromCons tvbs cons
where
fromCons :: [TyVarBndr] -> [Con] -> Q Dec
fromCons tvbs cons =
instanceD (applyCon ''FromJSON typeNames)
(classType `appT` instanceType)
[ funD 'parseJSON
[ clause []
(normalB $ consFromJSON name opts cons)
[]
]
]
where
classType = conT ''FromJSON
typeNames = map tvbName tvbs
instanceType = foldl' appT (conT name) $ map varT typeNames
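-- A usage sketch (illustrative only, mirroring the 'deriveToJSON' example):
--
-- > $(deriveFromJSON defaultOptions ''D)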
-- | Generates a lambda expression which parses the JSON encoding of the given
-- data type.
mkParseJSON :: Options -- ^ Encoding options.
-> Name -- ^ Name of the encoded type.
-> Q Exp
mkParseJSON opts name =
withType name (\_ cons -> consFromJSON name opts cons)
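-- A usage sketch (illustrative only, reusing the hypothetical @Person@ type
-- from the 'mkToJSON' sketch above):
--
-- > personParser :: Value -> Parser Person
-- > personParser = $(mkParseJSON defaultOptions ''Person)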
-- | Helper function used by both 'deriveFromJSON' and 'mkParseJSON'. Generates
-- code to parse the JSON encoding of a number of constructors. All constructors
-- must be from the same type.
consFromJSON :: Name
-- ^ Name of the type to which the constructors belong.
-> Options
-- ^ Encoding options
-> [Con]
-- ^ Constructors for which to generate JSON parsing code.
-> Q Exp
consFromJSON _ _ [] = error $ "Data.Aeson.TH.consFromJSON: "
++ "Not a single constructor given!"
consFromJSON tName opts [con] = do
value <- newName "value"
lam1E (varP value) (parseArgs tName opts con (Right value))
consFromJSON tName opts cons = do
value <- newName "value"
lam1E (varP value) $ caseE (varE value) $
if allNullaryToStringTag opts && all isNullary cons
then allNullaryMatches
else mixedMatches
where
allNullaryMatches =
[ do txt <- newName "txt"
match (conP 'String [varP txt])
(guardedB $
[ liftM2 (,) (normalG $
infixApp (varE txt)
[|(==)|]
([|T.pack|] `appE`
conStringE opts conName)
)
([|pure|] `appE` conE conName)
| con <- cons
, let conName = getConName con
]
++
[ liftM2 (,)
(normalG [|otherwise|])
( [|noMatchFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|T.unpack|] `appE` varE txt)
)
]
)
[]
, do other <- newName "other"
match (varP other)
(normalB $ [|noStringFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
mixedMatches =
case sumEncoding opts of
TaggedObject {tagFieldName, contentsFieldName} ->
parseObject $ parseTaggedObject tagFieldName contentsFieldName
ObjectWithSingleField ->
parseObject $ parseObjectWithSingleField
TwoElemArray ->
[ do arr <- newName "array"
match (conP 'Array [varP arr])
(guardedB $
[ liftM2 (,) (normalG $ infixApp ([|V.length|] `appE` varE arr)
[|(==)|]
(litE $ integerL 2))
(parse2ElemArray arr)
, liftM2 (,) (normalG [|otherwise|])
(([|not2ElemArray|]
`appE` (litE $ stringL $ show tName)
`appE` ([|V.length|] `appE` varE arr)))
]
)
[]
, do other <- newName "other"
match (varP other)
( normalB
$ [|noArrayFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
parseObject f =
[ do obj <- newName "obj"
match (conP 'Object [varP obj]) (normalB $ f obj) []
, do other <- newName "other"
match (varP other)
( normalB
$ [|noObjectFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
parseTaggedObject typFieldName valFieldName obj = do
conKey <- newName "conKey"
doE [ bindS (varP conKey)
(infixApp (varE obj)
[|(.:)|]
([|T.pack|] `appE` stringE typFieldName))
, noBindS $ parseContents conKey (Left (valFieldName, obj)) 'conNotFoundFailTaggedObject
]
parse2ElemArray arr = do
conKey <- newName "conKey"
conVal <- newName "conVal"
let letIx n ix =
valD (varP n)
(normalB ([|V.unsafeIndex|] `appE`
varE arr `appE`
litE (integerL ix)))
[]
letE [ letIx conKey 0
, letIx conVal 1
]
(caseE (varE conKey)
[ do txt <- newName "txt"
match (conP 'String [varP txt])
(normalB $ parseContents txt
(Right conVal)
'conNotFoundFail2ElemArray
)
[]
, do other <- newName "other"
match (varP other)
( normalB
$ [|firstElemNoStringFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|valueConName|] `appE` varE other)
)
[]
]
)
parseObjectWithSingleField obj = do
conKey <- newName "conKey"
conVal <- newName "conVal"
caseE ([e|H.toList|] `appE` varE obj)
[ match (listP [tupP [varP conKey, varP conVal]])
(normalB $ parseContents conKey (Right conVal) 'conNotFoundFailObjectSingleField)
[]
, do other <- newName "other"
match (varP other)
(normalB $ [|wrongPairCountFail|]
`appE` (litE $ stringL $ show tName)
`appE` ([|show . length|] `appE` varE other)
)
[]
]
parseContents conKey contents errorFun =
caseE (varE conKey)
[ match wildP
( guardedB $
[ do g <- normalG $ infixApp (varE conKey)
[|(==)|]
([|T.pack|] `appE`
conNameExp opts con)
e <- parseArgs tName opts con contents
return (g, e)
| con <- cons
]
++
[ liftM2 (,)
(normalG [e|otherwise|])
( varE errorFun
`appE` (litE $ stringL $ show tName)
`appE` listE (map ( litE
. stringL
. constructorTagModifier opts
. nameBase
. getConName
) cons
)
`appE` ([|T.unpack|] `appE` varE conKey)
)
]
)
[]
]
parseNullaryMatches :: Name -> Name -> [Q Match]
parseNullaryMatches tName conName =
[ do arr <- newName "arr"
match (conP 'Array [varP arr])
(guardedB $
[ liftM2 (,) (normalG $ [|V.null|] `appE` varE arr)
([|pure|] `appE` conE conName)
, liftM2 (,) (normalG [|otherwise|])
(parseTypeMismatch tName conName
(litE $ stringL "an empty Array")
(infixApp (litE $ stringL $ "Array of length ")
[|(++)|]
([|show . V.length|] `appE` varE arr)
)
)
]
)
[]
, matchFailed tName conName "Array"
]
parseUnaryMatches :: Name -> [Q Match]
parseUnaryMatches conName =
[ do arg <- newName "arg"
match (varP arg)
( normalB $ infixApp (conE conName)
[|(<$>)|]
([|parseJSON|] `appE` varE arg)
)
[]
]
parseRecord :: Options -> Name -> Name -> [VarStrictType] -> Name -> ExpQ
parseRecord opts tName conName ts obj =
foldl' (\a b -> infixApp a [|(<*>)|] b)
(infixApp (conE conName) [|(<$>)|] x)
xs
where
x:xs = [ [|lookupField|]
`appE` (litE $ stringL $ show tName)
`appE` (litE $ stringL $ constructorTagModifier opts $ nameBase conName)
`appE` (varE obj)
`appE` ( [|T.pack|] `appE` fieldLabelExp opts field
)
| (field, _, _) <- ts
]
getValField :: Name -> String -> [MatchQ] -> Q Exp
getValField obj valFieldName matches = do
val <- newName "val"
doE [ bindS (varP val) $ infixApp (varE obj)
[|(.:)|]
([|T.pack|] `appE`
(litE $ stringL valFieldName))
, noBindS $ caseE (varE val) matches
]
-- | Generates code to parse the JSON encoding of a single constructor.
parseArgs :: Name -- ^ Name of the type to which the constructor belongs.
-> Options -- ^ Encoding options.
-> Con -- ^ Constructor for which to generate JSON parsing code.
-> Either (String, Name) Name -- ^ Left (valFieldName, objName) or
-- Right valName
-> Q Exp
-- Nullary constructors.
parseArgs tName _ (NormalC conName []) (Left (valFieldName, obj)) =
getValField obj valFieldName $ parseNullaryMatches tName conName
parseArgs tName _ (NormalC conName []) (Right valName) =
caseE (varE valName) $ parseNullaryMatches tName conName
-- Unary constructors.
parseArgs _ _ (NormalC conName [_]) (Left (valFieldName, obj)) =
getValField obj valFieldName $ parseUnaryMatches conName
parseArgs _ _ (NormalC conName [_]) (Right valName) =
caseE (varE valName) $ parseUnaryMatches conName
-- Polyadic constructors.
parseArgs tName _ (NormalC conName ts) (Left (valFieldName, obj)) =
getValField obj valFieldName $ parseProduct tName conName $ genericLength ts
parseArgs tName _ (NormalC conName ts) (Right valName) =
caseE (varE valName) $ parseProduct tName conName $ genericLength ts
-- Records.
parseArgs tName opts (RecC conName ts) (Left (_, obj)) =
parseRecord opts tName conName ts obj
parseArgs tName opts (RecC conName ts) (Right valName) = case (unwrapUnaryRecords opts,ts) of
(True,[(_,st,ty)])-> parseArgs tName opts (NormalC conName [(st,ty)]) (Right valName)
_ -> do
obj <- newName "recObj"
caseE (varE valName)
[ match (conP 'Object [varP obj]) (normalB $ parseRecord opts tName conName ts obj) []
, matchFailed tName conName "Object"
]
-- Infix constructors. Apart from syntax these are the same as
-- polyadic constructors.
parseArgs tName _ (InfixC _ conName _) (Left (valFieldName, obj)) =
getValField obj valFieldName $ parseProduct tName conName 2
parseArgs tName _ (InfixC _ conName _) (Right valName) =
caseE (varE valName) $ parseProduct tName conName 2
-- Existentially quantified constructors. We ignore the quantifiers
-- and proceed with the contained constructor.
parseArgs tName opts (ForallC _ _ con) contents =
parseArgs tName opts con contents
-- | Generates code to parse the JSON encoding of an n-ary
-- constructor.
parseProduct :: Name -- ^ Name of the type to which the constructor belongs.
-> Name -- ^ 'Con'structor name.
-> Integer -- ^ 'Con'structor arity.
-> [Q Match]
parseProduct tName conName numArgs =
[ do arr <- newName "arr"
-- List of: "parseJSON (arr `V.unsafeIndex` <IX>)"
let x:xs = [ [|parseJSON|]
`appE`
infixApp (varE arr)
[|V.unsafeIndex|]
(litE $ integerL ix)
| ix <- [0 .. numArgs - 1]
]
match (conP 'Array [varP arr])
(normalB $ condE ( infixApp ([|V.length|] `appE` varE arr)
[|(==)|]
(litE $ integerL numArgs)
)
( foldl' (\a b -> infixApp a [|(<*>)|] b)
(infixApp (conE conName) [|(<$>)|] x)
xs
)
( parseTypeMismatch tName conName
(litE $ stringL $ "Array of length " ++ show numArgs)
( infixApp (litE $ stringL $ "Array of length ")
[|(++)|]
([|show . V.length|] `appE` varE arr)
)
)
)
[]
, matchFailed tName conName "Array"
]
--------------------------------------------------------------------------------
-- Parsing errors
--------------------------------------------------------------------------------
matchFailed :: Name -> Name -> String -> MatchQ
matchFailed tName conName expected = do
other <- newName "other"
match (varP other)
( normalB $ parseTypeMismatch tName conName
(litE $ stringL expected)
([|valueConName|] `appE` varE other)
)
[]
parseTypeMismatch :: Name -> Name -> ExpQ -> ExpQ -> ExpQ
parseTypeMismatch tName conName expected actual =
foldl appE
[|parseTypeMismatch'|]
[ litE $ stringL $ nameBase conName
, litE $ stringL $ show tName
, expected
, actual
]
class (FromJSON a) => LookupField a where
lookupField :: String -> String -> Object -> T.Text -> Parser a
instance (FromJSON a) => LookupField a where
lookupField tName rec obj key =
case H.lookup key obj of
Nothing -> unknownFieldFail tName rec (T.unpack key)
Just v -> parseJSON v
instance (FromJSON a) => LookupField (Maybe a) where
lookupField _ _ = (.:?)
unknownFieldFail :: String -> String -> String -> Parser fail
unknownFieldFail tName rec key =
fail $ printf "When parsing the record %s of type %s the key %s was not present."
rec tName key
noArrayFail :: String -> String -> Parser fail
noArrayFail t o = fail $ printf "When parsing %s expected Array but got %s." t o
noObjectFail :: String -> String -> Parser fail
noObjectFail t o = fail $ printf "When parsing %s expected Object but got %s." t o
firstElemNoStringFail :: String -> String -> Parser fail
firstElemNoStringFail t o = fail $ printf "When parsing %s expected an Array of 2 elements where the first element is a String but got %s at the first element." t o
wrongPairCountFail :: String -> String -> Parser fail
wrongPairCountFail t n =
fail $ printf "When parsing %s expected an Object with a single tag/contents pair but got %s pairs."
t n
noStringFail :: String -> String -> Parser fail
noStringFail t o = fail $ printf "When parsing %s expected String but got %s." t o
noMatchFail :: String -> String -> Parser fail
noMatchFail t o =
fail $ printf "When parsing %s expected a String with the tag of a constructor but got %s." t o
not2ElemArray :: String -> Int -> Parser fail
not2ElemArray t i = fail $ printf "When parsing %s expected an Array of 2 elements but got %i elements" t i
conNotFoundFail2ElemArray :: String -> [String] -> String -> Parser fail
conNotFoundFail2ElemArray t cs o =
fail $ printf "When parsing %s expected a 2-element Array with a tag and contents element where the tag is one of [%s], but got %s."
t (intercalate ", " cs) o
conNotFoundFailObjectSingleField :: String -> [String] -> String -> Parser fail
conNotFoundFailObjectSingleField t cs o =
fail $ printf "When parsing %s expected an Object with a single tag/contents pair where the tag is one of [%s], but got %s."
t (intercalate ", " cs) o
conNotFoundFailTaggedObject :: String -> [String] -> String -> Parser fail
conNotFoundFailTaggedObject t cs o =
fail $ printf "When parsing %s expected an Object with a tag field where the value is one of [%s], but got %s."
t (intercalate ", " cs) o
parseTypeMismatch' :: String -> String -> String -> String -> Parser fail
parseTypeMismatch' conName tName expected actual =
fail $ printf "When parsing the constructor %s of type %s expected %s but got %s."
conName tName expected actual
--------------------------------------------------------------------------------
-- Utility functions
--------------------------------------------------------------------------------
-- | Boilerplate for top level splices.
--
-- The given 'Name' must be from a type constructor. Furthermore, the
-- type constructor must be either a data type or a newtype. Any other
-- value will result in an exception.
withType :: Name
-> ([TyVarBndr] -> [Con] -> Q a)
-- ^ Function that generates the actual code. Will be applied
-- to the type variable binders and constructors extracted
-- from the given 'Name'.
-> Q a
-- ^ Resulting value in the 'Q'uasi monad.
withType name f = do
info <- reify name
case info of
TyConI dec ->
case dec of
DataD _ _ tvbs cons _ -> f tvbs cons
NewtypeD _ _ tvbs con _ -> f tvbs [con]
other -> error $ "Data.Aeson.TH.withType: Unsupported type: "
++ show other
_ -> error "Data.Aeson.TH.withType: I need the name of a type."
-- | Extracts the name from a constructor.
getConName :: Con -> Name
getConName (NormalC name _) = name
getConName (RecC name _) = name
getConName (InfixC _ name _) = name
getConName (ForallC _ _ con) = getConName con
-- | Extracts the name from a type variable binder.
tvbName :: TyVarBndr -> Name
tvbName (PlainTV name ) = name
tvbName (KindedTV name _) = name
-- | Makes a string literal expression from a constructor's name.
conNameExp :: Options -> Con -> Q Exp
conNameExp opts = litE
. stringL
. constructorTagModifier opts
. nameBase
. getConName
-- | Creates a string literal expression from a record field label.
fieldLabelExp :: Options -- ^ Encoding options
-> Name
-> Q Exp
fieldLabelExp opts = litE . stringL . fieldLabelModifier opts . nameBase
-- | The name of the outermost 'Value' constructor.
valueConName :: Value -> String
valueConName (Object _) = "Object"
valueConName (Array _) = "Array"
valueConName (String _) = "String"
valueConName (Number _) = "Number"
valueConName (Bool _) = "Boolean"
valueConName Null = "Null"
applyCon :: Name -> [Name] -> Q [Pred]
applyCon con typeNames = return (map apply typeNames)
where apply t =
#if MIN_VERSION_template_haskell(2,10,0)
AppT (ConT con) (VarT t)
#else
ClassP con [VarT t]
#endif
|
abbradar/aeson
|
Data/Aeson/TH.hs
|
bsd-3-clause
| 41,015 | 0 | 24 | 15,314 | 9,613 | 5,206 | 4,407 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Language.Haskell.Liquid.GhcInterface (
-- * extract all information needed for verification
getGhcInfo
) where
import IdInfo
import InstEnv
import Bag (bagToList)
import ErrUtils
import GHC hiding (Target, desugarModule)
import DriverPhases (Phase(..))
import DriverPipeline (compileFile)
import Text.PrettyPrint.HughesPJ
import HscTypes hiding (Target)
import CoreSyn
import Class
import Var
import CoreMonad (liftIO)
import DataCon
import qualified Control.Exception as Ex
import GHC.Paths (libdir)
import System.FilePath ( replaceExtension, normalise)
import DynFlags
import Control.Monad (filterM, foldM, when, forM, forM_, liftM)
import Control.Applicative hiding (empty)
import Data.Monoid hiding ((<>))
import Data.List (find, nub)
import Data.Maybe (catMaybes, maybeToList)
import qualified Data.HashSet as S
import System.Console.CmdArgs.Verbosity (whenLoud)
import System.Directory (removeFile, createDirectoryIfMissing, doesFileExist)
import Language.Fixpoint.Types hiding (Result, Expr)
import Language.Fixpoint.Misc
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.Errors
import Language.Haskell.Liquid.ANFTransform
import Language.Haskell.Liquid.Bare
import Language.Haskell.Liquid.GhcMisc
import Language.Haskell.Liquid.Misc
import Language.Haskell.Liquid.PrettyPrint
import Language.Haskell.Liquid.Visitors
import Language.Haskell.Liquid.CmdLine (withCabal, withPragmas)
import Language.Haskell.Liquid.Parse
import qualified Language.Haskell.Liquid.Measure as Ms
import Language.Fixpoint.Names
import Language.Fixpoint.Files
--------------------------------------------------------------------
getGhcInfo :: Config -> FilePath -> IO (Either ErrorResult GhcInfo)
--------------------------------------------------------------------
getGhcInfo cfg target = (Right <$> getGhcInfo' cfg target)
`Ex.catch` (\(e :: SourceError) -> handle e)
`Ex.catch` (\(e :: Error) -> handle e)
`Ex.catch` (\(e :: [Error]) -> handle e)
where
handle = return . Left . result
getGhcInfo' cfg0 target
= runGhc (Just libdir) $ do
liftIO $ cleanFiles target
addTarget =<< guessTarget target Nothing
(name,tgtSpec) <- liftIO $ parseSpec target
cfg <- liftIO $ withPragmas cfg0 target $ Ms.pragmas tgtSpec
cfg <- liftIO $ withCabal cfg
let paths = idirs cfg
updateDynFlags cfg
liftIO $ whenLoud $ putStrLn ("paths = " ++ show paths)
let name' = ModName Target (getModName name)
impNames <- allDepNames <$> depanal [] False
impSpecs <- getSpecs (real cfg) (totality cfg) target paths impNames [Spec, Hs, LHs]
compileCFiles =<< liftIO (foldM (\c (f,_,s) -> withPragmas c f (Ms.pragmas s)) cfg impSpecs)
impSpecs' <- forM impSpecs $ \(f,n,s) -> do
when (not $ isSpecImport n) $
addTarget =<< guessTarget f Nothing
return (n,s)
load LoadAllTargets
modguts <- getGhcModGuts1 target
hscEnv <- getSession
coreBinds <- liftIO $ anormalize (not $ nocaseexpand cfg) hscEnv modguts
let datacons = [ dataConWorkId dc
| tc <- mgi_tcs modguts
, dc <- tyConDataCons tc
]
let impVs = importVars coreBinds ++ classCons (mgi_cls_inst modguts)
let defVs = definedVars coreBinds
let useVs = readVars coreBinds
let letVs = letVars coreBinds
let derVs = derivedVars coreBinds $ fmap (fmap is_dfun) $ mgi_cls_inst modguts
logicmap <- liftIO makeLogicMap
(spec, imps, incs) <- moduleSpec cfg coreBinds (impVs ++ defVs) letVs name' modguts tgtSpec logicmap impSpecs'
liftIO $ whenLoud $ putStrLn $ "Module Imports: " ++ show imps
hqualFiles <- moduleHquals modguts paths target imps incs
return $ GI hscEnv coreBinds derVs impVs (letVs ++ datacons) useVs hqualFiles imps incs spec
makeLogicMap
= do lg <- getCoreToLogicPath
lspec <- readFile lg
return $ parseSymbolToLogic lg lspec
classCons :: Maybe [ClsInst] -> [Id]
classCons Nothing = []
classCons (Just cs) = concatMap (dataConImplicitIds . head . tyConDataCons . classTyCon . is_cls) cs
derivedVars :: CoreProgram -> Maybe [DFunId] -> [Id]
derivedVars cbs (Just fds) = concatMap (derivedVs cbs) fds
derivedVars _ Nothing = []
derivedVs :: CoreProgram -> DFunId -> [Id]
derivedVs cbs fd = concatMap bindersOf cbf ++ deps
where cbf = filter f cbs
f (NonRec x _) = eqFd x
f (Rec xes ) = any eqFd (fst <$> xes)
eqFd x = varName x == varName fd
deps :: [Id]
deps = concatMap dep $ (unfoldingInfo . idInfo <$> concatMap bindersOf cbf)
dep (DFunUnfolding _ _ e) = concatMap grapDep e
dep (CoreUnfolding {uf_tmpl = e}) = grapDep e
dep _ = []
grapDep :: CoreExpr -> [Id]
grapDep e = freeVars S.empty e
updateDynFlags cfg
= do df <- getSessionDynFlags
let df' = df { importPaths = idirs cfg ++ importPaths df
, libraryPaths = idirs cfg ++ libraryPaths df
, includePaths = idirs cfg ++ includePaths df
, profAuto = ProfAutoCalls
, ghcLink = LinkInMemory
--FIXME: this *should* be HscNothing, but that prevents us from
-- looking up *unexported* names in another source module..
, hscTarget = HscInterpreted -- HscNothing
, ghcMode = CompManager
-- prevent GHC from printing anything
, log_action = \_ _ _ _ _ -> return ()
-- , verbosity = 3
} `xopt_set` Opt_MagicHash
-- `gopt_set` Opt_Hpc
`gopt_set` Opt_ImplicitImportQualified
`gopt_set` Opt_PIC
#if __GLASGOW_HASKELL__ >= 710
`gopt_set` Opt_Debug
#endif
(df'',_,_) <- parseDynamicFlags df' (map noLoc $ ghcOptions cfg)
setSessionDynFlags $ df'' -- {profAuto = ProfAutoAll}
compileCFiles cfg
= do df <- getSessionDynFlags
setSessionDynFlags $ df { includePaths = nub $ idirs cfg ++ includePaths df
, importPaths = nub $ idirs cfg ++ importPaths df
, libraryPaths = nub $ idirs cfg ++ libraryPaths df }
hsc <- getSession
os <- mapM (\x -> liftIO $ compileFile hsc StopLn (x,Nothing)) (nub $ cFiles cfg)
df <- getSessionDynFlags
setSessionDynFlags $ df { ldInputs = map (FileOption "") os ++ ldInputs df }
mgi_namestring = moduleNameString . moduleName . mgi_module
importVars = freeVars S.empty
definedVars = concatMap defs
where
defs (NonRec x _) = [x]
defs (Rec xes) = map fst xes
------------------------------------------------------------------
-- | Extracting CoreBindings From File ---------------------------
------------------------------------------------------------------
getGhcModGuts1 :: FilePath -> Ghc MGIModGuts
getGhcModGuts1 fn = do
modGraph <- getModuleGraph
case find ((== fn) . msHsFilePath) modGraph of
Just modSummary -> do
-- mod_guts <- modSummaryModGuts modSummary
mod_p <- parseModule modSummary
mod_guts <- coreModule <$> (desugarModule =<< typecheckModule (ignoreInline mod_p))
let deriv = getDerivedDictionaries mod_guts
return $! (miModGuts (Just deriv) mod_guts)
Nothing -> exitWithPanic "Ghc Interface: Unable to get GhcModGuts"
getDerivedDictionaries cm = instEnvElts $ mg_inst_env cm
cleanFiles :: FilePath -> IO ()
cleanFiles fn
= do forM_ bins (tryIgnore "delete binaries" . removeFileIfExists)
tryIgnore "create temp directory" $ createDirectoryIfMissing False dir
where
bins = replaceExtension fn <$> ["hi", "o"]
dir = tempDirectory fn
removeFileIfExists f = doesFileExist f >>= (`when` removeFile f)
--------------------------------------------------------------------------------
-- | Desugaring (Taken from GHC, modified to hold onto Loc in Ticks) -----------
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- | Extracting Qualifiers -----------------------------------------------------
--------------------------------------------------------------------------------
moduleHquals mg paths target imps incs
= do hqs <- specIncludes Hquals paths incs
hqs' <- moduleImports [Hquals] paths (mgi_namestring mg : imps)
hqs'' <- liftIO $ filterM doesFileExist [extFileName Hquals target]
let rv = sortNub $ hqs'' ++ hqs ++ (snd <$> hqs')
liftIO $ whenLoud $ putStrLn $ "Reading Qualifiers From: " ++ show rv
return rv
--------------------------------------------------------------------------------
-- | Extracting Specifications (Measures + Assumptions) ------------------------
--------------------------------------------------------------------------------
moduleSpec cfg cbs vars defVars target mg tgtSpec logicmap impSpecs
= do addImports impSpecs
addContext $ IIModule $ moduleName $ mgi_module mg
env <- getSession
let specs = (target,tgtSpec):impSpecs
let imps = sortNub $ impNames ++ [ symbolString x
| (_,spec) <- specs
, x <- Ms.imports spec
]
ghcSpec <- liftIO $ makeGhcSpec cfg target cbs vars defVars exports env logicmap specs
return (ghcSpec, imps, Ms.includes tgtSpec)
where
exports = mgi_exports mg
impNames = map (getModString.fst) impSpecs
addImports = mapM (addContext . IIDecl . qualImportDecl . getModName . fst)
allDepNames = concatMap (map declNameString . ms_textual_imps)
declNameString = moduleNameString . unLoc . ideclName . unLoc
patErrorName = "PatErr"
realSpecName = "Real"
notRealSpecName = "NotReal"
getSpecs rflag tflag target paths names exts
= do fs' <- sortNub <$> moduleImports exts paths names
patSpec <- getPatSpec paths tflag
rlSpec <- getRealSpec paths rflag
let fs = patSpec ++ rlSpec ++ fs'
liftIO $ whenLoud $ putStrLn ("getSpecs: " ++ show fs)
transParseSpecs exts paths (S.singleton target) mempty (map snd fs)
getPatSpec paths totalitycheck
| totalitycheck
= (map (patErrorName, )) . maybeToList <$> moduleFile paths patErrorName Spec
| otherwise
= return []
getRealSpec paths freal
| freal
= (map (realSpecName, )) . maybeToList <$> moduleFile paths realSpecName Spec
| otherwise
= (map (notRealSpecName, )) . maybeToList <$> moduleFile paths notRealSpecName Spec
transParseSpecs _ _ _ specs []
= return specs
transParseSpecs exts paths seenFiles specs newFiles
= do newSpecs <- liftIO $ mapM (\f -> addFst3 f <$> parseSpec f) newFiles
impFiles <- moduleImports exts paths $ specsImports newSpecs
let seenFiles' = seenFiles `S.union` (S.fromList newFiles)
let specs' = specs ++ map (third noTerm) newSpecs
let newFiles' = [f | (_,f) <- impFiles, not (f `S.member` seenFiles')]
transParseSpecs exts paths seenFiles' specs' newFiles'
where
specsImports ss = nub $ concatMap (map symbolString . Ms.imports . thd3) ss
noTerm spec = spec { Ms.decr=mempty, Ms.lazy=mempty, Ms.termexprs=mempty }
third f (a,b,c) = (a,b,f c)
parseSpec :: FilePath -> IO (ModName, Ms.BareSpec)
parseSpec file
= do whenLoud $ putStrLn $ "parseSpec: " ++ file
either Ex.throw return . specParser file =<< readFile file
specParser file str
| isExtFile Spec file = specSpecificationP file str
| isExtFile Hs file = hsSpecificationP file str
| isExtFile LHs file = lhsSpecificationP file str
| otherwise = exitWithPanic $ "SpecParser: Cannot Parse File " ++ file
moduleImports :: GhcMonad m => [Ext] -> [FilePath] -> [String] -> m [(String, FilePath)]
moduleImports exts paths names
= liftM concat $ forM names $ \name -> do
map (name,) . catMaybes <$> mapM (moduleFile paths name) exts
moduleFile :: GhcMonad m => [FilePath] -> String -> Ext -> m (Maybe FilePath)
moduleFile paths name ext
| ext `elem` [Hs, LHs]
= do mg <- getModuleGraph
case find ((==name) . moduleNameString . ms_mod_name) mg of
Nothing -> liftIO $ getFileInDirs (extModuleName name ext) paths
Just ms -> return $ normalise <$> ml_hs_file (ms_location ms)
| otherwise
= liftIO $ getFileInDirs (extModuleName name ext) paths
specIncludes :: GhcMonad m => Ext -> [FilePath] -> [FilePath] -> m [FilePath]
specIncludes ext paths reqs
= do let libFile = extFileNameR ext $ symbolString preludeName
let incFiles = catMaybes $ reqFile ext <$> reqs
liftIO $ forM (libFile : incFiles) $ \f -> do
mfile <- getFileInDirs f paths
case mfile of
Just file -> return file
Nothing -> errorstar $ "cannot find " ++ f ++ " in " ++ show paths
reqFile ext s
| isExtFile ext s
= Just s
| otherwise
= Nothing
instance PPrint GhcSpec where
pprint spec = (text "******* Target Variables ********************")
$$ (pprint $ tgtVars spec)
$$ (text "******* Type Signatures *********************")
$$ (pprintLongList $ tySigs spec)
$$ (text "******* Assumed Type Signatures *************")
$$ (pprintLongList $ asmSigs spec)
$$ (text "******* DataCon Specifications (Measure) ****")
$$ (pprintLongList $ ctors spec)
$$ (text "******* Measure Specifications **************")
$$ (pprintLongList $ meas spec)
instance PPrint GhcInfo where
pprint info = (text "*************** Imports *********************")
$+$ (intersperse comma $ text <$> imports info)
$+$ (text "*************** Includes ********************")
$+$ (intersperse comma $ text <$> includes info)
$+$ (text "*************** Imported Variables **********")
$+$ (pprDoc $ impVars info)
$+$ (text "*************** Defined Variables ***********")
$+$ (pprDoc $ defVars info)
$+$ (text "*************** Specification ***************")
$+$ (pprint $ spec info)
$+$ (text "*************** Core Bindings ***************")
$+$ (pprint $ cbs info)
instance Show GhcInfo where
show = showpp
instance PPrint [CoreBind] where
pprint = pprDoc . tidyCBs
instance PPrint TargetVars where
pprint AllVars = text "All Variables"
pprint (Only vs) = text "Only Variables: " <+> pprint vs
------------------------------------------------------------------------
-- Dealing With Errors -------------------------------------------------
------------------------------------------------------------------------
-- | Convert a GHC error into one of ours
instance Result SourceError where
result = (`Crash` "Invalid Source")
. concatMap errMsgErrors
. bagToList
. srcErrorMessages
errMsgErrors e = [ ErrGhc (errMsgSpan e) (pprint e)]
|
mightymoose/liquidhaskell
|
src/Language/Haskell/Liquid/GhcInterface.hs
|
bsd-3-clause
| 15,977 | 0 | 20 | 4,263 | 4,325 | 2,204 | 2,121 | 287 | 4 |
module GoToSymbolFunction_SymbolOnDeclaration where
test :: Int
test = let s<caret>even = 7 in
seven + 1
|
charleso/intellij-haskforce
|
tests/gold/codeInsight/GoToSymbolFunction_SymbolOnDeclaration.hs
|
apache-2.0
| 112 | 2 | 9 | 23 | 38 | 19 | 19 | -1 | -1 |
{-| Some utility functions, based on the Confd client, providing data
in a ready-to-use way.
-}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Confd.ClientFunctions
( getInstances
, getInstanceDisks
) where
import Control.Monad (liftM)
import qualified Text.JSON as J
import Ganeti.BasicTypes as BT
import Ganeti.Confd.Types
import Ganeti.Confd.Client
import Ganeti.Objects
-- | Get the instances for which the given node is primary or secondary,
-- returned as a ([primary], [secondary]) pair.
-- The server address and the server port parameters are mainly intended
-- for testing purposes. If they are Nothing, the default values will be used.
getInstances
:: String
-> Maybe String
-> Maybe Int
-> BT.ResultT String IO ([Ganeti.Objects.Instance], [Ganeti.Objects.Instance])
getInstances node srvAddr srvPort = do
client <- liftIO $ getConfdClient srvAddr srvPort
reply <- liftIO . query client ReqNodeInstances $ PlainQuery node
case fmap (J.readJSON . confdReplyAnswer) reply of
Just (J.Ok instances) -> return instances
Just (J.Error msg) -> fail msg
Nothing -> fail "No answer from the Confd server"
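-- A usage sketch (the node name is illustrative; the call runs in the
-- 'BT.ResultT' monad, so it belongs inside such a do-block):
--
-- > (primaries, secondaries) <- getInstances "node1.example.com" Nothing Nothing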
-- | Get the list of disks that belong to a given instance.
-- The server address and the server port parameters are mainly intended
-- for testing purposes. If they are Nothing, the default values will be used.
getDisks
:: Ganeti.Objects.Instance
-> Maybe String
-> Maybe Int
-> BT.ResultT String IO [Ganeti.Objects.Disk]
getDisks inst srvAddr srvPort = do
client <- liftIO $ getConfdClient srvAddr srvPort
reply <- liftIO . query client ReqInstanceDisks . PlainQuery . uuidOf $ inst
case fmap (J.readJSON . confdReplyAnswer) reply of
Just (J.Ok disks) -> return disks
Just (J.Error msg) -> fail msg
Nothing -> fail "No answer from the Confd server"
-- | Get the list of instances on the given node along with their disks.
-- The server address and the server port parameters are mainly intended
-- for testing purposes. If they are Nothing, the default values will be used.
getInstanceDisks
:: String
-> Maybe String
-> Maybe Int
-> BT.ResultT String IO [(Ganeti.Objects.Instance, [Ganeti.Objects.Disk])]
getInstanceDisks node srvAddr srvPort =
liftM (uncurry (++)) (getInstances node srvAddr srvPort) >>=
mapM (\i -> liftM ((,) i) (getDisks i srvAddr srvPort))
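-- A usage sketch (illustrative node name; passing Nothing for the address and
-- port selects the default Confd server):
--
-- > instancesWithDisks <- getInstanceDisks "node1.example.com" Nothing Nothing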
|
yiannist/ganeti
|
src/Ganeti/Confd/ClientFunctions.hs
|
bsd-2-clause
| 3,572 | 0 | 13 | 616 | 530 | 275 | 255 | 41 | 3 |
{-# LANGUAGE CPP #-}
{- |
Module : $Header$
Description : cpp choice between "GUI.ProofManagement" and "GUI.GtkProverGUI"
Copyright : (c) C. Maeder, Uni Bremen 2002-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (cpp)
cpp choice between "GUI.ProofManagement" and "GUI.GtkProverGUI"
-}
module GUI.ProverGUI
( proverGUI ) where
import Logic.Comorphism
import Static.GTheory
import Common.Result as Result
import Proofs.AbstractState
import qualified Comorphisms.KnownProvers as KnownProvers
#ifdef GTKGLADE
import GUI.GtkProverGUI
#elif defined UNI_PACKAGE
import Control.Concurrent
import GUI.HTkProverGUI
#endif
proverGUI :: ProofActions -- ^ record of possible GUI actions
-> String -- ^ theory name
-> String -- ^ warning information
-> G_theory -- ^ theory
-> KnownProvers.KnownProversMap -- ^ map of known provers
-> [(G_prover, AnyComorphism)] -- ^ list of suitable provers and comorphisms
-> IO (Result.Result G_theory)
#ifdef GTKGLADE
proverGUI = showProverGUI
#elif defined UNI_PACKAGE
proverGUI prGuiAcs thName warningTxt th knownProvers comorphList = do
guiMVar <- newMVar Nothing
proofManagementGUI prGuiAcs thName warningTxt th knownProvers comorphList
guiMVar
#else
proverGUI = error "not implemented"
#endif
|
keithodulaigh/Hets
|
GUI/ProverGUI.hs
|
gpl-2.0
| 1,379 | 0 | 14 | 235 | 116 | 74 | 42 | 16 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- |
-- Module : Data.Array.Accelerate.CUDA.CodeGen.Mapping
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2009..2014] Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Trevor L. McDonell <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.CUDA.CodeGen.Mapping (
mkMap,
) where
import Language.C.Quote.CUDA
import Foreign.CUDA.Analysis.Device
import Data.Array.Accelerate.Array.Sugar ( Array, Shape, Elt )
import Data.Array.Accelerate.CUDA.AST
import Data.Array.Accelerate.CUDA.CodeGen.Base
-- Apply the given unary function to each element of an array. Each thread
-- processes multiple elements, striding the array by the grid size.
--
-- map :: (Shape sh, Elt a, Elt b)
-- => (Exp a -> Exp b)
-- -> Acc (Array sh a)
-- -> Acc (Array sh b)
--
mkMap :: forall aenv sh a b. (Shape sh, Elt a, Elt b)
=> DeviceProperties
-> Gamma aenv
-> CUFun1 aenv (a -> b)
-> CUDelayedAcc aenv sh a
-> [CUTranslSkel aenv (Array sh b)]
mkMap dev aenv fun arr
| CUFun1 dce f <- fun
, CUDelayed _ _ (CUFun1 _ get) <- arr
= return
$ CUTranslSkel "map" [cunit|
$esc:("#include <accelerate_cuda.h>")
$edecls:texIn
extern "C" __global__ void
map
(
$params:argIn,
$params:argOut
)
{
const int shapeSize = $exp:(csize shOut);
const int gridSize = $exp:(gridSize dev);
int ix;
for ( ix = $exp:(threadIdx dev)
; ix < shapeSize
; ix += gridSize )
{
$items:(dce x .=. get ix)
$items:(setOut "ix" .=. f x)
}
}
|]
where
(texIn, argIn) = environment dev aenv
(argOut, shOut, setOut) = writeArray "Out" (undefined :: Array sh b)
(x, _, _) = locals "x" (undefined :: a)
ix = [cvar "ix"]
|
flowbox-public/accelerate-cuda
|
Data/Array/Accelerate/CUDA/CodeGen/Mapping.hs
|
bsd-3-clause
| 2,221 | 0 | 14 | 686 | 329 | 197 | 132 | 27 | 1 |
module Concat () where
import Language.Haskell.Liquid.Prelude
------------------------------------------------------------
------------ Longer Version of neg/polypred.hs -------------
------------------------------------------------------------
foo :: [[Int]]
foo = [[choose 1], [choose 2]]
-- concatmap f ls = concat $ map f ls
myconcat [] = []
myconcat (x:xs) = x ++ (myconcat xs)
myconcat1 :: a -> [[Int]] -> [Int]
myconcat1 _ [] = []
myconcat1 f (x:xs) = x ++ (myconcat1 f xs)
concat1 f = concat
myconcat2 f = myconcat
r :: Int
r = 5
prop x = liquidAssertB (x == r)
-- ok
-- propC0 = map prop $ myconcat foo
-- this is safe
-- propC1 = map prop $ myconcat foo
-- propC2 = map prop $ concat foo
-- propC3 = map prop $ concat1 id foo
propC4 = map prop $ myconcat1 id foo
|
mightymoose/liquidhaskell
|
tests/neg/concat1.hs
|
bsd-3-clause
| 797 | 0 | 8 | 157 | 221 | 125 | 96 | 15 | 1 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, GeneralizedNewtypeDeriving, OverlappingInstances, UndecidableInstances #-}
module Main where
import Control.Applicative
class (Monad m) => MonadIO m where
-- | Lift a computation from the 'IO' monad.
liftIO :: IO a -> m a
instance MonadIO IO where
liftIO = id
class XMLGenerator m where
genElement :: (Maybe String, String) -> m ()
newtype IdentityT m a = IdentityT { runIdentityT :: m a }
deriving (Functor, Applicative, Monad, MonadIO)
instance (MonadIO m) => (XMLGenerator (IdentityT m)) where
genElement _ = liftIO $ putStrLn "in genElement"
main :: IO ()
main =
do runIdentityT web
putStrLn "done."
class (Widgets x) => MonadRender x
class (XMLGenerator m) => Widgets m
-- instance Widgets (IdentityT IO) -- if you uncomment this, it will work
instance MonadRender m => Widgets m
instance MonadRender (IdentityT IO)
web :: ( MonadIO m
, Widgets m
, XMLGenerator m
) => m ()
web =
do liftIO $ putStrLn "before"
genElement (Nothing, "p")
return ()
|
manyoo/ghcjs
|
test/ghc/typecheck/t5751.hs
|
mit
| 1,085 | 1 | 10 | 245 | 323 | 163 | 160 | -1 | -1 |
module Parser
(
parseExpr
, parseExprList
) where
import Control.Applicative ((<$>), (<*>), (<|>))
import Text.ParserCombinators.Parsec hiding (spaces, (<|>))
import Types
spaces :: Parser ()
spaces = skipMany1 space
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_~"
parseExpr :: Parser LispVal
parseExpr =
parseAtom
<|> parseString
<|> parseNumber
<|> parseQuoted
<|> do
_ <- char '('
x <- try parseList <|> parseDottedList
_ <- char ')'
return x
parseExprList :: Parser [LispVal]
parseExprList = endBy parseExpr spaces
parseStringChar :: Parser Char
parseStringChar =
(char '\\' >> (
(char 'n' >> return '\n')
<|> (char 'r' >> return '\r')
<|> (char 't' >> return '\t')
<|> char '\\'
<|> char '"'
))
<|> noneOf "\""
parseString :: Parser LispVal
parseString = do
_ <- char '"'
s <- many parseStringChar
_ <- char '"'
return $ String s
parseAtom :: Parser LispVal
parseAtom = do
first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
let atom = first:rest
return $ case atom of
"#t" -> Bool True
"#f" -> Bool False
_ -> Atom atom
parseNumber :: Parser LispVal
parseNumber = Number . read <$> many1 digit
parseList :: Parser LispVal
parseList = List <$> sepBy parseExpr spaces
parseDottedList :: Parser LispVal
parseDottedList = DottedList
<$> endBy parseExpr spaces
<*> (char '.' >> spaces >> parseExpr)
parseQuoted :: Parser LispVal
parseQuoted = List . (Atom "quote" :) . (:[]) <$> (char '\'' >> parseExpr)
|
frasertweedale/wyas
|
src/Parser.hs
|
mit
| 1,553 | 0 | 15 | 355 | 555 | 279 | 276 | 58 | 3 |
-- Phone Directory
-- https://www.codewars.com/kata/56baeae7022c16dd7400086e
module Codewars.G964.Phonedir(phone) where
import Data.Char(isDigit)
import Data.List(intercalate)
import Data.Maybe(listToMaybe, mapMaybe)
import Control.Applicative((<|>))
import Text.ParserCombinators.ReadP(readP_to_S, satisfy, get, many, char, many1, pfail, sepBy1, between)
data Person = Person { name :: String, pNumber :: String, address :: String}
instance Show Person where
show (Person n p a) = "Phone => " ++ p ++ ", " ++
"Name => " ++ n ++ ", " ++
"Address => " ++ a
addressChars = ['0'..'9'] ++ ['A'..'Z'] ++ ['a'..'z'] ++ ".-"
process = fmap record . listToMaybe . map fst . filter (null . snd) . readP_to_S phoneNameRest
where phoneNameRest = many get >>= \a -> nameOrPhone >>= \b -> many get >>= \c -> nameOrPhone >>= \d -> many get >>= \e -> return (if isDigit . head $ b then (b, d) else (d, b), unwords [a, c, e])
phoneNumber = char '+' >> sepBy1 (many1 digit) (char '-') >>= \ds -> if validPhone ds then return . intercalate "-" $ ds else pfail
validPhone x = length x == 4 && ((\(c, ns) -> all (<=2) c && ns == [3,3,4]) . splitAt 1 . map length $ x)
nameOrPhone = between (char '<') (char '>') (many1 get) <|> phoneNumber
digit = satisfy isDigit
record ((p, n), rest) = Person {name = n, pNumber = p, address = clean rest}
clean = unwords . filter (not . null) . map (filter (`elem` addressChars )) . words . map (\c -> if c == '_' then ' ' else c)
phone :: String -> String -> String
phone dr num = present . filter ((==num) . pNumber) . mapMaybe process . lines $ dr
where present [] = "Error => Not found: " ++ num
present [x] = show x
present (x:xs) = "Error => Too many people: " ++ num
|
gafiatulin/codewars
|
src/5 kyu/Phonedir.hs
|
mit
| 1,841 | 0 | 21 | 456 | 756 | 415 | 341 | 25 | 4 |
-- Generated by protobuf-simple. DO NOT EDIT!
module Types.UInt64Msg where
import Control.Applicative ((<$>))
import Prelude ()
import qualified Data.ProtoBufInt as PB
newtype UInt64Msg = UInt64Msg
{ value :: PB.Word64
} deriving (PB.Show, PB.Eq, PB.Ord)
instance PB.Default UInt64Msg where
defaultVal = UInt64Msg
{ value = PB.defaultVal
}
instance PB.Mergeable UInt64Msg where
merge a b = UInt64Msg
{ value = PB.merge (value a) (value b)
}
instance PB.Required UInt64Msg where
reqTags _ = PB.fromList [PB.WireTag 1 PB.VarInt]
instance PB.WireMessage UInt64Msg where
fieldToValue (PB.WireTag 1 PB.VarInt) self = (\v -> self{value = PB.merge (value self) v}) <$> PB.getUInt64
fieldToValue tag self = PB.getUnknown tag self
messageToFields self = do
PB.putUInt64 (PB.WireTag 1 PB.VarInt) (value self)
|
sru-systems/protobuf-simple
|
test/Types/UInt64Msg.hs
|
mit
| 844 | 0 | 13 | 157 | 296 | 159 | 137 | 20 | 0 |
series = [x^x|x<-[1..1000]]
ans = sum series
|
stefan-j/ProjectEuler
|
q48.hs
|
mit
| 51 | 1 | 8 | 12 | 37 | 18 | 19 | 2 | 1 |
-- This file is covered by an MIT license. See 'LICENSE' for details.
-- Author: Bertram Felgenhauer
{-# LANGUAGE OverloadedStrings #-}
module Confluence.Direct.HuetToyamaOostrom (
confluent
) where
import Text.PrettyPrint.ANSI.Leijen
import Data.Rewriting.CriticalPair as C
import Data.Rewriting.Rules
import Confluence.Types
import Framework.Types
import Framework.Explain
import Util.Pretty
import Development as D
confluent :: (Show f, Show v, PPretty f, PPretty v, Ord f, Ord v) => Problem f v -> Explain Answer
confluent trs = section "Huet-Toyama-van-Oostrom (development closed)" $ do
if not (isLeftLinear trs) then do
tell "Not left-linear."
return Maybe
else do
let i = cpsIn' trs
o = cpsOut' trs
a <- processInner trs i
if a /= Yes then return a else do
processOuter trs o
processInner trs [] = return Yes
processInner trs (c:cs) = do
tell $ "Considering the inner critical pair"
tell $ nest 4 $ vcat [
ppretty (C.top c),
" =>" <+> ppretty (C.left c),
" ><" <+> ppretty (C.right c)]
let l = development trs (C.left c)
r = [D.fromTerm (C.right c)]
if null (D.toList $ D.simplify $ D.intersect l r) then do
tell "not joinable in a development step."
return Maybe
else do
-- tell "l"
-- tell $ nest 4 $ vcat $ map ppretty (D.toList l)
-- tell "r"
-- tell $ nest 4 $ vcat $ map ppretty (D.toList r)
tell "l & r"
tell $ nest 4 $ vcat $ map ppretty (D.toList $ D.intersect l r)
tell "joinable in a development step."
processInner trs cs
processOuter trs [] = return Yes
processOuter trs (c:cs) = do
tell $ "Considering the outer critical pair"
tell $ nest 4 $ vcat [
ppretty (C.top c),
" =>" <+> ppretty (C.left c),
" ><" <+> ppretty (C.right c)]
let l = development trs (C.left c)
r = development trs (C.right c)
if null (D.toList $ D.simplify $ D.intersect l r) then do
tell "not joinable in a pair of development steps."
return Maybe
else do
tell "l & r"
tell $ nest 4 $ vcat $ map ppretty (D.toList $ D.intersect l r)
tell "joinable using development steps."
processOuter trs cs
|
haskell-rewriting/confluence-tool
|
src/Confluence/Direct/HuetToyamaOostrom.hs
|
mit
| 2,344 | 0 | 15 | 706 | 735 | 357 | 378 | 56 | 3 |
{-# htermination realToFrac :: (Ratio Int) -> Float #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_realToFrac_4.hs
|
mit
| 56 | 0 | 2 | 9 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Char as Char
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as IO
import qualified Numeric
main = do
strings <- Text.lines <$> IO.getContents
let stringLiterals = map encode strings
let sizeOfStringLiterals = sum $ map Text.length stringLiterals
let sizeOfStrings = sum $ map Text.length strings
print sizeOfStringLiterals
print sizeOfStrings
print $ sizeOfStringLiterals - sizeOfStrings
encode :: Text -> Text
encode string = Text.concat ["\"", Text.concatMap encodeChar string, "\""]
where
encodeChar '\\' = "\\\\"
encodeChar '"' = "\\\""
encodeChar char = Text.singleton char
|
SamirTalwar/advent-of-code
|
2015/AOC_08_2.hs
|
mit
| 709 | 1 | 12 | 123 | 211 | 107 | 104 | 19 | 3 |
module Web.TSBot.ClientQuery.PrettyPrintSpec (spec) where
import Web.TSBot.ClientQuery.PrettyPrint ()
import Test.Hspec
spec :: Spec
spec = it "is" pending
|
taktoa/TSBot
|
test-suite/Web/TSBot/ClientQuery/PrettyPrintSpec.hs
|
mit
| 159 | 0 | 5 | 20 | 43 | 27 | 16 | 5 | 1 |
module TypeChecksAssignment where
--- Person
data Person = Person Bool deriving Show
printPerson :: Person -> IO()
printPerson person = putStrLn( show person )
--- Mood
data Mood = Blah
| Woot deriving (Show, Eq)
settleDown x = if x == Woot
then Blah
else x
--- Sentence
type Subject = String
type Verb = String
type Object = String
data Sentence =
Sentence Subject Verb Object
deriving (Eq, Show)
s1 = Sentence "dogs" "drool"
s2 = Sentence "Julie" "loves" "dog"
--- Papu
data Rocks =
Rocks String deriving (Eq, Show)
data Yeah =
Yeah Bool deriving (Eq, Show)
data Papu =
Papu Rocks Yeah
deriving (Eq, Show)
-- it actually works:
equalityForall :: Papu -> Papu -> Bool; equalityForall p p' = p == p'
|
Lyapunov/haskell-programming-from-first-principles
|
chapter_6/typechecks.hs
|
mit
| 771 | 0 | 7 | 197 | 247 | 138 | 109 | 25 | 2 |
-- A solution, but incredibly slow. One to work on.
module Main where
import Data.List
main :: IO ()
main = do
putStrLn $ show $ largestPrimeFactor 600851475143
factorize :: Int -> [Int]
factorize n = filter (\x -> (n `mod` x == 0)) [1 .. n]
largestPrimeFactor :: Int -> Int
largestPrimeFactor n = maximum $ filter (\f -> factorize f == [1,f]) (factorize n)
|
Praenomen/My-Project-Euler
|
Haskell/Problem003.hs
|
mit
| 366 | 0 | 10 | 74 | 144 | 79 | 65 | 9 | 1 |
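The comment at the top of that file flags the factorization as incredibly slow, since it tests every candidate divisor up to n. A minimal sketch, not taken from the repository, of the usual divide-out trial division, which only walks divisors up to the square root of the remaining cofactor:
largestPrimeFactor' :: Integer -> Integer
largestPrimeFactor' = go 2
  where
    go d n
      | d * d > n      = n                   -- remaining cofactor is prime
      | n `mod` d == 0 = go d (n `div` d)    -- strip the factor d completely
      | otherwise      = go (d + 1) n

main :: IO ()
main = print (largestPrimeFactor' 600851475143)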
{-
Copyright (c) 2015 Nils 'bash0r' Jonsson
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{- |
Module : $Header$
Description : Conversion from a main module to JavaScript.
Author : Nils 'bash0r' Jonsson
Copyright : (c) 2015 Nils 'bash0r' Jonsson
License : MIT
Maintainer : [email protected]
Stability : unstable
Portability : non-portable (Portability is untested.)
Conversion from a main module to JavaScript.
-}
module Language.PolyDSL.Transformation.JavaScript.MainModule
( VirtualResolver
, MainModule
) where
import Control.Applicative
import Control.Monad
import Language.JavaScript hiding (when)
import Language.Transformation.Protocol
import Language.Transformation.Semantics
import Language.PolyDSL.Lib
import qualified Language.PolyDSL.DOM as DOM
import Language.PolyDSL.Transformation.JavaScript.Internal
-- | A concrete main module.
type MainModule = MainModuleT VirtualResolverT DOM.ModuleT String
-- | A virtual resolver for compilation units.
type VirtualResolver = VirtualResolverT DOM.ModuleT String
instance CompilationUnitResolver VirtualResolverT where
resolveCompilationUnit (VirtualResolver ms) m = do
let ms' = filter (\m' -> unitName m' == m) ms
when (null ms') (fail ("Module " ++ show m ++ " is not in scope."))
when (length ms' /= 1) (fail ("Module name " ++ show m ++ " exists multiple times."))
let (m:_) = ms'
return m
|
project-horizon/framework
|
src/lib/Language/PolyDSL/Transformation/JavaScript/MainModule.hs
|
mit
| 2,550 | 0 | 15 | 488 | 255 | 141 | 114 | 22 | 0 |
{-# LANGUAGE BangPatterns #-}
module Main where
import Control.Monad.State
import Control.Monad.Writer
import Control.Monad.Reader
import Control.Monad.MonteCarlo
import Control.Monad.Loops
import Control.DeepSeq
import Control.Exception
import System.Random.TF
import Transport.NISTData
import Data.List (foldl')
import Graphics.Gloss hiding (Point, rotate)
----------------------------------------------------------------
-- Datatypes
----------------
data ParticleState = PS
{
noColls :: !Int
, remEnergy :: !Energy
, curPos :: !Point
, curDir :: !Angle
, path :: [(Point,Energy)]
} deriving (Show)
psInit :: ParticleState
psInit = PS 0 5000 (0,0) (0,1) []
type Energy = Float
type Point = (Float,Float)
type Angle = (Float,Float)
----------------------------------------------------------------
-- MonteCarlo
----------------
type Simulation = ReaderT (Float -> (Float,Float)) (StateT ParticleState (MonteCarlo TFGen))
-- Helper function for getting the cross-section data for the current energy
getMu :: Simulation (Float,Float)
getMu = do
en <- gets remEnergy
(t,a) <- asks (\f -> f en)
return (rho*t,rho*a)
where rho = 1 -- g/cm^3 (water)
-- Helper functions for sampling random numbers
uniform :: Simulation Float
uniform = lift (lift random)
uniformR :: (Float,Float) -> Simulation Float
uniformR bounds = lift (lift (randomR bounds))
-- Flies the particle some random distance with prob. according to
-- cross-section data
fly :: Simulation ()
fly = do
(PS i en (x,y) (ux,uy) ps) <- get
(mu_t,_) <- getMu
!eta <- uniform
let s = -(log eta / mu_t)
put (PS i en (x+ux*s,y+uy*s) (ux,uy) ps)
-- The main loop responsible for a single photon's lifetime
loop :: Simulation [(Point,Energy)]
loop = do
untilM_ (fly >> scatter) isBelowCutoff
exit
-- Terminates a particle if its energy is below the cutoff
isBelowCutoff :: Simulation Bool
isBelowCutoff = do
en <- gets remEnergy
return $ en < 10
-- Returns the path stored in the ParticleState
exit :: Simulation [(Point,Energy)]
exit = do
ps <- gets path
return $ ps
-- Randomly determines whether the scattering event scatters left or right
_scatterDir :: Simulation Float
_scatterDir = do
eta <- uniform
return $ if eta >= 0.5 then 1 else (-1)
-- Scattering event; compute scattering angle, record collision site
scatter :: Simulation ()
scatter = do
(PS i en (x,y) (ux,uy) ps) <- get
(mu_t,mu_en) <- getMu
let deltaW = mu_en * en / mu_t
dir <- _scatterDir
let angle = diffAngle en (en-deltaW) * dir
let (ux',uy') = rotate (ux,uy) angle
put (PS (i+1) (en-deltaW) (x,y) (ux',uy') (ps++[((x,y),deltaW)]))
-- Computes the angle to rotate based on energy exchanged in coll.
diffAngle :: Energy -> Energy -> Float
diffAngle en en' = acos $ 1 - 0.511 * (1/en' - 1/en) -- Knuth
-- Rotates a vector
rotate :: Point -> Float -> Point
rotate (x,y) th = (x*cos th - y*sin th, x*sin th + y*cos th)
----------------------------------------------------------------
-- Main
----------------
noRuns :: Int
noRuns = 10000
main :: IO ()
main = do
g <- newTFGen
fnist <- loadData "water.dat"
let unrolled = evalStateT (runReaderT loop fnist) psInit
let bs = experimentP (unrolled)
noRuns (noRuns `div` 200) g :: [[(Point,Energy)]]
evaluate (rnf bs)
let lengthF = fromIntegral . length
let avgCol = (foldl' (+) 0 (map lengthF bs)) / lengthF bs :: Double
putStrLn $ "Average number of collisions: " ++ show avgCol
displayResults bs
displayResults :: [[(Point, Energy)]] -> IO ()
displayResults res = display (InWindow "Sim." (800,800) (200,200))
white (results `mappend` Color white (Line [(0,-100),(0,0)]))
where color' = makeColor8 0 0 0 100
results = mconcat $ map (\p -> Color color' $ Line (map fst p)) res
|
icasperzen/hs-carbon-examples
|
examples-src/Transport/Transport.hs
|
mit
| 4,020 | 0 | 14 | 939 | 1,383 | 750 | 633 | 99 | 2 |
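The free-path sampling in `fly` above, s = -(log eta / mu_t), is inverse-transform sampling of an exponential distribution with rate mu_t. A small self-contained sketch (illustrative, not from the repository; it uses System.Random instead of the tf-random generator in the original) showing that the sampled paths average out to the mean free path 1/mu:
import Control.Monad (replicateM)
import System.Random (randomRIO)

-- Draw one exponentially distributed free path for attenuation coefficient mu,
-- mirroring the expression used in `fly`.
samplePath :: Double -> IO Double
samplePath mu = do
  eta <- randomRIO (1e-12, 1.0)   -- avoid log 0
  return (negate (log eta) / mu)

main :: IO ()
main = do
  xs <- replicateM 100000 (samplePath 0.2)
  print (sum xs / fromIntegral (length xs))  -- should be close to 1 / 0.2 = 5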
-- Homework01.hs
--
-- EECS 776, Homework 1
-- Author: Stefan Mendoza
-- Date: 25 September 2015
-- Email: [email protected]
module Main where
import Data.Char
rot13 :: String -> String
rot13 = map rotateChar
rotateChar :: Char -> Char
rotateChar c
| c `elem` ['A'..'Z'] =
if (ord c) + 13 > (ord 'Z')
then chr $ (ord c) - 13
else chr $ (ord c) + 13
| c `elem` ['a'..'z'] =
if (ord c) + 13 > (ord 'z')
then chr $ (ord c) - 13
else chr $ (ord c) + 13
| otherwise = c
main :: IO ()
main = do
putStr "Enter a phrase to be rotated: "
s <- getLine
putStrLn $ rot13 s
|
stefanmendoza/EECS-776
|
Homework01.hs
|
mit
| 664 | 0 | 10 | 214 | 245 | 129 | 116 | 20 | 3 |
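A quick self-contained sanity sketch (not part of the homework file, with the rotation restated so the snippet stands alone): ROT13 is its own inverse, so applying it twice should return the original string.
import Data.Char (chr, ord)

rot13 :: String -> String
rot13 = map step
  where
    step c
      | c `elem` ['A' .. 'Z'] = shift 'A' c
      | c `elem` ['a' .. 'z'] = shift 'a' c
      | otherwise             = c
    shift base c = chr (ord base + (ord c - ord base + 13) `mod` 26)

main :: IO ()
main = print (rot13 (rot13 "Hello, world!") == "Hello, world!")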
module Main where
import Data.List
main :: IO ()
main = putStrLn $ show result
--result :: Integer
--result = foldr (*) 1 [1 .. 100000]
result :: Integer
result = foldl' (*) 1 [1 .. 100000]
|
hnfmr/beginning_haskell
|
profiling-example/src/Main.hs
|
mit
| 195 | 0 | 6 | 42 | 58 | 34 | 24 | 6 | 1 |
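The commented-out foldr variant above has to recurse to the end of the list before it can multiply anything, while foldl' keeps a fully evaluated accumulator as it goes. A minimal self-contained sketch along the same lines (the digit-count output is illustrative, not from the repository):
import Data.List (foldl')

factorialStrict :: Integer -> Integer
factorialStrict n = foldl' (*) 1 [1 .. n]

main :: IO ()
main = print (length (show (factorialStrict 100000)))  -- number of digits in 100000!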
module FuzzyMatch where
matchingCount xs ys = length $ filter (uncurry (==)) $ zip xs ys
bigrams [] = []
bigrams [_] = []
bigrams xs@(_:xs') = take 2 xs : bigrams xs'
intersection [] _ = []
intersection (x:xs) ys = if x `elem` ys
then x : intersection xs ys
else intersection xs ys
diceIndex x y = fromIntegral (2*nT) / fromIntegral (nX + nY)
where
nT = length $ intersection bX bY
nX = length bX
nY = length bY
bX = bigrams x
bY = bigrams y
hammingDistance [] [] = Just 0
hammingDistance [] _ = Nothing
hammingDistance _ [] = Nothing
hammingDistance (x:xs) (y:ys)
| x == y = case hammingDistance xs ys of
Nothing -> Nothing
Just n -> Just (n+1)
| otherwise = hammingDistance xs ys
-- http://en.wikipedia.org/wiki/Levenshtein_distance
levenshteinDistance xs [] = length xs
levenshteinDistance [] ys = length ys
levenshteinDistance xs@(x:xs') ys@(y:ys') =
min m1 (levenshteinDistance xs' ys' + if x==y then 0 else 1)
where
m1 = min (levenshteinDistance xs ys' + 1) (levenshteinDistance xs' ys + 1)
-- TODO: Implementation should be based on: http://xlinux.nist.gov/dads//HTML/jaroWinkler.html standard.
jaroWrinkler sOne sTwo = undefined
|
dewaka/fuzzy_string_match
|
Text/FuzzyMatch.hs
|
mit
| 1,258 | 0 | 11 | 313 | 494 | 250 | 244 | 29 | 2 |
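The levenshteinDistance above follows the recursive definition from the cited Wikipedia article directly, which recomputes the same subproblems exponentially often. A self-contained sketch, not part of the original module, of the standard dynamic-programming formulation using a lazily filled array (all names are illustrative):
import Data.Array (listArray, range, (!))

levenshteinDP :: String -> String -> Int
levenshteinDP xs ys = table ! (m, n)
  where
    m = length xs
    n = length ys
    ax = listArray (1, m) xs
    ay = listArray (1, n) ys
    bnds = ((0, 0), (m, n))
    -- Each cell is defined in terms of earlier cells; laziness fills the table.
    table = listArray bnds [cell i j | (i, j) <- range bnds]
    cell i 0 = i
    cell 0 j = j
    cell i j = minimum
      [ table ! (i - 1, j) + 1
      , table ! (i, j - 1) + 1
      , table ! (i - 1, j - 1) + if ax ! i == ay ! j then 0 else 1
      ]

main :: IO ()
main = print (levenshteinDP "kitten" "sitting")  -- expected 3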
module Proteome.Test.MultiTagsTest where
import Hedgehog ((===))
import Path (File, Rel, parseAbsDir, parseRelDir, relfile, toFilePath, (</>))
import Path.IO (doesFileExist)
import Ribosome.Config.Setting (updateSetting)
import Ribosome.Nvim.Api.IO (bufferGetOption, vimGetCurrentBuffer)
import Ribosome.Test.Run (UnitTest)
import Ribosome.Test.Unit (tempDir)
import Proteome.Add (proAdd)
import Proteome.BufEnter (bufEnter)
import Proteome.Data.AddOptions (AddOptions (AddOptions))
import Proteome.Data.ProjectConfig (ProjectConfig (ProjectConfig))
import qualified Proteome.Settings as Settings (projectConfig, tagsArgs, tagsCommand, tagsFork)
import Proteome.Tags (proTags)
import Proteome.Test.Config (vars)
import Proteome.Test.Project (cil, cn, createTestProject, flag, fn, hask, tp)
import Proteome.Test.Unit (ProteomeTest, testWithDef)
multiSpec :: ProteomeTest ()
multiSpec = do
projectsDir <- parseAbsDir =<< tempDir "multi/projects"
updateSetting Settings.projectConfig (ProjectConfig [projectsDir] def def def def def def)
createTestProject tp fn
createTestProject tp cn
updateSetting Settings.tagsCommand "touch"
updateSetting Settings.tagsArgs ".tags"
updateSetting Settings.tagsFork False
proAdd $ AddOptions fn tp (Just False)
proAdd $ AddOptions cn tp (Just False)
proTags
bufEnter
haskPath <- parseRelDir (toString hask)
flagPath <- parseRelDir (toString flag)
cilPath <- parseRelDir (toString cil)
let ftags = projectsDir </> haskPath </> flagPath </> [relfile|.tags|]
let ctags = projectsDir </> haskPath </> cilPath </> [relfile|.tags|]
ftagsExists <- doesFileExist ftags
ctagsExists <- doesFileExist ctags
buf <- vimGetCurrentBuffer
tags <- bufferGetOption buf "tags"
ftagsExists === True
ctagsExists === True
toFilePath ftags <> "," <> toFilePath ctags === tags
test_multi :: UnitTest
test_multi = vars >>= testWithDef multiSpec
|
tek/proteome
|
packages/test/test/Proteome/Test/MultiTagsTest.hs
|
mit
| 1,902 | 0 | 12 | 252 | 580 | 312 | 268 | -1 | -1 |

module TiposAbstratosPolimorfismoParametricoLimitado where
import ExpressoesLambdaComposicaoLazyness
-- Returning now to the expression exercise from ExpressoesLambdaComposicaoLazyness:
-- representing expressions as triples is not a great fit (it models binary
-- expressions, not expressions in general) and is extremely limited (subexpressions
-- must be numbers; we cannot have complex subexpressions). We can solve this by
-- defining new types, with new elements, in Haskell. This is different from defining
-- synonyms for types whose values already exist in Haskell.
--
-- Besides primitive types, tuples and lists, we can have programmer-defined
-- structures, built as a union (lists are a particular case) of sets of elements with
-- different shapes (each element is preceded by a constructor and is made up of
-- different components).
type Operador = Char
data Expressao = Numero Float
| Unaria Operador Expressao
| Binaria Expressao Operador Expressao
-- Equivalent in Java to an abstract class (or interface) and three subclasses.
-- We can then define functions by pattern matching, just as we did with lists.
avaliard (Numero f) = f
avaliard (Binaria e1 op e2) = avOp op (avaliard e1) (avaliard e2)
avaliard (Unaria op e) | op == '-' = -(avaliard e)
| op == '+' = avaliard e
showExpressao (Numero f) = show f
showExpressao (Binaria e1 op e2) = "("++ (showExpressao e1) ++ (showOp op) ++
(showExpressao e2) ++ ")"
showExpressao (Unaria op e) = (showOp op) ++ (showExpressao e)
showOp '+' = " + "
showOp '-' = " - "
showOp '*' = " * "
showOp '/' = " / "
-- Testing
exp7 = Binaria (Numero 4) '+' (Binaria (Numero 6) '/' (Numero 2))
res7 = avaliard exp7
priexp7 = showExpressao exp7
-- Type constructors are seen as functions that build elements of the type.
-- Numero :: Float -> Expressao
-- Binaria :: Expressao -> Operador -> Expressao -> Expressao
-- Unaria :: Operador -> Expressao -> Expressao
--
-- avaliard :: Expressao -> Float
-- Improving on this with a datatype for operators reduces the number of elements of
-- the new type. It rules out elements such as "Binaria (Numero 5.1) ';' (Numero 5.2)",
-- which had no meaning for the problem at hand but were still part of the type's set
-- of elements. In particular, the function avaliard was not defined for every element
-- of its type. What would happen if the binary expression above were passed as an
-- argument? Now we can guarantee that every case is covered... It also rules out the
-- many characters that did not represent operators, shrinking the type to just the
-- meaningful elements.
data OpBinario = Soma | Sub | Mul | Div
data OpUnario = Pos | Neg
data Exp = Literal Float
| Un OpUnario Exp
| Bin Exp OpBinario Exp
-- A defined type can be polymorphic, parameterized by the type of its literal
-- elements. Expp then works as a type generator: for each type passed as an argument
-- to Expp, we get a new type.
data Expp t = Litp t
| Unp OpUnario (Expp t)
| Binp (Expp t) OpBinario (Expp t)
type ExpInt = Expp Int
type ExpStr = Expp String
type ExpFloat = Expp Float
exps = (Litp "4")
expf = (Litp 4.0)
-- Litp :: t -> Expp t
-- Unp :: OpUnario -> Expp t -> Expp t
-- Binp :: Expp t -> OpBinario -> Expp t -> Expp t
-- The evaluation function then needs to take as an argument a function that knows
-- how to turn literals into values (a literal evaluator), since we can now work with
-- literals of different types.
eval lit (Litp f) = lit f
eval lit (Binp e1 op e2) = evBOp op (eval lit e1) (eval lit e2)
eval lit (Unp Neg e) = -(eval lit e)
eval lit (Unp Pos e) = eval lit e
evBOp Soma = (+)
evBOp Sub = (-)
evBOp Mul = (*)
evBOp Div = (/)
-- Looking at the type of eval, we see that the literal evaluator has to return an
-- element of some type "a", but not just any "a": an "a" that is Fractional, a
-- subclass of Num that supports division (/).
--
-- eval :: Fractional a => (t -> a) -> Expp t -> a
-- evBOp :: Fractional a => OpBinario -> a -> a -> a
-- For expressions whose literals are numbers, the literal evaluator is simply the
-- identity function.
exp8 = Binp (Litp 5) Soma (Binp (Litp 6) Div (Litp 2))
res8 = eval (\x -> x) exp8
-- For expressions whose literals are strings, the literal evaluator is a function
-- that converts strings into numbers.
exp9 = Binp (Litp "6") Soma (Binp (Litp "6") Div (Litp "2"))
res9 = eval (\x -> (read x)) exp9
-- Existing Haskell types could have been defined this way... and other types too...
-- What are the differences between the elements of the three tree types defined below?
data List t = Nil | Cons t (List t)
data Tree t = NilT | Node t (Tree t) (Tree t)
data TTree t = Leaf t | NNode (TTree t) (TTree t)
data TTTree t = NilTTT | NNNode t [(TTTree t)]
-- Something similar to the type of eval happens with the types of == and show:
--
-- (==) :: Eq a => a -> a -> Bool
-- show :: Show a => a -> String
--
-- They do not work for an arbitrary type a. They work for any type a that satisfies
-- certain properties, i.e. that provides certain specific functions. The function is
-- not restricted to a single concrete type (a non-polymorphic function), nor is it
-- completely generic, working for every type (parametric polymorphism). It is a
-- middle ground: the function works for a subset of the language's types, a class of
-- types, and the parameter has to satisfy some conditions (bounded parametric
-- polymorphism).
|
pauloborba/plc
|
src/TiposAbstratosPolimorfismoParametricoLimitado.hs
|
cc0-1.0
| 5,798 | 0 | 10 | 1,203 | 911 | 507 | 404 | 50 | 1 |
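A minimal self-contained sketch of the bounded parametric polymorphism discussed in the closing comments of that module (the names here are illustrative, not from the original file): the Fractional constraint restricts the type variable to types that support (/), exactly like the inferred type of eval.
halveAll :: Fractional a => [a] -> [a]
halveAll = map (/ 2)

main :: IO ()
main = do
  print (halveAll [1, 2, 3 :: Double])
  print (halveAll [1.5, 2.5 :: Float])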
{- An example yi.hs that uses the Vim keymap with these additions:
- Always uses the VTY UI by default.
- The color style is darkBlueTheme
- The insert mode of the Vim keymap has been extended with a few additions
I find useful.
-}
import Yi.Prelude
import Prelude ()
import Yi
import Yi.Keymap.Vim
import Yi.Buffer.Indent (indentAsPreviousB)
import Yi.Keymap.Keys
import Yi.Misc (adjBlock)
import qualified Yi.UI.Vty
import Yi.Style.Library (darkBlueTheme)
import Data.List (isPrefixOf, reverse, replicate)
import Control.Monad (replicateM_)
-- Set soft tabs of 4 spaces in width.
prefIndent :: Mode s -> Mode s
prefIndent m = m {
modeIndentSettings = IndentSettings
{
expandTabs = True,
shiftWidth = 4,
tabSize = 4
}}
noHaskellAnnots m
| modeName m == "haskell" = m { modeGetAnnotations = modeGetAnnotations emptyMode }
| otherwise = m
main :: IO ()
main = yi $ defaultConfig
{
-- Use VTY as the default UI.
startFrontEnd = Yi.UI.Vty.start,
defaultKm = mkKeymap extendedVimKeymap,
modeTable = fmap (onMode $ noHaskellAnnots . prefIndent) (modeTable defaultConfig),
configUI = (configUI defaultConfig)
{
configTheme = darkBlueTheme
}
}
extendedVimKeymap = defKeymap `override` \super self -> super
{
v_top_level =
(deprioritize >> v_top_level super)
-- On 'o' in normal mode I always want to use the indent of the previous line.
-- TODO: If the line where the newline is to be inserted is inside a
-- block comment then the block comment should be "continued"
-- TODO: Ends up I'm trying to replicate vim's "autoindent" feature. This
-- should be made a function in Yi.
<|> (char 'o' ?>> beginIns self $ do
moveToEol
insertB '\n'
indentAsPreviousB
)
    -- On HXL (Haskell Language Extension) I want to go into insert mode such
-- that the cursor position is correctly placed to start entering the name
-- of an language extension in a LANGUAGE pragma.
-- A language pragma will take either the form
-- {-# LANGUAGE Foo #-}
-- or
-- >{-# LANGUAGE Foo #-}
-- The form should be chosen based on the current mode.
<|> ( pString "HXL" >> startExtesnionNameInsert self ),
v_ins_char =
(deprioritize >> v_ins_char super)
-- On enter I always want to use the indent of previous line
-- TODO: If the line where the newline is to be inserted is inside a
-- block comment then the block comment should be "continued"
-- TODO: Ends up I'm trying to replicate vim's "autoindent" feature. This
-- should be made a function in Yi.
<|> ( spec KEnter ?>>! do
insertB '\n'
indentAsPreviousB
)
-- I want softtabs to be deleted as if they are tabs. So if the
-- current col is a multiple of 4 and the previous 4 characters
-- are spaces then delete all 4 characters.
-- TODO: Incorporate into Yi itself.
<|> ( spec KBS ?>>! do
c <- curCol
line <- readRegionB =<< regionOfPartB Line Backward
sw <- indentSettingsB >>= return . shiftWidth
let indentStr = replicate sw ' '
toDel = if (c `mod` sw) /= 0
then 1
else if indentStr `isPrefixOf` reverse line
then sw
else 1
adjBlock (-toDel)
replicateM_ toDel $ deleteB Character Backward
)
-- On starting to write a block comment I want the close comment
-- text inserted automatically.
<|> choice
[ pString open_tag >>! do
insertN $ open_tag ++ " \n"
indentAsPreviousB
insertN $ " " ++ close_tag
lineUp
| (open_tag, close_tag) <-
[ ("{-", "-}") -- Haskell block comments
, ("/*", "*/") -- C++ block comments
]
]
}
startExtesnionNameInsert :: ModeMap -> I Event Action ()
startExtesnionNameInsert self = beginIns self $ do
p_current <- pointB
m_current <- getMarkB (Just "'")
setMarkPointB m_current p_current
moveTo $ Point 0
insertB '\n'
moveTo $ Point 0
insertN "{-# LANGUAGE "
p <- pointB
insertN " #-}"
moveTo p
|
codemac/yi-editor
|
src/Yi/Users/Corey.hs
|
gpl-2.0
| 4,927 | 0 | 21 | 1,912 | 753 | 402 | 351 | 73 | 3 |